From b654230e65ea4da322b822ad55a1dbf5548b30f8 Mon Sep 17 00:00:00 2001
From: Byeongkeun Ahn <7p54ks3@naver.com>
Date: Tue, 4 Apr 2023 14:59:36 +0900
Subject: [PATCH] Add 'Invertible Monotone Operators for Normalizing Flows' (#59)

---
 data/make_readme.py   | 4 ++--
 data/publications.yml | 7 +++++++
 readme.md             | 3 +++
 3 files changed, 12 insertions(+), 2 deletions(-)

diff --git a/data/make_readme.py b/data/make_readme.py
index 5097f53..c29d7aa 100644
--- a/data/make_readme.py
+++ b/data/make_readme.py
@@ -44,7 +44,7 @@ class Section(TypedDict):
 
 def load_items(key: str) -> list[Item]:
     """Load list[Item] from YAML file."""
-    with open(f"{ROOT}/data/{key}.yml") as file:
+    with open(f"{ROOT}/data/{key}.yml", encoding="utf8") as file:
         return yaml.safe_load(file.read())
 
 
@@ -162,7 +162,7 @@ def validate_item(itm: Item, section_title: str) -> None:
         section["markdown"] += md_str + "\n\n"
 
 
-with open(f"{ROOT}/readme.md", "r+") as file:
+with open(f"{ROOT}/readme.md", "r+", encoding="utf8") as file:
     readme = file.read()
 
     for section in sections.values():
diff --git a/data/publications.yml b/data/publications.yml
index 1623199..0b038e5 100644
--- a/data/publications.yml
+++ b/data/publications.yml
@@ -384,3 +384,10 @@
   authors: Aditya Kallapa, Sandeep Nagar, Girish Varma
   description: propose a k×k convolutional layer and Deep Normalizing Flow architecture which i) has a fast parallel inversion algorithm with running time O(nk^2) (n is height and width of the input image and k is kernel size), ii) masks the minimal amount of learnable parameters in a layer. iii) gives better forward pass and sampling times comparable to other k×k convolution-based models on real-world benchmarks. We provide an implementation of the proposed parallel algorithm for sampling using our invertible convolutions on GPUs.
   repo: https://github.com/aditya-v-kallappa/FInCFlow
+
+- title: 'Invertible Monotone Operators for Normalizing Flows'
+  url: https://arxiv.org/abs/2210.08176
+  date: 2022-10-15
+  authors: Byeongkeun Ahn, Chiyoon Kim, Youngjoon Hong, Hyunwoo J. Kim
+  description: This work proposes the monotone formulation, which uses monotone operators to overcome the Lipschitz-constant restriction of previous ResNet-based normalizing flows, and provides an in-depth theoretical analysis. Furthermore, it constructs an activation function called Concatenated Pila (CPila) to improve gradient flow. The resulting model, Monotone Flows, exhibits excellent performance on multiple density estimation benchmarks (MNIST, CIFAR-10, ImageNet32, ImageNet64).
+  repo: https://github.com/mlvlab/MonotoneFlows
diff --git a/readme.md b/readme.md
index e364dc5..7abfcac 100644
--- a/readme.md
+++ b/readme.md
@@ -48,6 +48,9 @@ A list of awesome resources for understanding and applying normalizing flows (NF
 1. 2023-01-03 - [FInC Flow: Fast and Invertible k×k Convolutions for Normalizing Flows](https://arxiv.org/abs/2301.09266) by Kallapa, Nagar et al.<br>
    propose a k×k convolutional layer and Deep Normalizing Flow architecture which i) has a fast parallel inversion algorithm with running time O(nk^2) (n is height and width of the input image and k is kernel size), ii) masks the minimal amount of learnable parameters in a layer. iii) gives better forward pass and sampling times comparable to other k×k convolution-based models on real-world benchmarks. We provide an implementation of the proposed parallel algorithm for sampling using our invertible convolutions on GPUs. [[Code](https://github.com/aditya-v-kallappa/FInCFlow)]
 
+1. 2022-10-15 - [Invertible Monotone Operators for Normalizing Flows](https://arxiv.org/abs/2210.08176) by Ahn, Kim et al.<br>
+   This work proposes the monotone formulation, which uses monotone operators to overcome the Lipschitz-constant restriction of previous ResNet-based normalizing flows, and provides an in-depth theoretical analysis. Furthermore, it constructs an activation function called Concatenated Pila (CPila) to improve gradient flow. The resulting model, Monotone Flows, exhibits excellent performance on multiple density estimation benchmarks (MNIST, CIFAR-10, ImageNet32, ImageNet64). [[Code](https://github.com/mlvlab/MonotoneFlows)]
+
 1. 2022-08-18 - [ManiFlow: Implicitly Representing Manifolds with Normalizing Flows](https://arxiv.org/abs/2208.08932) by Postels, Danelljan et al.<br>
    The invertibility constraint of NFs imposes limitations on data distributions that reside on lower dimensional manifolds embedded in higher dimensional space. This is often bypassed by adding noise to the data which impacts generated sample quality. This work generates samples from the original data distribution given full knowledge of perturbed distribution and noise model. They establish NFs trained on perturbed data implicitly represent the manifold in regions of maximum likelihood, then propose an optimization objective that recovers the most likely point on the manifold given a sample from the perturbed distribution.
 
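A note on the two `encoding="utf8"` additions in `data/make_readme.py`: without an explicit encoding, Python's `open()` falls back to the locale's preferred encoding (often cp1252 on Windows), so reading `publications.yml`, which contains non-ASCII characters such as the "×" in "k×k", can fail or come back garbled. Below is a minimal sketch of the failure mode this patch guards against; the sample file name is illustrative, not part of the repo.

```python
from pathlib import Path

# publications.yml contains non-ASCII characters such as "×" (U+00D7).
sample = Path("sample.yml")
sample.write_text("description: a k×k convolutional layer\n", encoding="utf8")

# Decoding UTF-8 bytes with a legacy locale encoding (e.g. cp1252, the default
# on many Windows setups) does not round-trip: depending on the bytes involved
# it either raises UnicodeDecodeError or silently yields mojibake.
try:
    print(sample.read_text(encoding="cp1252"))  # garbled output for this sample
except UnicodeDecodeError as exc:
    print(f"decode failed: {exc}")

# Passing encoding="utf8" explicitly, as the patch now does for both the YAML
# input and readme.md, reads the text back unchanged on any platform.
print(sample.read_text(encoding="utf8"))
```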
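For context on how the new YAML entry turns into the readme bullet: `make_readme.py` loads `data/publications.yml` via `load_items` and appends a markdown string per item to the section. The following is a simplified sketch, not the actual script logic; the field names are those shown in the diff, and the author shortening to "Ahn, Kim et al." mirrors the generated line visible in `readme.md`.

```python
import yaml

# The new entry as added to data/publications.yml by this patch (description abridged).
entry_yaml = """
- title: 'Invertible Monotone Operators for Normalizing Flows'
  url: https://arxiv.org/abs/2210.08176
  date: 2022-10-15
  authors: Byeongkeun Ahn, Chiyoon Kim, Youngjoon Hong, Hyunwoo J. Kim
  description: This work proposes the monotone formulation using monotone operators.
  repo: https://github.com/mlvlab/MonotoneFlows
"""

def to_markdown(itm: dict) -> str:
    """Render one publication item in the list format seen in readme.md."""
    # Last names of the first two authors followed by "et al.", e.g. "Ahn, Kim et al."
    last_names = [name.split()[-1] for name in itm["authors"].split(", ")[:2]]
    authors = ", ".join(last_names) + " et al."
    line = f"1. {itm['date']} - [{itm['title']}]({itm['url']}) by {authors}<br>\n"
    line += f"   {itm['description']} [[Code]({itm['repo']})]"
    return line

item = yaml.safe_load(entry_yaml)[0]
print(to_markdown(item))
```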