
Commit

Fixed bug with train config
adnaniazi committed Jul 7, 2024
1 parent cdb95eb commit c3fbf0e
Showing 5 changed files with 25 additions and 22 deletions.
3 changes: 2 additions & 1 deletion CHANGELOG.md
@@ -4,6 +4,8 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

## [Unreleased]
+ ### Fixed
+ - Bugs with using the train config functionality

## [0.1.4] - 2024-07-07
### Added
@@ -34,4 +36,3 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
[0.1.2]: https://github.com/adnaniazi/capfinder/compare/0.1.1...0.1.2
[0.1.1]: https://pypi.org/manage/project/capfinder/release/0.1.1/
[0.1.0]: https://pypi.org/manage/project/capfinder/release/0.1.0/

12 changes: 6 additions & 6 deletions README.md
@@ -21,17 +21,17 @@ A package for decoding RNA cap types

## Installation

- # CPU installation
+ ### CPU installation
```sh
pip install capfinder[cpu]
```

- # GPU installation (CUDA 12)
+ ### GPU installation (CUDA 12)
```sh
pip install capfinder[gpu] "jax[cuda12]" -f https://storage.googleapis.com/jax-releases/jax_cuda_releases.html
```

- # TPU installation
+ ### TPU installation
```sh
pip install capfinder[tpu] "jax[tpu]" -f https://storage.googleapis.com/jax-releases/libtpu_releases.html
```
@@ -46,18 +46,18 @@ pip install capfinder[tpu] "jax[tpu]" -f https://storage.googleapis.com/jax-releases/libtpu_releases.html
* Python 3.7+
* Create a virtual environment and install the dependencies

- # CPU installation
+ ### CPU installation
```sh
poetry install --extras cpu
```

- # GPU installation (CUDA 12)
+ ### GPU installation (CUDA 12)
```sh
poetry install --extras gpu
poetry run pip install "jax[cuda12]" -f https://storage.googleapis.com/jax-releases/jax_cuda_releases.html
```

- # TPU installation
+ ### TPU installation
```sh
poetry install --extras tpu
poetry run pip install "jax[tpu]" -f https://storage.googleapis.com/jax-releases/libtpu_releases.html
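Whichever extra is installed, a quick check (not part of this diff) confirms that JAX actually sees the intended backend:

```python
# Minimal post-install sanity check; prints the devices JAX detected.
import jax

print(jax.devices())  # e.g. [CpuDevice(id=0)], CUDA devices, or TPU devices
```

If the GPU or TPU extra was installed but only a CPU device is listed, the accelerator-specific jaxlib wheel from the `-f` index above is the usual culprit.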
4 changes: 3 additions & 1 deletion pyproject.toml
@@ -18,7 +18,9 @@ classifiers = [
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Programming Language :: Python :: 3.12",
"Topic :: Software Development :: Libraries :: Python Modules",
"Typing :: Typed",
]
Expand Down
4 changes: 2 additions & 2 deletions src/capfinder/cli.py
@@ -255,8 +255,8 @@ def create_train_config(
"dtype": "float16",
"n_workers": 10,
"n_classes": 4,
"use_local_dataset": True,
"remote_dataset_version": "latest",
"use_local_dataset": False,
"remote_dataset_version": "8.0.0",
},
"tune_params": {
"comet_project_name": "capfinder_tfr_tune",
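The generated defaults now point at a pinned remote dataset instead of a local one. A hypothetical sketch of how a pipeline might branch on these two keys (the function name and local path below are illustrative, not capfinder's actual implementation):

```python
# Illustrative only: how `use_local_dataset` / `remote_dataset_version`
# could steer dataset resolution. All names here are assumptions.
def resolve_dataset_source(dataset_params: dict) -> str:
    if dataset_params["use_local_dataset"]:
        return "path/to/local/dataset"  # hypothetical local location
    # A pinned version such as "8.0.0" (rather than "latest") means every
    # training run fetches the same dataset snapshot.
    return f"remote dataset, version {dataset_params['remote_dataset_version']}"
```

Pinning a concrete version is what makes the fixed config reproducible: "latest" can silently change between runs.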
24 changes: 12 additions & 12 deletions src/capfinder/training.py
@@ -235,6 +235,7 @@ def initialize_tuner(
hyper_model: "CNNLSTMModel | EncoderModel",
tune_params: dict,
model_save_dir: str,
+ model_type: ModelType,
) -> Union[Hyperband, BayesianOptimization, RandomSearch]:
"""Initialize a Keras Tuner object based on the specified tuning strategy.
@@ -247,6 +248,8 @@
model_save_dir: str
The directory where the model should be saved.
comet_project_name: str
+ model_type: ModelType
+ Type of the model to be trained.
Returns:
--------
@@ -465,16 +468,14 @@ def run_training_pipeline(
input_shape=(etl_params["target_length"], 1), n_classes=etl_params["n_classes"]
)

- tuner = initialize_tuner(hyper_model, tune_params, model_save_dir)
+ tuner = initialize_tuner(hyper_model, tune_params, model_save_dir, model_type)

- tensorboard_save_path = os.path.join(model_save_dir, "tensorboard_logs_encoder")
- logger.info(
-     f"Run tensorboard as following:\ntensorboard --logdir {tensorboard_save_path}"
- )
+ # tensorboard_save_path = os.path.join(model_save_dir, "tensorboard_logs_encoder", model_type)
+ # logger.info(
+ #     f"Run tensorboard as following:\ntensorboard --logdir {tensorboard_save_path}"
+ # )

- # Split train into train-val sets
- # dataset_size = len(list(train_dataset)) # Calculate the dataset size

dataset_size = train_dataset.reduce(0, lambda x, _: x + 1).numpy()

train_size = int(0.8 * dataset_size) # 80% for training
@@ -494,7 +495,6 @@
validation_data=val_dataset,
validation_steps=val_size,
epochs=tune_params["max_epochs_hpt"],
- # batch_size=tune_params["batch_size"],
callbacks=[
keras.callbacks.EarlyStopping(
patience=tune_params["patience"], restore_best_weights=True
@@ -722,10 +722,10 @@ def run_training_pipeline(
text=conf_matrix_str_train,
metadata={"Description": "Train Confusion Matrix"},
)
- train_experiment.log_text(
-     text=f"tensorboard --logdir {tensorboard_save_path}",
-     metadata={"Description": "Tensorboard command"},
- )
+ # train_experiment.log_text(
+ #     text=f"tensorboard --logdir {tensorboard_save_path}",
+ #     metadata={"Description": "Tensorboard command"},
+ # )

# Log the test set confusion matrix to the Comet ML dashboard pane
train_experiment.log_confusion_matrix(
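The surviving (uncommented) code counts records with a full pass over the dataset and then takes an 80/20 train/validation split. A minimal, self-contained sketch of that pattern in `tf.data`, using a toy dataset in place of capfinder's real TFRecord input:

```python
import tensorflow as tf

# Toy stand-in for the training dataset produced by the ETL step.
train_dataset = tf.data.Dataset.range(100)

# reduce() iterates the whole dataset once to count elements; useful when
# the cardinality of a TFRecord-backed dataset is unknown up front.
dataset_size = train_dataset.reduce(0, lambda count, _: count + 1).numpy()

train_size = int(0.8 * dataset_size)  # 80% for training
val_size = dataset_size - train_size  # remaining 20% for validation

train_split = train_dataset.take(train_size)
val_split = train_dataset.skip(train_size)

print(dataset_size, train_size, val_size)  # 100 80 20
```

`validation_steps=val_size` in the tuner search call then lines up with the number of validation elements, assuming each dataset element is already one batch.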

