Incorporated zero masking
adnaniazi committed Jun 5, 2024
1 parent 911fb4b commit ad5296b
Showing 5 changed files with 385 additions and 131 deletions.
1 change: 1 addition & 0 deletions src/capfinder/collate.py
@@ -312,6 +312,7 @@ def collate_bam_pod5_worker(

    # 9. Save the train/test and metadata information
    # We need to store train/test data only for the good reads
    # TODO: make sure that the ROI signal array is not empty; example: c4e4c483-1c19-4812-909e-0f7dbd3c8921,1,[]
    if read_type == "good_reads":
        worker_state["data_writer"].writerow(
            [read_id, cap_class, roi_data["roi_signal"]]
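The TODO above flags reads whose ROI signal array is empty. Below is a minimal sketch of the kind of guard it suggests, assuming roi_data["roi_signal"] supports len() and that a loguru-style logger is available; none of this is part of the commit itself.

    # Hypothetical guard (not in this commit): skip good reads whose ROI signal
    # is empty so that rows like "c4e4c483-...,1,[]" never reach the training CSV.
    if read_type == "good_reads":
        roi_signal = roi_data["roi_signal"]
        if len(roi_signal) == 0:
            logger.warning(f"Empty ROI signal for read {read_id}; row not written")
        else:
            worker_state["data_writer"].writerow([read_id, cap_class, roi_signal])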
18 changes: 14 additions & 4 deletions src/capfinder/model.py
@@ -53,7 +53,15 @@ def transformer_encoder(
    x = layers.Dropout(dropout)(x)
    x = layers.Conv1D(filters=inputs.shape[-1], kernel_size=1)(x)
    x = layers.LayerNormalization(epsilon=1e-6)(x)
    return x + res

    # Masking for zero padding
    mask = layers.Lambda(
        lambda x: keras.ops.cast(keras.ops.equal(x, 0.0), dtype="float32"),
        output_shape=(inputs.shape[1], 1),  # Specify output shape
    )(inputs)
    x = (x + res) * mask  # Apply mask to the summed output (including residual)

    return x


def build_model(
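As a quick standalone check of what the new Lambda mask computes (assuming the Keras 3 keras.ops API), the snippet below applies the same expression to a zero-padded toy tensor. Note the polarity: keras.ops.equal(x, 0.0) yields 1.0 at the zero-padded positions, whereas keras.ops.not_equal(x, 0.0) would mark the non-padded ones. This sketch is illustrative and not part of the commit.

    import numpy as np
    import keras

    # One sequence of length 4 with a single channel; the last two steps are zero padding.
    x = keras.ops.convert_to_tensor(np.array([[[0.5], [1.2], [0.0], [0.0]]], dtype="float32"))

    pad_mask = keras.ops.cast(keras.ops.equal(x, 0.0), dtype="float32")
    print(keras.ops.convert_to_numpy(pad_mask).squeeze())     # [0. 0. 1. 1.] -> 1.0 where the input is zero

    signal_mask = keras.ops.cast(keras.ops.not_equal(x, 0.0), dtype="float32")
    print(keras.ops.convert_to_numpy(signal_mask).squeeze())  # [1. 1. 0. 0.] -> 1.0 at real samples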
@@ -110,12 +118,14 @@ def build_model(
    for _ in range(num_transformer_blocks):
        x = transformer_encoder(x, head_size, num_heads, ff_dim, dropout)

    # Save the encoder output
    encoder_output = x

    # Apply global average pooling
    x = layers.GlobalAveragePooling1D(data_format="channels_last")(x)

    # Save the encoder output
    # The vectors from the GlobalAveragePooling1D are of fixed size
    # Before this layer, they are not of fixed length
    encoder_output = x

    # Add multi-layer perceptron (MLP) layers
    for dim in mlp_units:
        x = layers.Dense(dim, activation="relu")(x)
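With encoder_output now saved after pooling, the stored tensor is a fixed-size vector per read rather than a variable-length sequence. Below is a hedged sketch of how such a pooled tensor is typically reused; the inputs and outputs variables and the idea of exposing a separate encoder model are assumptions for illustration, not something this commit does.

    # Hypothetical reuse of the pooled encoder_output (not part of this commit):
    # GlobalAveragePooling1D collapses the time axis, so every read maps to a
    # fixed-length vector and the same graph can back both a classifier and an embedder.
    classifier = keras.Model(inputs=inputs, outputs=outputs, name="cap_classifier")
    encoder_model = keras.Model(inputs=inputs, outputs=encoder_output, name="cap_encoder")

    # embeddings = encoder_model.predict(signal_batch)  # shape: (num_reads, num_channels)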