|
| 1 | +#!/usr/bin/env python |
| 2 | +# -*- coding: utf-8 -*- |
| 3 | + |
| 4 | +""" |
| 5 | +Example of using the new time series feature layers in keras-data-processor. |
| 6 | +
|
| 7 | +This example demonstrates how to use the WaveletTransformLayer and TSFreshFeatureLayer |
| 8 | +for extracting features from time series data. |
| 9 | +""" |
| 10 | + |
| 11 | +import numpy as np |
| 12 | +import matplotlib.pyplot as plt |
| 13 | +from tensorflow.keras.models import Model |
| 14 | +from tensorflow.keras.layers import Input, Dense, Concatenate |
| 15 | + |
| 16 | +from kdp.layers.time_series import ( |
| 17 | + WaveletTransformLayer, |
| 18 | + TSFreshFeatureLayer, |
| 19 | + LagFeatureLayer, |
| 20 | +) |
| 21 | + |
| 22 | + |
def generate_sample_data(n_samples=1000, n_features=1):
    """Generate a synthetic time series dataset for next-step prediction.

    The base signal is a chirped sine wave (frequency increases with time)
    plus a linear trend, a slower seasonal component, and Gaussian noise,
    normalized to zero mean and unit variance. Extra features, if requested,
    are phase-shifted/rescaled variations of the base signal.

    Args:
        n_samples: Total number of time steps to generate.
        n_features: Number of feature columns (1 -> 1-D signal,
            >1 -> array of shape (n_samples, n_features)).

    Returns:
        Tuple ``(X_train, y_train, X_test, y_test)``. The target for input
        row ``i`` is the value of feature 0 at step ``i + 1``.
    """
    np.random.seed(42)  # deterministic data for a reproducible example

    # Time steps
    t = np.linspace(0, 10 * np.pi, n_samples)

    # Base sine wave with linearly increasing frequency (a chirp)
    base_signal = np.sin(t * (1 + t / (10 * np.pi)))

    # Add trend and seasonality for complexity
    trend = 0.3 * t / (10 * np.pi)
    seasonality = 0.5 * np.sin(0.5 * t)

    # Create signal with noise
    signal = base_signal + trend + seasonality + np.random.normal(0, 0.2, n_samples)

    # Normalize to zero mean / unit variance
    signal = (signal - np.mean(signal)) / np.std(signal)

    # For multiple features, create variations of the base signal
    if n_features > 1:
        signals = [signal]
        for i in range(1, n_features):
            # Different phase shifts and trend scaling per extra feature
            variation = np.sin(t * (1 + t / (10 * np.pi) + i * 0.2)) + trend * (
                1.0 + 0.1 * i
            )
            variation = (variation - np.mean(variation)) / np.std(variation)
            signals.append(variation)
        signal = np.column_stack(signals)

    # Train/test split. The target for row i is feature 0 at step i + 1, so
    # the very last row has no target and must be excluded from X_test.
    # (Previously X_test kept that row, leaving it one element longer than
    # y_test and breaking model.fit(validation_data=...).)
    train_size = int(0.8 * n_samples)
    X_train = signal[:train_size]
    X_test = signal[train_size:-1]

    # Next-step regression targets, always taken from feature 0
    target = signal[:, 0] if n_features > 1 else signal
    y_train = target[1 : train_size + 1]
    y_test = target[train_size + 1 :]

    return X_train, y_train, X_test, y_test
| 68 | + |
| 69 | + |
def build_model_with_feature_layers(input_shape):
    """Construct and compile a regression model built on time series feature layers.

    Three feature extractors (wavelet transform, tsfresh-style statistics,
    and lag features) are applied to the same input, concatenated, and fed
    through a small dense head that predicts a single value.

    Args:
        input_shape: Shape of one input sample, excluding the batch dimension.

    Returns:
        A compiled Keras ``Model`` (Adam optimizer, MSE loss, MAE metric).
    """
    model_input = Input(shape=input_shape)

    # Multi-resolution wavelet features
    wavelet_out = WaveletTransformLayer(
        levels=3, window_sizes=[4, 8, 16], flatten_output=True
    )(model_input)

    # Summary statistics in the style of tsfresh
    stats_out = TSFreshFeatureLayer(
        features=["mean", "std", "min", "max", "median", "skewness", "kurtosis"],
        normalize=True,
    )(model_input)

    # Lagged copies of the signal; unavailable lags are filled with zeros
    lags_out = LagFeatureLayer(
        lag_indices=[1, 2, 3, 5, 7, 14, 21],
        drop_na=False,
    )(model_input)

    # Merge all extracted features into one vector
    merged = Concatenate()([wavelet_out, stats_out, lags_out])

    # Dense prediction head
    hidden = Dense(64, activation="relu")(merged)
    hidden = Dense(32, activation="relu")(hidden)
    prediction = Dense(1)(hidden)

    model = Model(inputs=model_input, outputs=prediction)
    model.compile(optimizer="adam", loss="mse", metrics=["mae"])

    return model
| 105 | + |
| 106 | + |
def main():
    """Run the end-to-end example: generate data, build, train, and plot."""
    # Synthetic two-feature dataset
    X_train, y_train, X_test, y_test = generate_sample_data(
        n_samples=1000, n_features=2
    )

    print(f"X_train shape: {X_train.shape}")
    print(f"y_train shape: {y_train.shape}")

    # Ensure inputs are 2-D: (samples, features)
    if len(X_train.shape) == 1:
        X_train = X_train.reshape(-1, 1)
        X_test = X_test.reshape(-1, 1)

    # Build and inspect the model
    model = build_model_with_feature_layers(input_shape=(X_train.shape[1],))
    model.summary()

    # Train
    history = model.fit(
        X_train,
        y_train,
        validation_data=(X_test, y_test),
        epochs=50,
        batch_size=32,
        verbose=1,
    )

    # Loss and MAE curves, side by side
    plt.figure(figsize=(12, 4))
    curve_specs = [
        (1, "loss", "Model loss", "Loss (MSE)"),
        (2, "mae", "Model MAE", "MAE"),
    ]
    for position, metric, title, ylabel in curve_specs:
        plt.subplot(1, 2, position)
        plt.plot(history.history[metric])
        plt.plot(history.history[f"val_{metric}"])
        plt.title(title)
        plt.ylabel(ylabel)
        plt.xlabel("Epoch")
        plt.legend(["Train", "Validation"], loc="upper right")

    plt.tight_layout()
    plt.savefig("time_series_features_training.png")
    print("Training plot saved as 'time_series_features_training.png'")

    # Held-out evaluation
    test_loss, test_mae = model.evaluate(X_test, y_test, verbose=0)
    print(f"Test Loss (MSE): {test_loss:.4f}")
    print(f"Test MAE: {test_mae:.4f}")

    # Predicted vs. actual series
    predictions = model.predict(X_test)

    plt.figure(figsize=(12, 6))
    plt.plot(y_test, label="Actual")
    plt.plot(predictions, label="Predicted")
    plt.title("Time Series Prediction with Feature Layers")
    plt.xlabel("Time Step")
    plt.ylabel("Value")
    plt.legend()
    plt.savefig("time_series_features_prediction.png")
    print("Prediction plot saved as 'time_series_features_prediction.png'")
| 178 | + |
| 179 | + |
# Standard script entry point: run the example only when executed directly.
if __name__ == "__main__":
    main()