From cd60732f01913fead0b9d8a1806bc25ce9f9e723 Mon Sep 17 00:00:00 2001 From: tbedford Date: Tue, 16 Apr 2024 11:58:42 +0100 Subject: [PATCH 1/3] [wip] - update tutorial to reflect updated template --- .../predictive-maintenance/alert-service.md | 14 +-- .../predictive-maintenance/data-generator.md | 46 +++------ .../predictive-maintenance/downsampling.md | 77 +++++++++------ .../forecast-service.md | 47 +++------ .../predictive-maintenance/get-project.md | 4 +- .../predictive-maintenance/influxdb-alerts.md | 5 +- .../influxdb-raw-data.md | 2 +- .../predictive-maintenance/overview.md | 97 +++---------------- .../predictive-maintenance/phone-alerts.md | 50 +++------- .../predictive-maintenance/summary.md | 4 +- mkdocs.yml | 22 ++--- 11 files changed, 128 insertions(+), 240 deletions(-) diff --git a/docs/tutorials/predictive-maintenance/alert-service.md b/docs/tutorials/predictive-maintenance/alert-service.md index b7a4978b..5a838fe1 100644 --- a/docs/tutorials/predictive-maintenance/alert-service.md +++ b/docs/tutorials/predictive-maintenance/alert-service.md @@ -4,7 +4,7 @@ Sends alerts to an output topic when the temperature is under or over the thresh It receives data from two topics (3d printer data and forecast) and triggers an alert (to output topic `alerts`) if the temperature is under or over the threshold. -![pipline section](./images/alert-pipeline-segment.png) +![pipeline section](./images/alert-pipeline-segment.png) The default thresholds are as shown in the following table: @@ -17,7 +17,7 @@ The default thresholds are as shown in the following table: | min_hotend_temperature | 245 | | max_hotend_temperature | 255 | -These thresholds are used to determine if the temperature, or forecast temperature, are under or over the threshold values. If so these alerts are published to the `alerts` topic. +These thresholds are used to determine if the temperature, or forecast temperature, are under or over the threshold values. 
If so, these alerts are published to the `json-alerts` topic.
} ``` + ## Check the log messages diff --git a/docs/tutorials/predictive-maintenance/data-generator.md b/docs/tutorials/predictive-maintenance/data-generator.md index 6af24184..334cd25e 100644 --- a/docs/tutorials/predictive-maintenance/data-generator.md +++ b/docs/tutorials/predictive-maintenance/data-generator.md @@ -14,7 +14,7 @@ The [forecasting algorithm](./forecast-service.md) that attempts to estimate whe ## Data published -The generated data is published to the `3d-printer-data` topic: +The generated data is published to the `json-3d-printer-data` topic: * Ambient temperature * Ambient temperature with fluctuations @@ -33,35 +33,13 @@ If you look at the messages in the `Messages` view, you'll see data has the foll ``` json { - "Epoch": 0, - "Timestamps": [ - 1701277527000000000 - ], - "NumericValues": { - "hotend_temperature": [ - 250.8167407832582 - ], - "bed_temperature": [ - 106.9299672495977 - ], - "ambient_temperature": [ - 36.92387946005222 - ], - "fluctuated_ambient_temperature": [ - 36.92387946005222 - ] - }, - "StringValues": { - "original_timestamp": [ - "2023-11-29 17:05:27" - ] - }, - "BinaryValues": {}, - "TagValues": { - "printer": [ - "Printer 72" - ] - } + "hotend_temperature": 249.52922614294954, + "bed_temperature": 110.12854118355098, + "ambient_temperature": 38.70099292962708, + "fluctuated_ambient_temperature": 38.70099292962708, + "timestamp": "2024-04-16T17:07:03.717628", + "original_timestamp": "2024-04-16T17:07:03.717628", + "printer": "Printer 33" } ``` @@ -91,15 +69,15 @@ Review the code, you'll see that data is generated for each printer, and each pr tasks = [] printer_data = generate_data() -# Distribute all printers over the data length -delay_seconds = int(os.environ['datalength']) / replay_speed / number_of_printers +# Distribute all printers over the data length (defaults to 60 seconds) +delay_seconds = get_data_length() / replay_speed / number_of_printers for i in range(number_of_printers): - # Set stream ID or leave 
parameters empty to get stream ID generated. + # Set MessageKey/StreamID or leave parameters empty to get a generated message key. name = f"Printer {i + 1}" # We don't want a Printer 0, so start at 1 # Start sending data, each printer will start with some delay after the previous one - tasks.append(asyncio.create_task(generate_data_and_close_stream_async(topic_producer, name, printer_data.copy(), delay_seconds * i))) + tasks.append(asyncio.create_task(generate_data_async(topic, producer, name, printer_data.copy(), int(delay_seconds * i)))) await asyncio.gather(*tasks) ``` diff --git a/docs/tutorials/predictive-maintenance/downsampling.md b/docs/tutorials/predictive-maintenance/downsampling.md index f69397c5..92b75920 100644 --- a/docs/tutorials/predictive-maintenance/downsampling.md +++ b/docs/tutorials/predictive-maintenance/downsampling.md @@ -4,47 +4,60 @@ This service reduces the sampling rate of data from one per second to one per mi ![Downsampling pipeline segment](./images/downsampling-pipeline-segment.png) -The service uses a buffer to buffer data for one minute before releasing. +Data is aggreagted using a 10 second tumbling window: ``` python -# buffer 1 minute of data -buffer_configuration = qx.TimeseriesBufferConfiguration() -buffer_configuration.time_span_in_milliseconds = 1 * 60 * 1000 +# create a tumbling window of 10 seconds +# use the reducer and initializer configured above +# get the 'final' values for the window once the window is closed. 
+sdf = ( + sdf.tumbling_window(timedelta(seconds=10)) + .reduce(reducer=reducer, initializer=initializer) + .final() +) ``` -During the buffering the data is aggregated in the dataframe handler: +The initializer and reducxer are shown here: ``` python -def on_dataframe_received_handler(originating_stream: qx.StreamConsumer, df: pd.DataFrame): - if originating_stream.properties.name is not None and stream_producer.properties.name is None: - stream_producer.properties.name = originating_stream.properties.name + "-down-sampled" - - # Identify numeric and string columns - numeric_columns = [col for col in df.columns if not col.startswith('TAG__') and - col not in ['time', 'timestamp', 'original_timestamp', 'date_time']] - string_columns = [col for col in df.columns if col.startswith('TAG__')] - - # Create an aggregation dictionary for numeric columns - numeric_aggregation = {col: 'mean' for col in numeric_columns} - - # Create an aggregation dictionary for string columns (keeping the last value) - string_aggregation = {col: 'last' for col in string_columns} - - # Merge the two aggregation dictionaries - aggregation_dict = {**numeric_aggregation, **string_aggregation} - - df["timestamp"] = pd.to_datetime(df["timestamp"]) - - # resample and get the mean of the input data - df = df.set_index("timestamp").resample('1min').agg(aggregation_dict).reset_index() - - # Send filtered data to output topic - stream_producer.timeseries.buffer.publish(df) +def reducer(state: dict, value: dict) -> dict: + """ + 'reducer' will be called for every message except the first. + We add the values to sum them so we can later divide by the + count to get an average. 
+ """ + + state['sum_hotend_temperature'] += value['hotend_temperature'] + state['sum_bed_temperature'] += value['bed_temperature'] + state['sum_ambient_temperature'] += value['ambient_temperature'] + state['sum_fluctuated_ambient_temperature'] += value['fluctuated_ambient_temperature'] + state['sum_count'] += 1 + return state + +def initializer(value: dict) -> dict: + """ + 'initializer' will be called only for the first message. + This is the time to create and initialize the state for + use in the reducer funciton. + """ + + return { + 'sum_hotend_temperature': value['hotend_temperature'], + 'sum_bed_temperature': value['bed_temperature'], + 'sum_ambient_temperature': value['ambient_temperature'], + 'sum_fluctuated_ambient_temperature': value['fluctuated_ambient_temperature'], + 'sum_timestamp': value['timestamp'], + 'sum_original_timestamp': value['original_timestamp'], + 'sum_printer': value['printer'], + 'sum_count': 1 + } ``` -The aggregated data is published to the output stream (one stream for each printer). +The result is tyhat the mean is calculated for the temperatures over the period of the tumbling window. + +The aggregated data is published to the output topic. -The output topic for the service is `downsampled-3d-printer-data`. Other services such as the Forecast service, and the InfluxDB raw data storage service subscribe to this topic. +The output topic for the service is `json-downsampled-3d-printer-data`. Other services such as the Forecast service, and the InfluxDB raw data storage service subscribe to this topic. ## πŸƒβ€β™€οΈ Next step diff --git a/docs/tutorials/predictive-maintenance/forecast-service.md b/docs/tutorials/predictive-maintenance/forecast-service.md index fc960137..d5b40209 100644 --- a/docs/tutorials/predictive-maintenance/forecast-service.md +++ b/docs/tutorials/predictive-maintenance/forecast-service.md @@ -4,7 +4,7 @@ Generates a forecast for the temperature data received from the input topic. 
Thi ![Forecast pipeline segment](./images/forecast-pipeline-segment.png) -The forecast is made using the downsampled data as the input, and using the scikit-learn library. The forecasts are published to the `forecast` topic. The Alert service and Printers dashboard service both subscribe to this topic. +The forecast is made using the downsampled data as the input, and using the scikit-learn library. The forecasts are published to the `json-forecast` topic. The Alert service subscribes to this topic. ## Data format @@ -12,34 +12,8 @@ The forecast data format is: ```json { - "Epoch": 0, - "Timestamps": [ - 1701284880000000000, - 1701284940000000000, - 1701285000000000000, - ... - 1701313620000000000 - ], - "NumericValues": { - "forecast_fluctuated_ambient_temperature": [ - 42.35418149532191, - 42.43955555085827, - 42.52524883234062, - ... - 119.79365961797913 - ] - }, - "StringValues": {}, - "BinaryValues": {}, - "TagValues": { - "printer": [ - "Printer 19-down-sampled", - "Printer 19-down-sampled", - "Printer 19-down-sampled", - ... 
- "Printer 19-down-sampled" - ] - } + "timestamp": "2024-04-16 18:03:20", + "forecast": 72.21788743081183 } ``` @@ -48,7 +22,7 @@ The forecast data format is: The work of the prediction is carried out by the `scikit-learn` library, using a quadratic polynomial (second order) linear regression algorithm: ``` python -forecast_input = df[parameter_name] +forecast_input = list(map(lambda row: row["mean_fluctuated_ambient_temperature"], rows)) # Define the degree of the polynomial regression model degree = 2 @@ -59,8 +33,17 @@ model.fit(np.array(range(len(forecast_input))).reshape(-1, 1), forecast_input) # Forecast the future values forecast_array = np.array(range(len(forecast_input), len(forecast_input) + forecast_length)).reshape(-1, 1) forecast_values = model.predict(forecast_array) -# Create a DataFrame for the forecast -fcast = pd.DataFrame(forecast_values, columns=[forecast_label]) + +result = [] +timestamp = rows[-1]["timestamp"] + +for value in forecast_values: + timestamp += 60 * 1000 + result.append({ + "timestamp": timestamp, + "forecast": float(value) + }) +return result ``` ## πŸƒβ€β™€οΈ Next step diff --git a/docs/tutorials/predictive-maintenance/get-project.md b/docs/tutorials/predictive-maintenance/get-project.md index 5ebebd04..31642e7f 100644 --- a/docs/tutorials/predictive-maintenance/get-project.md +++ b/docs/tutorials/predictive-maintenance/get-project.md @@ -21,7 +21,7 @@ You'll need to configure the following credentials for each Quix service that ne | `INFLUXDB_MEASUREMENT_NAME` | The InfluxDB measurement to read data from. If not specified, the name of the output topic will be used. | | `TIMESTAMP_COLUMN` | This is the field in your data that represents the timestamp in nanoseconds. If you leave this blank, the message timestamp received from the broker is used. Case sensitive. Optional. | | `CONSUMER_GROUP_NAME` | The name of the consumer group to use when consuming from Kafka. 
| -| `bearer_token` | Printers dashboard | A [PAT](../../develop/authentication/personal-access-token.md) that the web app uses to authenticate the Streaming Reader and Streaming Writer APIs. | + The above is a list of environment variables that you are going to configure. @@ -35,7 +35,9 @@ To create the secret: 3. In the `Secrets management` dialog, click `+ New secret` and use this to create the `INFLUXDB_TOKEN` secret, and assign it to the `INFLUXDB_TOKEN` variable. + 4. Also create a secret for `bearer_token` - the value will be a PAT. You can learn how to generate a PAT [here](../../develop/authentication/personal-access-token.md). +--> These secrets are then assigned to their corresponding environment variables. diff --git a/docs/tutorials/predictive-maintenance/influxdb-alerts.md b/docs/tutorials/predictive-maintenance/influxdb-alerts.md index acd808f4..e33bef6f 100644 --- a/docs/tutorials/predictive-maintenance/influxdb-alerts.md +++ b/docs/tutorials/predictive-maintenance/influxdb-alerts.md @@ -4,7 +4,7 @@ This service uses the standard Quix InfluxDB 3.0 [connector](../../connectors/in ![InfluxDB raw data pipeline segment](./images/influxdb-alerts-pipeline-segment.png) -In this pipeline the connector subscribes to the `alerts` topic, and writes these messages into InfluxDB for permanent storage. +In this pipeline the connector subscribes to the `json-alerts` topic, and writes these messages into InfluxDB for permanent storage. ## Query the data in InfluxDB @@ -17,4 +17,5 @@ Explore the table of data to ensure you are familiar with the data stored. 
## πŸƒβ€β™€οΈ Next step -[Part 8 - Printers dashboard :material-arrow-right-circle:{ align=right }](./printers-dashboard.md) + +[Part 8 - Summary :material-arrow-right-circle:{ align=right }](./summary.md) diff --git a/docs/tutorials/predictive-maintenance/influxdb-raw-data.md b/docs/tutorials/predictive-maintenance/influxdb-raw-data.md index b9ed9c8c..8909028e 100644 --- a/docs/tutorials/predictive-maintenance/influxdb-raw-data.md +++ b/docs/tutorials/predictive-maintenance/influxdb-raw-data.md @@ -4,7 +4,7 @@ This service uses the standard Quix InfluxDB 3.0 [connector](../../connectors/in ![InfluxDB raw data pipeline segment](./images/influxdb-raw-data-pipeline-segment.png) -In this pipeline the connector subscribes to the `downsampled-3d-printer-data` topic, and writes these messages into InfluxDB for permanent storage. +In this pipeline the connector subscribes to the `json-downsampled-3d-printer-data` topic, and writes these messages into InfluxDB for permanent storage. ## Query the data in InfluxDB diff --git a/docs/tutorials/predictive-maintenance/overview.md b/docs/tutorials/predictive-maintenance/overview.md index a06d00f4..23baab8a 100644 --- a/docs/tutorials/predictive-maintenance/overview.md +++ b/docs/tutorials/predictive-maintenance/overview.md @@ -4,7 +4,7 @@ In this tutorial you learn about a project template that demonstrates real-time This tutorial uses the [Quix predictive maintenance template project](https://github.com/quixio/template-predictive-maintenance){target=_blank}. -![Predictive maintenance pipeline](./images/predictive-maintenance-pipeline.png) + You'll fork the complete project from GitHub, and then create a Quix project from the forked repo, so you have a copy of the full application code running in your Quix account. You then examine the data flow through the project's pipeline, using tools provided by Quix. @@ -33,11 +33,13 @@ Some of the technologies used by this template project are listed here. 
* [scikit-learn](https://scikit-learn.org/stable/){target=_blank} * [InfluxDB](https://www.influxdata.com/products/influxdb-cloud/serverless/){target=_blank} + ## GitHub repository @@ -55,6 +57,7 @@ To get started make sure you have a [free Quix account](https://portal.platform. You'll need a [free InfluxDB](https://www.influxdata.com/products/influxdb-cloud/serverless/){target=_blank} account to try this out in your Quix account. + ### Git provider @@ -71,80 +75,6 @@ You also need to have a Git account. This could be GitHub, Bitbucket, GitLab, or While this tutorial uses an external Git account, Quix can also provide a Quix-hosted Git solution using Gitea for your own projects. You can watch a video on [how to create a project using Quix-hosted Git](https://www.loom.com/share/b4488be244834333aec56e1a35faf4db?sid=a9aa124a-a2b0-45f1-a756-11b4395d0efc){target=_blank}. -??? "Transcript" - - 0:01 Hi there, welcome to this video on creating a quick project. I should point out before I get into this video that I'm using the beta development environment at Quix and so what you see may be slightly different when you're testing Quicks. - - 0:21 Okay, so having said that I'll get straight into creating a new project. You can see this button over here. I'm going to click that to create a new project. - - 0:33 The new workflow in Quix starts with creating a project. Everything is done inside a project. Generally speaking, I'm simplifying it quite a lot. - - 0:44 A project corresponds to a Git repository. So everything that you create, including your pipeline configuration and so on, will be stored in a Git repository. - - 0:59 So I'll give my project a name and I'll call it video project. And the first thing I need to do is specify where this Git repository is going to be. - - 1:12 Now, the simplest option is to get quick to do all the work for you and just create the Git repository. 
- - 1:21 And the second thing I need to do is a product called gitty to host the Git repository. So we can do that for you. - - 1:27 It all happens in the background. And as you'll see in later videos, you have complete control about what happens in that repository. - - 1:37 You can review pull requests and do, merges and all of those normal things that you would do in a Git workflow. - - 1:47 The other option is to use an external provider. So for example, you could use GitLab, GitHub, Bitbucket and so on. - - 1:57 You can use any provider that supports the use of an SS. H key. For the purposes of this video, I'm going to keep it simple for now and just use the Quix manage Git. - - 2:11 So I'm now ready to create the project. Now every project will have at least one environment and usually several environments. - - 2:28 So what is an environment? An environment roughly corresponds to a branch in your Git repository, but there's a specific for an environment. - - 2:41 As well as you'll see as we go through this dialogue. But the first thing we need to do is give our environment a name. - - 2:52 So I'm going to name it. Based on the typical development workflow, so usually we'd have production. Branch or environment and you might have staging and development. - - 3:09 So I'm going to create the production environment and as I was saying earlier, that's going to roughly correspond to a branch. - - 3:17 So in this next section, we're going to specify the, ,branch that this environment corresponds to and I want production to correspond to the main branch. - - 3:30 And so there's nothing really I need to do here. I could create a new branch if I wanted to, ,but I want to use the default branch which is main. - - 3:41 The other, thing that I can do here is protect this branch and what that means is that prevents developers from merging or committing content, making changes directly to the main branch. 
- - 3:58 In order to change the main branch you'll have to raise, a pull request, a git pull request and that would have to be reviewed and approved and then merged in the usual way. - - 4:13 So we definitely want that because for production we don't want changes being made directly. So I'm protecting that and you'll, We'll see later how we go about taking changes that we've made and say the develop branch or the dev branch and merge those into the main branch. - - 4:32 I'll cover that in another video. So for now let's just click continue. Now for each environment that you create you can specify how you want to host Kafka. - - 4:45 Now as you probably know Quix uses Kafka as its broker. And you have several options here. You if you want the simplest option and most convenient option is to just let Quix do all the hard work for you. - - 5:01 We will create them. Kafka broker and you don't need to do any configuration. We just do it all for you. - - 5:08 We make sure that scales nicely as well. We use Kubernetes and you know as I say it's the least. It's it's the quickest option especially. - - 5:22 If you're testing things out, but there are other options as you can see here. You can use your own self hosted Kafka or you can connect to a Confluent Cloud and we'll cover those options in later videos. - - 5:39 So for now, I'm just going to go with the simplest option and click. Continue. The other thing that you'll need to do here is specify the storage option for the environment. - - 5:51 So the key thing to point out at this point is we were talking about environments as corresponding to a branch in get. - - 6:02 That is true, but it's also these other things like the Kafka options that you select and also the storage options. - - 6:11 Now the storage option that you're selecting here is for any data that you might possess. So in Quix it's possible to persist the data that's published to topics. 
- - 6:27 You can store that using our something called the data catalog and if you store, if you persist your topics and store the messages that are in the topics. - - 6:43 In our storage facilities, then there's a small charge associated with that. However, you don't have to persist topics. You can use external database solutions for storage of your data. - - 7:01 So you can use. Some of our standard connectors or even write your own connector to connect to more or less any database technology that you want. - - 7:11 The other thing that's stored here is metadata associated with messages. So for now, I'm just going to choose the standard option and create. - - 7:23 The environment. Okay, that's it. We'll wait for the project and the environment to be created. And then we'll have a look at what's in there in subsequent videos. - - 7:42 Okay, thanks for watching. And see you in the next video. - ## The pipeline There are several *main* stages in the [pipeline](https://portal.platform.quix.io/pipeline?token=pat-7381f57aaee34adf95382c3a60df6306&workspace=demo-predictivemaintenance-production){target=_blank}: @@ -155,7 +85,7 @@ There are several *main* stages in the [pipeline](https://portal.platform.quix.i 4. *Alerts* - triggers alerts if thresholds are exceeded for current temperature data, and forecast data. 5. *InfluxDB - raw data* - writes the downsampled data to InfluxDB for permanent storage. 6. *InfluxDB - alerts* - writes the alert messages to InfluxDB for permanent storage. -7. *Printers dashboard* - dipslays the temperature data for the specified data, including predicted ambient (enclosure) temperature. + More details are provided on all these services later in the tutorial. 
@@ -165,10 +95,10 @@ The following Kafka topics are present in the project: | Topic | Description | Producer service | Consumer service(s) |---|---|---|---| -| `3d-printer-data` | The generated 3D printer temperature data | Data generator | Downsampling, Printers dahsboard | -| `downsampled-3d-printer-data` | Down samples the data from 1 second to 1 minute | Downsampling | Forecast, InfluxDB raw data | -| `forecast` | Forecast temperature | Forecast | Alert, Printers dashboard | -| `alerts` | Temperature alert | Alert | Printers dashboard, InfluxDB alerts | +| `json-3d-printer-data` | The generated 3D printer temperature data | Data generator | Downsampling, | +| `json-downsampled-3d-printer-data` | Down samples the data from 1 second to 1 minute | Downsampling | Forecast, InfluxDB raw data | +| `json-forecast` | Forecast temperature | Forecast | Alert | +| `json-alerts` | Temperature alert | Alert | InfluxDB alerts | ## The parts of the tutorial @@ -188,11 +118,12 @@ This tutorial is divided up into several parts, to make it a more manageable lea 7. [InfluxDB - alerts](./influxdb-alerts.md) - you see how InfluxDB is used to permanently store alert messages. -8. [Printers dashboard](./printers-dashboard.md) - you learn how Streaming Reader API can enable your web app to subscribe to messages published to Quix topics. + -9. [Lab: add phone alerts](./phone-alerts.md) - you add a phone alerts service to your pipeline, using the Pushover service, and the Quix ready-to-use Pushover connector. + -10. [Summary](summary.md) - in this concluding part you are presented with a summary of the work you have completed, and also some next steps for more advanced learning about Quix. +8. [Summary](summary.md) - in this concluding part you are presented with a summary of the work you have completed, and also some next steps for more advanced learning about Quix. 
## πŸƒβ€β™€οΈ Next step diff --git a/docs/tutorials/predictive-maintenance/phone-alerts.md b/docs/tutorials/predictive-maintenance/phone-alerts.md index c5046dd0..0ce4dbbf 100644 --- a/docs/tutorials/predictive-maintenance/phone-alerts.md +++ b/docs/tutorials/predictive-maintenance/phone-alerts.md @@ -56,33 +56,26 @@ To add a filter transformation service: You'll now edit the code to implement the functionality you require. -Modify your input topic to be `alerts`, and the output topic to be a new topic `ambient-alerts` (you'll need to use the `New topic` button to create this). +Modify your input topic to be `json-alerts`, and the output topic to be a new topic `ambient-alerts` (you'll need to use the `New topic` button to create this). You're only interested in forecast over temperature alerts, which have the message format: ``` json -[ - { - "Timestamp": 1701348695725335000, - "Tags": { - "TAG__printer": "Printer 11-down-sampled - Forecast" - }, - "Id": "over-forecast", - "Value": "{\"status\": \"over-forecast\", \"parameter_name\": \"forecast_fluctuated_ambient_temperature\", \"alert_temperature\": 55.00501762733715, \"alert_timestamp\": 1.70137236e+18, \"message\": \"'Ambient temperature' is forecasted to go over 55\\u00baC in 6:34:24.\"}" - } -] +{ + "status": "over-forecast", + "parameter_name": "ambient_temperature", + "alert_temperature": 72.82717847141305, + "timestamp": "2024-04-16 10:42:40", + "message": "'Ambient temperature' is forecasted to go over 73ΒΊC at 2024-04-16 10:42:40." +} ``` -So you are looking for an `Id` of `over-forecast`. +So you are looking for an `status` of `over-forecast`. You only want to publish the alert message to the output topic if it has the correct ID. 
Modify the event handler to the following: ``` python -def on_event_data_received_handler(stream_consumer: qx.StreamConsumer, data: qx.EventData): - if data.id == 'over-forecast': - print(data) - stream_producer = topic_producer.get_or_create_stream(stream_id = stream_consumer.stream_id) - stream_producer.events.publish(data) +# if status == over-forecast then send alert ``` Commit your changes. @@ -97,6 +90,7 @@ Now that you've modified the code, you'll now test your changes: 3. Once the `Alerts Filter` service is running, you are ready to proceed to the next step. + ## Add the Pushover destination In this section you create a destination service that sends a push notification to your phone when an alert condition occurs. @@ -135,25 +129,6 @@ You will shortly start receiving Pushover notifications on your phone, as shown ![Pushover Notification Example](./images/pushover-notification.png){width=60%} -If you want to change the message received you can edit the `quix_function.py` code in the Pushover destination: - -``` python -# Callback triggered for each new event -def on_event_data_handler(self, stream_consumer: qx.StreamConsumer, data: qx.EventData): - print(data) - - # send your push message - try: - pushmsg = {'token': self.apitoken, - 'user': self.userkey, - 'message': 'An event has been detected'} - requests.post(self.baseurl, json = pushmsg) - except Exception as e: - print(f"Error connecting to push API: {e}") -``` - -Change "An event has been detected" to anything you like. - !!! note In Pushover this application was named "3D Printers". 
@@ -213,5 +188,6 @@ Your new service builds and starts in the Tutorial environment, where you can no ## πŸƒβ€β™€οΈ Next step -[Part 10 - Summary :material-arrow-right-circle:{ align=right }](summary.md) + +[Part 9 - Summary :material-arrow-right-circle:{ align=right }](summary.md) diff --git a/docs/tutorials/predictive-maintenance/summary.md b/docs/tutorials/predictive-maintenance/summary.md index 61da5d4d..00e35085 100644 --- a/docs/tutorials/predictive-maintenance/summary.md +++ b/docs/tutorials/predictive-maintenance/summary.md @@ -16,9 +16,11 @@ In addition, you have seen how in Quix you can: Here are some suggested next steps to continue on your Quix learning journey: * Build something with our [Code Samples](../../develop/code-samples.md). + + ## Get help diff --git a/mkdocs.yml b/mkdocs.yml index 23877ef3..63bca72b 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -175,18 +175,18 @@ nav: - '4. Add InfluxDB v3 destination': 'tutorials/influxdb-migration/influxdb-destination.md' - '5. Summary': 'tutorials/influxdb-migration/summary.md' - 'Vector Store Embeddings': 'tutorials/ingest-embeddings/continuously_ingest_documents_into_a_vector_store_using_apache_kafka.md' - # - 'Predictive maintenance': - # - 'Overview': 'tutorials/predictive-maintenance/overview.md' - # - '1. Get the project': 'tutorials/predictive-maintenance/get-project.md' - # - '2. Data generator': 'tutorials/predictive-maintenance/data-generator.md' - # - '3. Downsampling': 'tutorials/predictive-maintenance/downsampling.md' - # - '4. Forecast': 'tutorials/predictive-maintenance/forecast-service.md' - # - '5. Alerts': 'tutorials/predictive-maintenance/alert-service.md' - # - '6. InfluxDB - raw data': 'tutorials/predictive-maintenance/influxdb-raw-data.md' - # - '7. InfluxDB - alerts': 'tutorials/predictive-maintenance/influxdb-alerts.md' + - 'Predictive maintenance': + - 'Overview': 'tutorials/predictive-maintenance/overview.md' + - '1. 
Get the project': 'tutorials/predictive-maintenance/get-project.md' + - '2. Data generator': 'tutorials/predictive-maintenance/data-generator.md' + - '3. Downsampling': 'tutorials/predictive-maintenance/downsampling.md' + - '4. Forecast': 'tutorials/predictive-maintenance/forecast-service.md' + - '5. Alerts': 'tutorials/predictive-maintenance/alert-service.md' + - '6. InfluxDB - raw data': 'tutorials/predictive-maintenance/influxdb-raw-data.md' + - '7. InfluxDB - alerts': 'tutorials/predictive-maintenance/influxdb-alerts.md' # - '8. Printers dashboard': 'tutorials/predictive-maintenance/printers-dashboard.md' - # - '9. Lab: Add phone alerts': 'tutorials/predictive-maintenance/phone-alerts.md' - # - '10. Summary': 'tutorials/predictive-maintenance/summary.md' + # - '8. Lab: Add phone alerts': 'tutorials/predictive-maintenance/phone-alerts.md' + - '8. Summary': 'tutorials/predictive-maintenance/summary.md' # - 'Clickstream analysis': # - 'Overview': 'tutorials/clickstream/overview.md' # - '1. Get the project': 'tutorials/clickstream/get-project.md' From 5ff52ee02aefde262fa3c9ee6279a58656d66a04 Mon Sep 17 00:00:00 2001 From: tbedford Date: Wed, 17 Apr 2024 09:47:53 +0100 Subject: [PATCH 2/3] [fix] - put transcript inside tip --- docs/tutorials/computer-vision/overview.md | 74 +++++++++--------- .../predictive-maintenance/overview.md | 78 ++++++++++++++++++- docs/tutorials/sentiment-analysis/overview.md | 78 ++++++++++--------- 3 files changed, 154 insertions(+), 76 deletions(-) diff --git a/docs/tutorials/computer-vision/overview.md b/docs/tutorials/computer-vision/overview.md index 3d51df24..c06bf736 100644 --- a/docs/tutorials/computer-vision/overview.md +++ b/docs/tutorials/computer-vision/overview.md @@ -88,79 +88,79 @@ You also need to have a Git account. This could be GitHub, Bitbucket, GitLab, or While this tutorial uses an external Git account, Quix can also provide a Quix-hosted Git solution using Gitea for your own projects. 
You can watch a video on [how to create a project using Quix-hosted Git](https://www.loom.com/share/b4488be244834333aec56e1a35faf4db?sid=a9aa124a-a2b0-45f1-a756-11b4395d0efc){target=_blank}. -??? "Transcript" + ??? "Transcript" - 0:01 Hi there, welcome to this video on creating a quick project. I should point out before I get into this video that I'm using the beta development environment at Quix and so what you see may be slightly different when you're testing Quicks. + 0:01 Hi there, welcome to this video on creating a quick project. I should point out before I get into this video that I'm using the beta development environment at Quix and so what you see may be slightly different when you're testing Quicks. - 0:21 Okay, so having said that I'll get straight into creating a new project. You can see this button over here. I'm going to click that to create a new project. + 0:21 Okay, so having said that I'll get straight into creating a new project. You can see this button over here. I'm going to click that to create a new project. - 0:33 The new workflow in Quix starts with creating a project. Everything is done inside a project. Generally speaking, I'm simplifying it quite a lot. + 0:33 The new workflow in Quix starts with creating a project. Everything is done inside a project. Generally speaking, I'm simplifying it quite a lot. - 0:44 A project corresponds to a Git repository. So everything that you create, including your pipeline configuration and so on, will be stored in a Git repository. + 0:44 A project corresponds to a Git repository. So everything that you create, including your pipeline configuration and so on, will be stored in a Git repository. - 0:59 So I'll give my project a name and I'll call it video project. And the first thing I need to do is specify where this Git repository is going to be. + 0:59 So I'll give my project a name and I'll call it video project. And the first thing I need to do is specify where this Git repository is going to be. 
- 1:12 Now, the simplest option is to get quick to do all the work for you and just create the Git repository. + 1:12 Now, the simplest option is to get quick to do all the work for you and just create the Git repository. - 1:21 And the second thing I need to do is a product called gitty to host the Git repository. So we can do that for you. + 1:21 And the second thing I need to do is a product called gitty to host the Git repository. So we can do that for you. - 1:27 It all happens in the background. And as you'll see in later videos, you have complete control about what happens in that repository. + 1:27 It all happens in the background. And as you'll see in later videos, you have complete control about what happens in that repository. - 1:37 You can review pull requests and do, merges and all of those normal things that you would do in a Git workflow. + 1:37 You can review pull requests and do, merges and all of those normal things that you would do in a Git workflow. - 1:47 The other option is to use an external provider. So for example, you could use GitLab, GitHub, Bitbucket and so on. + 1:47 The other option is to use an external provider. So for example, you could use GitLab, GitHub, Bitbucket and so on. - 1:57 You can use any provider that supports the use of an SS. H key. For the purposes of this video, I'm going to keep it simple for now and just use the Quix manage Git. + 1:57 You can use any provider that supports the use of an SS. H key. For the purposes of this video, I'm going to keep it simple for now and just use the Quix manage Git. - 2:11 So I'm now ready to create the project. Now every project will have at least one environment and usually several environments. + 2:11 So I'm now ready to create the project. Now every project will have at least one environment and usually several environments. - 2:28 So what is an environment? An environment roughly corresponds to a branch in your Git repository, but there's a specific for an environment. 
+ 2:28 So what is an environment? An environment roughly corresponds to a branch in your Git repository, but there's a specific for an environment. - 2:41 As well as you'll see as we go through this dialogue. But the first thing we need to do is give our environment a name. + 2:41 As well as you'll see as we go through this dialogue. But the first thing we need to do is give our environment a name. - 2:52 So I'm going to name it. Based on the typical development workflow, so usually we'd have production. Branch or environment and you might have staging and development. + 2:52 So I'm going to name it. Based on the typical development workflow, so usually we'd have production. Branch or environment and you might have staging and development. - 3:09 So I'm going to create the production environment and as I was saying earlier, that's going to roughly correspond to a branch. + 3:09 So I'm going to create the production environment and as I was saying earlier, that's going to roughly correspond to a branch. - 3:17 So in this next section, we're going to specify the, ,branch that this environment corresponds to and I want production to correspond to the main branch. + 3:17 So in this next section, we're going to specify the, ,branch that this environment corresponds to and I want production to correspond to the main branch. - 3:30 And so there's nothing really I need to do here. I could create a new branch if I wanted to, ,but I want to use the default branch which is main. + 3:30 And so there's nothing really I need to do here. I could create a new branch if I wanted to, ,but I want to use the default branch which is main. - 3:41 The other, thing that I can do here is protect this branch and what that means is that prevents developers from merging or committing content, making changes directly to the main branch. 
+ 3:41 The other, thing that I can do here is protect this branch and what that means is that prevents developers from merging or committing content, making changes directly to the main branch. - 3:58 In order to change the main branch you'll have to raise, a pull request, a git pull request and that would have to be reviewed and approved and then merged in the usual way. + 3:58 In order to change the main branch you'll have to raise, a pull request, a git pull request and that would have to be reviewed and approved and then merged in the usual way. - 4:13 So we definitely want that because for production we don't want changes being made directly. So I'm protecting that and you'll, We'll see later how we go about taking changes that we've made and say the develop branch or the dev branch and merge those into the main branch. + 4:13 So we definitely want that because for production we don't want changes being made directly. So I'm protecting that and you'll, We'll see later how we go about taking changes that we've made and say the develop branch or the dev branch and merge those into the main branch. - 4:32 I'll cover that in another video. So for now let's just click continue. Now for each environment that you create you can specify how you want to host Kafka. + 4:32 I'll cover that in another video. So for now let's just click continue. Now for each environment that you create you can specify how you want to host Kafka. - 4:45 Now as you probably know Quix uses Kafka as its broker. And you have several options here. You if you want the simplest option and most convenient option is to just let Quix do all the hard work for you. + 4:45 Now as you probably know Quix uses Kafka as its broker. And you have several options here. You if you want the simplest option and most convenient option is to just let Quix do all the hard work for you. - 5:01 We will create them. Kafka broker and you don't need to do any configuration. We just do it all for you. 
+ 5:01 We will create them. Kafka broker and you don't need to do any configuration. We just do it all for you. - 5:08 We make sure that scales nicely as well. We use Kubernetes and you know as I say it's the least. It's it's the quickest option especially. + 5:08 We make sure that scales nicely as well. We use Kubernetes and you know as I say it's the least. It's it's the quickest option especially. - 5:22 If you're testing things out, but there are other options as you can see here. You can use your own self hosted Kafka or you can connect to a Confluent Cloud and we'll cover those options in later videos. + 5:22 If you're testing things out, but there are other options as you can see here. You can use your own self hosted Kafka or you can connect to a Confluent Cloud and we'll cover those options in later videos. - 5:39 So for now, I'm just going to go with the simplest option and click. Continue. The other thing that you'll need to do here is specify the storage option for the environment. + 5:39 So for now, I'm just going to go with the simplest option and click. Continue. The other thing that you'll need to do here is specify the storage option for the environment. - 5:51 So the key thing to point out at this point is we were talking about environments as corresponding to a branch in get. + 5:51 So the key thing to point out at this point is we were talking about environments as corresponding to a branch in get. - 6:02 That is true, but it's also these other things like the Kafka options that you select and also the storage options. + 6:02 That is true, but it's also these other things like the Kafka options that you select and also the storage options. - 6:11 Now the storage option that you're selecting here is for any data that you might possess. So in Quix it's possible to persist the data that's published to topics. + 6:11 Now the storage option that you're selecting here is for any data that you might possess. 
So in Quix it's possible to persist the data that's published to topics. - 6:27 You can store that using our something called the data catalog and if you store, if you persist your topics and store the messages that are in the topics. + 6:27 You can store that using our something called the data catalog and if you store, if you persist your topics and store the messages that are in the topics. - 6:43 In our storage facilities, then there's a small charge associated with that. However, you don't have to persist topics. You can use external database solutions for storage of your data. + 6:43 In our storage facilities, then there's a small charge associated with that. However, you don't have to persist topics. You can use external database solutions for storage of your data. - 7:01 So you can use. Some of our standard connectors or even write your own connector to connect to more or less any database technology that you want. + 7:01 So you can use. Some of our standard connectors or even write your own connector to connect to more or less any database technology that you want. - 7:11 The other thing that's stored here is metadata associated with messages. So for now, I'm just going to choose the standard option and create. + 7:11 The other thing that's stored here is metadata associated with messages. So for now, I'm just going to choose the standard option and create. - 7:23 The environment. Okay, that's it. We'll wait for the project and the environment to be created. And then we'll have a look at what's in there in subsequent videos. + 7:23 The environment. Okay, that's it. We'll wait for the project and the environment to be created. And then we'll have a look at what's in there in subsequent videos. - 7:42 Okay, thanks for watching. And see you in the next video. + 7:42 Okay, thanks for watching. And see you in the next video. If you want to use the Quix AWS S3 service (optional), you'll need to provide your credentials for accessing AWS S3. 
diff --git a/docs/tutorials/predictive-maintenance/overview.md b/docs/tutorials/predictive-maintenance/overview.md index 23baab8a..6056f2dd 100644 --- a/docs/tutorials/predictive-maintenance/overview.md +++ b/docs/tutorials/predictive-maintenance/overview.md @@ -73,7 +73,83 @@ Install the Pushover mobile app from the [Apple App store](https://apps.apple.co You also need to have a Git account. This could be GitHub, Bitbucket, GitLab, or any other Git provider you are familar with, and that supports SSH keys. The simplest option is to create a free [GitHub account](){target=_blank}. -While this tutorial uses an external Git account, Quix can also provide a Quix-hosted Git solution using Gitea for your own projects. You can watch a video on [how to create a project using Quix-hosted Git](https://www.loom.com/share/b4488be244834333aec56e1a35faf4db?sid=a9aa124a-a2b0-45f1-a756-11b4395d0efc){target=_blank}. +!!! tip + + While this tutorial uses an external Git account, Quix can also provide a Quix-hosted Git solution using Gitea for your own projects. You can watch a video on [how to create a project using Quix-hosted Git](https://www.loom.com/share/b4488be244834333aec56e1a35faf4db?sid=a9aa124a-a2b0-45f1-a756-11b4395d0efc){target=_blank}. + + ??? "Transcript" + + 0:01 Hi there, welcome to this video on creating a quick project. I should point out before I get into this video that I'm using the beta development environment at Quix and so what you see may be slightly different when you're testing Quicks. + + 0:21 Okay, so having said that I'll get straight into creating a new project. You can see this button over here. I'm going to click that to create a new project. + + 0:33 The new workflow in Quix starts with creating a project. Everything is done inside a project. Generally speaking, I'm simplifying it quite a lot. + + 0:44 A project corresponds to a Git repository. 
So everything that you create, including your pipeline configuration and so on, will be stored in a Git repository. + + 0:59 So I'll give my project a name and I'll call it video project. And the first thing I need to do is specify where this Git repository is going to be. + + 1:12 Now, the simplest option is to get quick to do all the work for you and just create the Git repository. + + 1:21 And the second thing I need to do is a product called gitty to host the Git repository. So we can do that for you. + + 1:27 It all happens in the background. And as you'll see in later videos, you have complete control about what happens in that repository. + + 1:37 You can review pull requests and do, merges and all of those normal things that you would do in a Git workflow. + + 1:47 The other option is to use an external provider. So for example, you could use GitLab, GitHub, Bitbucket and so on. + + 1:57 You can use any provider that supports the use of an SS. H key. For the purposes of this video, I'm going to keep it simple for now and just use the Quix manage Git. + + 2:11 So I'm now ready to create the project. Now every project will have at least one environment and usually several environments. + + 2:28 So what is an environment? An environment roughly corresponds to a branch in your Git repository, but there's a specific for an environment. + + 2:41 As well as you'll see as we go through this dialogue. But the first thing we need to do is give our environment a name. + + 2:52 So I'm going to name it. Based on the typical development workflow, so usually we'd have production. Branch or environment and you might have staging and development. + + 3:09 So I'm going to create the production environment and as I was saying earlier, that's going to roughly correspond to a branch. + + 3:17 So in this next section, we're going to specify the, ,branch that this environment corresponds to and I want production to correspond to the main branch. 
+ + 3:30 And so there's nothing really I need to do here. I could create a new branch if I wanted to, ,but I want to use the default branch which is main. + + 3:41 The other, thing that I can do here is protect this branch and what that means is that prevents developers from merging or committing content, making changes directly to the main branch. + + 3:58 In order to change the main branch you'll have to raise, a pull request, a git pull request and that would have to be reviewed and approved and then merged in the usual way. + + 4:13 So we definitely want that because for production we don't want changes being made directly. So I'm protecting that and you'll, We'll see later how we go about taking changes that we've made and say the develop branch or the dev branch and merge those into the main branch. + + 4:32 I'll cover that in another video. So for now let's just click continue. Now for each environment that you create you can specify how you want to host Kafka. + + 4:45 Now as you probably know Quix uses Kafka as its broker. And you have several options here. You if you want the simplest option and most convenient option is to just let Quix do all the hard work for you. + + 5:01 We will create them. Kafka broker and you don't need to do any configuration. We just do it all for you. + + 5:08 We make sure that scales nicely as well. We use Kubernetes and you know as I say it's the least. It's it's the quickest option especially. + + 5:22 If you're testing things out, but there are other options as you can see here. You can use your own self hosted Kafka or you can connect to a Confluent Cloud and we'll cover those options in later videos. + + 5:39 So for now, I'm just going to go with the simplest option and click. Continue. The other thing that you'll need to do here is specify the storage option for the environment. + + 5:51 So the key thing to point out at this point is we were talking about environments as corresponding to a branch in get. 
+ + 6:02 That is true, but it's also these other things like the Kafka options that you select and also the storage options. + + 6:11 Now the storage option that you're selecting here is for any data that you might possess. So in Quix it's possible to persist the data that's published to topics. + + 6:27 You can store that using our something called the data catalog and if you store, if you persist your topics and store the messages that are in the topics. + + 6:43 In our storage facilities, then there's a small charge associated with that. However, you don't have to persist topics. You can use external database solutions for storage of your data. + + 7:01 So you can use. Some of our standard connectors or even write your own connector to connect to more or less any database technology that you want. + + 7:11 The other thing that's stored here is metadata associated with messages. So for now, I'm just going to choose the standard option and create. + + 7:23 The environment. Okay, that's it. We'll wait for the project and the environment to be created. And then we'll have a look at what's in there in subsequent videos. + + 7:42 Okay, thanks for watching. And see you in the next video. ## The pipeline diff --git a/docs/tutorials/sentiment-analysis/overview.md b/docs/tutorials/sentiment-analysis/overview.md index 28c21c49..348627df 100644 --- a/docs/tutorials/sentiment-analysis/overview.md +++ b/docs/tutorials/sentiment-analysis/overview.md @@ -70,81 +70,83 @@ If you want to use the Quix BigQuery service (optional), you'll need to provide You also need to have a Git account. This could be GitHub, Bitbucket, GitLab, or any other Git provider you are familar with, and that supports SSH keys. The simplest option is to create a free [GitHub account](https://github.com){target=_blank}. -While this tutorial uses an external Git account, Quix can also provide a Quix-hosted Git solution using Gitea for your own projects. 
You can watch a video on [how to create a project using Quix-hosted Git](https://www.loom.com/share/b4488be244834333aec56e1a35faf4db?sid=a9aa124a-a2b0-45f1-a756-11b4395d0efc){target=_blank}. +!!! tip -??? "Transcript" + While this tutorial uses an external Git account, Quix can also provide a Quix-hosted Git solution using Gitea for your own projects. You can watch a video on [how to create a project using Quix-hosted Git](https://www.loom.com/share/b4488be244834333aec56e1a35faf4db?sid=a9aa124a-a2b0-45f1-a756-11b4395d0efc){target=_blank}. - 0:01 Hi there, welcome to this video on creating a quick project. I should point out before I get into this video that I'm using the beta development environment at Quix and so what you see may be slightly different when you're testing Quicks. + ??? "Transcript" - 0:21 Okay, so having said that I'll get straight into creating a new project. You can see this button over here. I'm going to click that to create a new project. + 0:01 Hi there, welcome to this video on creating a quick project. I should point out before I get into this video that I'm using the beta development environment at Quix and so what you see may be slightly different when you're testing Quicks. - 0:33 The new workflow in Quix starts with creating a project. Everything is done inside a project. Generally speaking, I'm simplifying it quite a lot. + 0:21 Okay, so having said that I'll get straight into creating a new project. You can see this button over here. I'm going to click that to create a new project. - 0:44 A project corresponds to a Git repository. So everything that you create, including your pipeline configuration and so on, will be stored in a Git repository. + 0:33 The new workflow in Quix starts with creating a project. Everything is done inside a project. Generally speaking, I'm simplifying it quite a lot. - 0:59 So I'll give my project a name and I'll call it video project. 
And the first thing I need to do is specify where this Git repository is going to be. + 0:44 A project corresponds to a Git repository. So everything that you create, including your pipeline configuration and so on, will be stored in a Git repository. - 1:12 Now, the simplest option is to get quick to do all the work for you and just create the Git repository. + 0:59 So I'll give my project a name and I'll call it video project. And the first thing I need to do is specify where this Git repository is going to be. - 1:21 And the second thing I need to do is a product called gitty to host the Git repository. So we can do that for you. + 1:12 Now, the simplest option is to get quick to do all the work for you and just create the Git repository. - 1:27 It all happens in the background. And as you'll see in later videos, you have complete control about what happens in that repository. + 1:21 And the second thing I need to do is a product called gitty to host the Git repository. So we can do that for you. - 1:37 You can review pull requests and do, merges and all of those normal things that you would do in a Git workflow. + 1:27 It all happens in the background. And as you'll see in later videos, you have complete control about what happens in that repository. - 1:47 The other option is to use an external provider. So for example, you could use GitLab, GitHub, Bitbucket and so on. + 1:37 You can review pull requests and do, merges and all of those normal things that you would do in a Git workflow. - 1:57 You can use any provider that supports the use of an SS. H key. For the purposes of this video, I'm going to keep it simple for now and just use the Quix manage Git. + 1:47 The other option is to use an external provider. So for example, you could use GitLab, GitHub, Bitbucket and so on. - 2:11 So I'm now ready to create the project. Now every project will have at least one environment and usually several environments. 
+ 1:57 You can use any provider that supports the use of an SS. H key. For the purposes of this video, I'm going to keep it simple for now and just use the Quix manage Git. - 2:28 So what is an environment? An environment roughly corresponds to a branch in your Git repository, but there's a specific for an environment. + 2:11 So I'm now ready to create the project. Now every project will have at least one environment and usually several environments. - 2:41 As well as you'll see as we go through this dialogue. But the first thing we need to do is give our environment a name. + 2:28 So what is an environment? An environment roughly corresponds to a branch in your Git repository, but there's a specific for an environment. - 2:52 So I'm going to name it. Based on the typical development workflow, so usually we'd have production. Branch or environment and you might have staging and development. + 2:41 As well as you'll see as we go through this dialogue. But the first thing we need to do is give our environment a name. - 3:09 So I'm going to create the production environment and as I was saying earlier, that's going to roughly correspond to a branch. + 2:52 So I'm going to name it. Based on the typical development workflow, so usually we'd have production. Branch or environment and you might have staging and development. - 3:17 So in this next section, we're going to specify the, ,branch that this environment corresponds to and I want production to correspond to the main branch. + 3:09 So I'm going to create the production environment and as I was saying earlier, that's going to roughly correspond to a branch. - 3:30 And so there's nothing really I need to do here. I could create a new branch if I wanted to, ,but I want to use the default branch which is main. + 3:17 So in this next section, we're going to specify the, ,branch that this environment corresponds to and I want production to correspond to the main branch. 
- 3:41 The other, thing that I can do here is protect this branch and what that means is that prevents developers from merging or committing content, making changes directly to the main branch. + 3:30 And so there's nothing really I need to do here. I could create a new branch if I wanted to, ,but I want to use the default branch which is main. - 3:58 In order to change the main branch you'll have to raise, a pull request, a git pull request and that would have to be reviewed and approved and then merged in the usual way. + 3:41 The other, thing that I can do here is protect this branch and what that means is that prevents developers from merging or committing content, making changes directly to the main branch. - 4:13 So we definitely want that because for production we don't want changes being made directly. So I'm protecting that and you'll, We'll see later how we go about taking changes that we've made and say the develop branch or the dev branch and merge those into the main branch. + 3:58 In order to change the main branch you'll have to raise, a pull request, a git pull request and that would have to be reviewed and approved and then merged in the usual way. - 4:32 I'll cover that in another video. So for now let's just click continue. Now for each environment that you create you can specify how you want to host Kafka. + 4:13 So we definitely want that because for production we don't want changes being made directly. So I'm protecting that and you'll, We'll see later how we go about taking changes that we've made and say the develop branch or the dev branch and merge those into the main branch. - 4:45 Now as you probably know Quix uses Kafka as its broker. And you have several options here. You if you want the simplest option and most convenient option is to just let Quix do all the hard work for you. + 4:32 I'll cover that in another video. So for now let's just click continue. 
Now for each environment that you create you can specify how you want to host Kafka. - 5:01 We will create them. Kafka broker and you don't need to do any configuration. We just do it all for you. + 4:45 Now as you probably know Quix uses Kafka as its broker. And you have several options here. You if you want the simplest option and most convenient option is to just let Quix do all the hard work for you. - 5:08 We make sure that scales nicely as well. We use Kubernetes and you know as I say it's the least. It's it's the quickest option especially. + 5:01 We will create them. Kafka broker and you don't need to do any configuration. We just do it all for you. - 5:22 If you're testing things out, but there are other options as you can see here. You can use your own self hosted Kafka or you can connect to a Confluent Cloud and we'll cover those options in later videos. + 5:08 We make sure that scales nicely as well. We use Kubernetes and you know as I say it's the least. It's it's the quickest option especially. - 5:39 So for now, I'm just going to go with the simplest option and click. Continue. The other thing that you'll need to do here is specify the storage option for the environment. + 5:22 If you're testing things out, but there are other options as you can see here. You can use your own self hosted Kafka or you can connect to a Confluent Cloud and we'll cover those options in later videos. - 5:51 So the key thing to point out at this point is we were talking about environments as corresponding to a branch in get. + 5:39 So for now, I'm just going to go with the simplest option and click. Continue. The other thing that you'll need to do here is specify the storage option for the environment. - 6:02 That is true, but it's also these other things like the Kafka options that you select and also the storage options. + 5:51 So the key thing to point out at this point is we were talking about environments as corresponding to a branch in get. 
- 6:11 Now the storage option that you're selecting here is for any data that you might possess. So in Quix it's possible to persist the data that's published to topics. + 6:02 That is true, but it's also these other things like the Kafka options that you select and also the storage options. - 6:27 You can store that using our something called the data catalog and if you store, if you persist your topics and store the messages that are in the topics. + 6:11 Now the storage option that you're selecting here is for any data that you might possess. So in Quix it's possible to persist the data that's published to topics. - 6:43 In our storage facilities, then there's a small charge associated with that. However, you don't have to persist topics. You can use external database solutions for storage of your data. + 6:27 You can store that using our something called the data catalog and if you store, if you persist your topics and store the messages that are in the topics. - 7:01 So you can use. Some of our standard connectors or even write your own connector to connect to more or less any database technology that you want. + 6:43 In our storage facilities, then there's a small charge associated with that. However, you don't have to persist topics. You can use external database solutions for storage of your data. - 7:11 The other thing that's stored here is metadata associated with messages. So for now, I'm just going to choose the standard option and create. + 7:01 So you can use. Some of our standard connectors or even write your own connector to connect to more or less any database technology that you want. - 7:23 The environment. Okay, that's it. We'll wait for the project and the environment to be created. And then we'll have a look at what's in there in subsequent videos. + 7:11 The other thing that's stored here is metadata associated with messages. So for now, I'm just going to choose the standard option and create. - 7:42 Okay, thanks for watching. 
And see you in the next video. + 7:23 The environment. Okay, that's it. We'll wait for the project and the environment to be created. And then we'll have a look at what's in there in subsequent videos. + + 7:42 Okay, thanks for watching. And see you in the next video. ## The pipeline From 63899100caa5acd817eb747fe0ef1ea27fe45fc5 Mon Sep 17 00:00:00 2001 From: tbedford Date: Mon, 22 Apr 2024 14:55:36 +0100 Subject: [PATCH 3/3] [chore] - template moving --- docs/tutorials/predictive-maintenance/get-project.md | 2 +- docs/tutorials/predictive-maintenance/overview.md | 10 +--------- 2 files changed, 2 insertions(+), 10 deletions(-) diff --git a/docs/tutorials/predictive-maintenance/get-project.md b/docs/tutorials/predictive-maintenance/get-project.md index 31642e7f..ba572835 100644 --- a/docs/tutorials/predictive-maintenance/get-project.md +++ b/docs/tutorials/predictive-maintenance/get-project.md @@ -1,6 +1,6 @@ # Get the project -While you can see the [deployed project running in Quix](https://portal.platform.quix.io/pipeline?token=pat-7381f57aaee34adf95382c3a60df6306&workspace=demo-predictivemaintenance-production){target=_blank}, it can be useful to learn how to get a project up and running in your own Quix account. +Here you learn how to copy the template project. Once you have the project running in your Quix account, you can modify the project as required, and save your changes to your copy of the project. diff --git a/docs/tutorials/predictive-maintenance/overview.md b/docs/tutorials/predictive-maintenance/overview.md index 6056f2dd..c8a70d0b 100644 --- a/docs/tutorials/predictive-maintenance/overview.md +++ b/docs/tutorials/predictive-maintenance/overview.md @@ -8,14 +8,6 @@ This tutorial uses the [Quix predictive maintenance template project](https://gi You'll fork the complete project from GitHub, and then create a Quix project from the forked repo, so you have a copy of the full application code running in your Quix account. 
You then examine the data flow through the project's pipeline, using tools provided by Quix. -
-See the project running in Quix - -See the deployed project - -
-
- ## Technologies used Some of the technologies used by this template project are listed here. @@ -153,7 +145,7 @@ You also need to have a Git account. This could be GitHub, Bitbucket, GitLab, or ## The pipeline -There are several *main* stages in the [pipeline](https://portal.platform.quix.io/pipeline?token=pat-7381f57aaee34adf95382c3a60df6306&workspace=demo-predictivemaintenance-production){target=_blank}: +There are several *main* stages in the pipeline: 1. *Data generator* - generates the temperature data for a fleet of 3D printers. 2. *Downsampling* - downsamples data from one second to one minute.