diff --git a/develop-docs/application/architecture.mdx b/develop-docs/application/architecture.mdx
index 24464de12f880..995d2c95e5ae5 100644
--- a/develop-docs/application/architecture.mdx
+++ b/develop-docs/application/architecture.mdx
@@ -9,28 +9,28 @@ Edges represent service dependencies.
```mermaid
graph TD
-app[Your Application] --> |SDK| lb{{Load Balancer}}
-lb --> |"sentry.example.com/api/\d+/store/"| relay
-lb --> |"sentry.example.com"| sentry_web["Sentry (web)"]
-symbolicator --> sentry_web
-relay --> kafka
-relay --> redis
-sentry_web --> snuba
-sentry_web --> memcached
-sentry_web --> postgres
-sentry_web --> redis
-snuba --> kafka
-snuba --> redis
-snuba --> clickhouse
-kafka --> zookeeper
-sentry_web --> sentry_worker["Sentry (worker)"]
-sentry_worker --> memcached
-sentry_worker --> redis
-sentry_worker --> postgres
-sentry_worker --> symbolicator
-
-click snuba "https://github.com/getsentry/snuba" "Snuba Documentation"
-click relay "https://github.com/getsentry/relay" "Relay Documentation"
+ app[Your Application] --> |SDK| lb{{Load Balancer}}
+ lb --> |"sentry.example.com/api/\d+/store/"| relay
+ lb --> |"sentry.example.com"| sentry_web["Sentry (web)"]
+ symbolicator --> sentry_web
+ relay --> kafka
+ relay --> redis
+ sentry_web --> snuba
+ sentry_web --> memcached
+ sentry_web --> postgres
+ sentry_web --> redis
+ snuba --> kafka
+ snuba --> redis
+ snuba --> clickhouse
+ kafka --> zookeeper
+ sentry_web --> sentry_worker["Sentry (worker)"]
+ sentry_worker --> memcached
+ sentry_worker --> redis
+ sentry_worker --> postgres
+ sentry_worker --> symbolicator
+
+ click snuba "https://github.com/getsentry/snuba" "Snuba Documentation"
+ click relay "https://github.com/getsentry/relay" "Relay Documentation"
```
## Event pipeline
@@ -49,26 +49,25 @@ For more information read [Path of an event through Relay](https://getsentry.git
```mermaid
graph TD
-
-app[Your application] --> |sends crashes| lb{{nginx}}
-lb --> |/api/n/store/| relay
-relay --> kafka[(Ingest Kafka)]
-kafka --> ingest-consumer["Sentry ingest consumer"]
-ingest-consumer --> preprocess-event
-
-subgraph celery["Sentry celery tasks"]
- preprocess-event --> save-event
- preprocess-event --> process-event
- preprocess-event --> symbolicate-event
- symbolicate-event --> process-event
- process-event --> save-event
- save-event --> snuba-kafka[("Snuba Kafka
-(eventstream)")]
-end
-
-subgraph snuba["Snuba"]
- snuba-kafka --> snuba-consumer["Snuba consumers"]
- snuba-consumer --> clickhouse[("Clickhouse")]
-end
+ app[Your application] --> |sends crashes| lb{{nginx}}
+ lb --> |/api/n/store/| relay
+ relay --> kafka[(Ingest Kafka)]
+ kafka --> ingest-consumer["Sentry ingest consumer"]
+ ingest-consumer --> preprocess-event
+
+ subgraph celery["Sentry celery tasks"]
+ preprocess-event --> save-event
+ preprocess-event --> process-event
+ preprocess-event --> symbolicate-event
+ symbolicate-event --> process-event
+ process-event --> save-event
+ save-event --> snuba-kafka[("Snuba Kafka
+(eventstream)")]
+ end
+
+ subgraph snuba["Snuba"]
+ snuba-kafka --> snuba-consumer["Snuba consumers"]
+ snuba-consumer --> clickhouse[("Clickhouse")]
+ end
```
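The celery stage above is effectively a chain of tasks handing the event along. A minimal sketch of that chaining, reusing the task names from the diagram (the routing predicates and storage details are assumed):

```python
# Sketch of the celery task chain above; the routing predicates are assumed.
from celery import shared_task

def needs_symbolication(event_id) -> bool: ...  # e.g. native events with frames
def needs_processing(event_id) -> bool: ...     # e.g. stacktrace/plugin work

@shared_task
def preprocess_event(event_id):
    # Route the event to the most specific pipeline step that applies.
    if needs_symbolication(event_id):
        symbolicate_event.delay(event_id)
    elif needs_processing(event_id):
        process_event.delay(event_id)
    else:
        save_event.delay(event_id)

@shared_task
def symbolicate_event(event_id):
    ...  # resolve native frames via Symbolicator
    process_event.delay(event_id)

@shared_task
def process_event(event_id):
    ...  # apply stacktrace processing, plugins, etc.
    save_event.delay(event_id)

@shared_task
def save_event(event_id):
    ...  # persist the event and publish it to the Snuba eventstream topic
```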
## Multi-Region
@@ -116,23 +115,23 @@ In addition to the siloed modes, there also exists a **Monolith** mode. In monol
```mermaid
flowchart TD
-
-ui[Frontend UI] --> usr
-ui --> cs
-ui --> eur
-
-
-subgraph usr [US Region]
- usapi[US Sentry API] --> uspg[(US Postgres)]
- usapi --> used[(EU Event Data)]
-end
-subgraph cs [Control Silo]
- capi[Control Silo Sentry API] --> cpg[(Control Postgres)]
-end
-subgraph eur [EU Region]
- euapi[EU Sentry API] --> eupg[(EU Postgres)]
- euapi --> eued[(EU Event Data)]
-end
+ ui[Frontend UI] --> usr
+ ui --> cs
+ ui --> eur
+
+ subgraph usr [US Region]
+ usapi[US Sentry API] --> uspg[(US Postgres)]
+ usapi --> used[(US Event Data)]
+ end
+
+ subgraph cs [Control Silo]
+ capi[Control Silo Sentry API] --> cpg[(Control Postgres)]
+ end
+
+ subgraph eur [EU Region]
+ euapi[EU Sentry API] --> eupg[(EU Postgres)]
+ euapi --> eued[(EU Event Data)]
+ end
```
Each region silo can be scaled independently, and is isolated from other regions. Within each region exists separate, dedicated infrastructure and applications as outlined in the [application overview](/architecture/#high-level-overview).
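As a rough illustration of the three modes (a hypothetical sketch, not Sentry's actual API), silo-aware code can be thought of as gating data access on the mode the deployment was booted in:

```python
# Hypothetical sketch of silo-mode gating; names are illustrative only.
import enum
import os

class SiloMode(enum.Enum):
    MONOLITH = "MONOLITH"  # one deployment serving control and region data
    CONTROL = "CONTROL"    # globally shared data: users, orgs, integrations
    REGION = "REGION"      # region-scoped data: projects, issues, events

def current_silo_mode() -> SiloMode:
    # Assume the mode is injected through deployment configuration.
    return SiloMode(os.environ.get("SILO_MODE", "MONOLITH"))

def require_region_data():
    # Region-scoped queries are only valid in REGION and MONOLITH modes.
    if current_silo_mode() is SiloMode.CONTROL:
        raise RuntimeError("region data is not accessible from the control silo")
```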
diff --git a/develop-docs/application/control-silo.mdx b/develop-docs/application/control-silo.mdx
index 1f611f83abfaf..fb25ce57bca48 100644
--- a/develop-docs/application/control-silo.mdx
+++ b/develop-docs/application/control-silo.mdx
@@ -44,23 +44,22 @@ Integrations can be shared by multiple organizations. For example, a single MS t
```mermaid
sequenceDiagram
-
-actor RegionSilo
-actor ControlSilo
-actor MsTeams
-
-RegionSilo ->> RegionSilo: send issue alert
-RegionSilo ->> ControlSilo : send issue alert to msteams
-ControlSilo ->> ControlSilo : load integration metadata
-ControlSilo ->> ControlSilo : add integration credentials to request
-ControlSilo ->> MsTeams : send notification request with credentials
-MsTeams -->> ControlSilo : token expired!
-ControlSilo ->> MsTeams : refresh token
-MsTeams -->> ControlSilo : response
-ControlSilo ->> ControlSilo : update stored token
-ControlSilo ->> MsTeams : send notification with new token
-MsTeams -->> ControlSilo : response
-ControlSilo -->> RegionSilo : response
+ actor RegionSilo
+ actor ControlSilo
+ actor MsTeams
+
+ RegionSilo ->> RegionSilo: send issue alert
+ RegionSilo ->> ControlSilo : send issue alert to msteams
+ ControlSilo ->> ControlSilo : load integration metadata
+ ControlSilo ->> ControlSilo : add integration credentials to request
+ ControlSilo ->> MsTeams : send notification request with credentials
+ MsTeams -->> ControlSilo : token expired!
+ ControlSilo ->> MsTeams : refresh token
+ MsTeams -->> ControlSilo : response
+ ControlSilo ->> ControlSilo : update stored token
+ ControlSilo ->> MsTeams : send notification with new token
+ MsTeams -->> ControlSilo : response
+ ControlSilo -->> RegionSilo : response
```
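In code, the retry-with-refresh flow above amounts to a wrapper around each outbound request. A simplified sketch with illustrative names (not Sentry's actual classes):

```python
# Simplified sketch of the credential proxy; names are illustrative only.
import requests

class IntegrationProxyClient:
    """Base class for integration clients whose access tokens can expire."""

    def __init__(self, access_token: str):
        self.access_token = access_token  # loaded from integration metadata

    def refresh_token(self) -> None:
        raise NotImplementedError  # each integration implements its own refresh

    def request(self, method: str, url: str, **kwargs):
        kwargs.setdefault("headers", {})["Authorization"] = f"Bearer {self.access_token}"
        response = requests.request(method, url, **kwargs)
        if response.status_code == 401:  # token expired!
            self.refresh_token()  # fetch and persist a new token
            kwargs["headers"]["Authorization"] = f"Bearer {self.access_token}"
            response = requests.request(method, url, **kwargs)
        return response

class MsTeamsClient(IntegrationProxyClient):
    def refresh_token(self) -> None:
        ...  # POST to the Microsoft OAuth endpoint and store the new token
```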
The integration proxy is implemented as a class that integrations requiring refresh tokens can subclass. Currently, the following integrations use the integration credential proxy:
@@ -141,13 +140,12 @@ For each mailbox with undelivered messages, we select a block of messages, and u
```mermaid
sequenceDiagram
-participant Control Silo
-
-Control Silo ->> Control Silo : find mailboxes with messages
-Control Silo ->> Control Silo : filter out mailboxes with future delivery
-Control Silo ->> Control Silo : update next delivery time for the next block
-Control Silo ->> Control Silo : Spawn task to drain a mailbox
+ participant Control Silo
+ Control Silo ->> Control Silo : find mailboxes with messages
+ Control Silo ->> Control Silo : filter out mailboxes with future delivery
+ Control Silo ->> Control Silo : update next delivery time for the next block
+ Control Silo ->> Control Silo : Spawn task to drain a mailbox
```
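A rough sketch of that scheduling pass, with assumed storage and task helpers:

```python
# Sketch of the scheduling pass above; storage and task helpers are assumed.
from datetime import datetime, timedelta, timezone

def find_mailboxes_with_messages():
    return []  # query mailboxes that still have undelivered messages

def spawn_drain_task(mailbox_id): ...  # enqueue an async drain task

def schedule_mailboxes():
    now = datetime.now(timezone.utc)
    for mailbox in find_mailboxes_with_messages():
        if mailbox.next_delivery_time > now:
            continue  # scheduled for future delivery; skip this pass
        # Advance the next delivery time first so concurrent scheduler runs
        # don't pick up the same mailbox block twice.
        mailbox.next_delivery_time = now + timedelta(minutes=1)
        mailbox.save()
        spawn_drain_task(mailbox.id)
```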
### Webhook Delivery
@@ -158,21 +156,21 @@ Draining a mailbox involves sending as many messages from a given mailbox as we
```mermaid
sequenceDiagram
-participant Control Silo
-participant Region Silo
-
-Control Silo ->> Control Silo : Fetch head block of mailbox
-loop foreach message
- Control Silo ->> Control Silo : if hook is above max attempts delete.
- Control Silo ->> Region Silo : Deliver hook
- alt success
- Region Silo -->> Control Silo : 200-40x
- Control Silo ->> Control Silo : delete hook
- else error
- Region Silo -->> Control Silo : 50x
- Control Silo ->> Control Silo : increment attempt and reschedule
- end
-end
+ participant Control Silo
+ participant Region Silo
+
+ Control Silo ->> Control Silo : Fetch head block of mailbox
+ loop foreach message
+ Control Silo ->> Control Silo : if hook is above max attempts, delete
+ Control Silo ->> Region Silo : Deliver hook
+ alt success
+ Region Silo -->> Control Silo : 200-40x
+ Control Silo ->> Control Silo : delete hook
+ else error
+ Region Silo -->> Control Silo : 50x
+ Control Silo ->> Control Silo : increment attempt and reschedule
+ end
+ end
```
Notably, most 40x errors are considered 'successful' deliveries, because re-attempting delivery of a webhook that initially received a 40x response will generally not result in a 200.
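Putting the loop and the status handling together, a condensed sketch (helper names and the message model are assumed):

```python
# Condensed sketch of draining one mailbox; status handling mirrors the
# diagram above: anything below 500 is terminal, 50x responses are retried.
MAX_ATTEMPTS = 10  # illustrative limit

def deliver_to_region(message) -> int:
    return 200  # forward the webhook to the region silo; returns the HTTP status

def drain_mailbox(mailbox):
    for message in mailbox.head_block():
        if message.attempts >= MAX_ATTEMPTS:
            message.delete()  # give up on hooks that keep failing
            continue
        status = deliver_to_region(message)
        if status < 500:
            # 200s and most 40x responses are terminal: retrying a 40x will
            # generally not turn into a 200, so the hook is dropped.
            message.delete()
        else:
            message.attempts += 1
            message.reschedule()  # picked up by a later scheduling pass
```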
diff --git a/develop-docs/application/cross-region-replication.mdx b/develop-docs/application/cross-region-replication.mdx
index 1772e959e719a..18c04f0fe1e13 100644
--- a/develop-docs/application/cross-region-replication.mdx
+++ b/develop-docs/application/cross-region-replication.mdx
@@ -41,8 +41,21 @@ User, Organization, and Membership deletions are the most common form of cross r
The flow for removing a user is
-
-[diagram source](https://mermaid.live/edit#pako:eNplkTFrwzAQhf-K0BYcD203Q7O0ayHEdNMiSxdHIN2l0qk0hPz3ntMUYme703vf49A7a0cedKcLfFVAB-_Bjtkmg7YyYU0DZIOyOaasPsu0HW3m4MLRIqs3Qs4UVR8izZUdjIHwJhicUNVuNs0MUZ3yEIFB1UkP_vXp-cXgzCLQktlBou8b06hiZabKA_2oBKXYEZYRktFerxd4OD3Kq-b-XjFt6xBDOTzE3ruuoXOsn05hSkORz1va29XCvQNH6EKcIXqtE-Rkg5dWzgaVMpoPkMDoTkYPe1sjG23wItappf6ETnecK6x1PXrL_yXqbm9jkVfwQer7-Gv6WvjlF-fzsOs)
+```mermaid
+sequenceDiagram
+ autonumber
+
+ actor User
+ participant Control Silo
+ participant Region Silo
+
+ User ->>+ Control Silo : delete user id=123
+ Control Silo ->> Control Silo : Remove user + save outbox message
+ Control Silo -->>- User : bye
+ Control Silo --)+ Region Silo : Publish outbox message
+ Region Silo ->>- Region Silo : Save tombstone
+ Region Silo --) Region Silo : Reconcile tombstone
+```
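+
+The control-silo half of this flow hinges on writing the outbox message in the same transaction as the mutation. A schematic sketch, with hypothetical helpers:
+
+```python
+# Schematic sketch of the outbox + tombstone flow; helpers are hypothetical.
+from contextlib import contextmanager
+
+@contextmanager
+def database_transaction():
+    yield  # stand-in for a real database transaction
+
+def delete_user_row(user_id): ...
+def enqueue_outbox_message(**fields): ...
+def save_tombstone(table, object_id): ...
+
+def delete_user(user_id):
+    # Control silo: the delete and the outbox write commit atomically, so the
+    # message to regions cannot be lost if the process dies mid-delete.
+    with database_transaction():
+        delete_user_row(user_id)
+        enqueue_outbox_message(shard="user", object_id=user_id, event="deleted")
+
+def handle_outbox_message(object_id):
+    # Region silo: record a tombstone immediately (step 5); related rows are
+    # reconciled asynchronously afterwards (step 6).
+    save_tombstone(table="auth_user", object_id=object_id)
+```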
In steps 5 and 6 of the above diagram, we reconcile the tombstone changes with the rest of the data in the region. Tombstones need to be reconciled for each relation that the removed record had. For example, removing a user will:
diff --git a/develop-docs/application/dynamic-sampling/outcomes.mdx b/develop-docs/application/dynamic-sampling/outcomes.mdx
index 75819a02da826..6bd7f8f1aafc8 100644
--- a/develop-docs/application/dynamic-sampling/outcomes.mdx
+++ b/develop-docs/application/dynamic-sampling/outcomes.mdx
@@ -15,23 +15,23 @@ For a sampled transaction:
```mermaid
flowchart LR
- SDK --transaction--> Relay
- Relay --transaction--> DS[Dynamic Sampling]
- Relay --transaction_indexed--> DS
- DS --transaction-->Store
- DS --transaction_indexed-->Store
+ SDK --transaction--> Relay
+ Relay --transaction--> DS[Dynamic Sampling]
+ Relay --transaction_indexed--> DS
+ DS --transaction-->Store
+ DS --transaction_indexed-->Store
```
For a transaction filtered by dynamic sampling:
```mermaid
flowchart LR
- SDK --transaction--> Relay
- Relay --transaction--> DS[Dynamic Sampling]
- Relay --transaction_indexed--> DS
- DS --transaction-->Store
- DS --transaction_indexed-->Reject
- style Reject stroke:#f00
+ SDK --transaction--> Relay
+ Relay --transaction--> DS[Dynamic Sampling]
+ Relay --transaction_indexed--> DS
+ DS --transaction-->Store
+ DS --transaction_indexed-->Reject
+ style Reject stroke:#f00
```
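Put differently, only the indexed category is affected by the sampling decision; the base `transaction` category reaches the store in both cases. A schematic sketch, with stand-in outcome helpers:

```python
# Schematic routing of the two data categories; helpers are stand-ins.
def store(category): ...   # record the item as stored
def reject(category): ...  # record the item as filtered by dynamic sampling

def route_transaction(keep: bool):
    store("transaction")  # stored in both cases above
    if keep:
        store("transaction_indexed")   # full payload is kept
    else:
        reject("transaction_indexed")  # payload dropped by dynamic sampling
```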
## Data Types
diff --git a/develop-docs/application/feedback-architecture.mdx b/develop-docs/application/feedback-architecture.mdx
index 9a5816f4bf91b..2cde9c7fea727 100644
--- a/develop-docs/application/feedback-architecture.mdx
+++ b/develop-docs/application/feedback-architecture.mdx
@@ -76,12 +76,12 @@ event[”contexts”][”feedback”] = {
```mermaid
graph TD
- app[Your Application] --> |"envelope"| Relay
- Relay --> f_consumer([ingest-feedback-events])
- f_consumer --> |"queues as celery task"| create_feedback_issue
- create_feedback_issue --> o_consumer(["ingest-occurrences"])
- o_consumer --> nodestore[(Nodestore)]
- o_consumer --> |"EventStream"| snuba[("Snuba/Clickhouse")]
+ app[Your Application] --> |"envelope"| Relay
+ Relay --> f_consumer([ingest-feedback-events])
+ f_consumer --> |"queues as celery task"| create_feedback_issue
+ create_feedback_issue --> o_consumer(["ingest-occurrences"])
+ o_consumer --> nodestore[(Nodestore)]
+ o_consumer --> |"EventStream"| snuba[("Snuba/Clickhouse")]
```
In Relay v24.5.1, we migrated feedback to its own kafka topic + consumer,
@@ -123,36 +123,36 @@ user_report = {
```mermaid
graph TD
- subgraph Sentry
- report_endpoint["/user-feedback"]
- crash_report_modal["Crash Report View"]
- end
-
- subgraph functions[Functions - run in referrer]
- save_userreport
- shim_to_feedback
- create_feedback_issue
- save_userreport --> |"IF event processed"| shim_to_feedback
- shim_to_feedback --> create_feedback_issue
- end
-
- %% envelope pipeline
- app[Your Application] --> |"envelope"| Relay
- Relay --> a_topic([ingest-attachments])
- a_topic --> save_userreport
-
- %% endpoint and crash reports
- app --> |"POST"| report_endpoint --> save_userreport
- app --> |"POST"| crash_report_modal --> save_userreport
-
- %% issues platform
- create_feedback_issue --> o_consumer(["ingest-occurrences"])
- o_consumer --> nodestore[(Nodestore)]
- o_consumer --> |"EventStream"| snuba[(Snuba/Clickhouse)]
-
- %% user report saves/updates
- save_userreport --> postgres[("Postgres")]
- snuba --> |"queries eventstore"| save_userreport
+ subgraph Sentry
+ report_endpoint["/user-feedback"]
+ crash_report_modal["Crash Report View"]
+ end
+
+ subgraph functions[Functions - run in referrer]
+ save_userreport
+ shim_to_feedback
+ create_feedback_issue
+ save_userreport --> |"IF event processed"| shim_to_feedback
+ shim_to_feedback --> create_feedback_issue
+ end
+
+ %% envelope pipeline
+ app[Your Application] --> |"envelope"| Relay
+ Relay --> a_topic([ingest-attachments])
+ a_topic --> save_userreport
+
+ %% endpoint and crash reports
+ app --> |"POST"| report_endpoint --> save_userreport
+ app --> |"POST"| crash_report_modal --> save_userreport
+
+ %% issues platform
+ create_feedback_issue --> o_consumer(["ingest-occurrences"])
+ o_consumer --> nodestore[(Nodestore)]
+ o_consumer --> |"EventStream"| snuba[(Snuba/Clickhouse)]
+
+ %% user report saves/updates
+ save_userreport --> postgres[("Postgres")]
+ snuba --> |"queries eventstore"| save_userreport
```
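The pivotal branch above is that a user report is only shimmed to a feedback issue once its associated event has been processed. Schematically, with hypothetical helper names:

```python
# Sketch of the shim gating above; helper names are hypothetical.
def persist_user_report(report): ...       # insert/update the Postgres row
def query_eventstore(event_id): ...        # look up the event via Snuba/Clickhouse
def create_feedback_issue(report, event): ...  # hands off to the occurrences pipeline

def shim_to_feedback(report, event):
    create_feedback_issue(report, event)

def save_userreport(report):
    persist_user_report(report)
    event = query_eventstore(report["event_id"])
    if event is not None:  # IF event processed: promote to a feedback issue
        shim_to_feedback(report, event)
```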
@@ -175,10 +175,10 @@ Simplified diagram:
```mermaid
graph TD
- shim_to_feedback --> |"..."| snuba[(Snuba/Clickhouse)]
- snuba --> |"queries eventstore"| pp_job
- pp_job["errors post process job"] <--> |"queries/updates"| postgres[("Postgres")]
- pp_job --> shim_to_feedback
+ shim_to_feedback --> |"..."| snuba[(Snuba/Clickhouse)]
+ snuba --> |"queries eventstore"| pp_job
+ pp_job["errors post process job"] <--> |"queries/updates"| postgres[("Postgres")]
+ pp_job --> shim_to_feedback
```
@@ -225,21 +225,21 @@ Crash reports are also shimmed to feedback. The pipeline is the same as the
```mermaid
graph TD
- subgraph Storages
- postgres[("Postgres (UserReports)")]
- snuba[(Snuba/Clickhouse)]
- end
-
- subgraph Sentry
- issues_endpoint["/issues"]
- issues_endpoint --> |"GET"| ui["Feedback UI (/feedback)"]
- report_endpoint["/user-feedback"]
- report_endpoint --> |"GET"| report_ui["Issue Details UI > User Feedback"]
- issues_endpoint --> |"GET"| report_ui
- end
-
- postgres --> |"queries"| report_endpoint
- snuba --> |"queries"| issues_endpoint
+ subgraph Storages
+ postgres[("Postgres (UserReports)")]
+ snuba[(Snuba/Clickhouse)]
+ end
+
+ subgraph Sentry
+ issues_endpoint["/issues"]
+ issues_endpoint --> |"GET"| ui["Feedback UI (/feedback)"]
+ report_endpoint["/user-feedback"]
+ report_endpoint --> |"GET"| report_ui["Issue Details UI > User Feedback"]
+ issues_endpoint --> |"GET"| report_ui
+ end
+
+ postgres --> |"queries"| report_endpoint
+ snuba --> |"queries"| issues_endpoint
```
You can view the user reports related to a specific issue at the "User Feedback"
diff --git a/docs/contributing/pages/charts-diagrams.mdx b/docs/contributing/pages/charts-diagrams.mdx
new file mode 100644
index 0000000000000..87ff4fb62e9f7
--- /dev/null
+++ b/docs/contributing/pages/charts-diagrams.mdx
@@ -0,0 +1,23 @@
+---
+title: Charts & Diagrams
+noindex: true
+sidebar_order: 30
+---
+
+You can render charts and diagrams in your MDX files using [Mermaid](https://mermaid.js.org):
+
+````txt {tabTitle:Example}
+```mermaid
+flowchart TD
+ Start --> Stop
+```
+````
+
+This will render as:
+
+```mermaid
+flowchart TD
+ Start --> Stop
+```
+
+See the [Mermaid documentation](https://mermaid.js.org/intro/) to learn what types of charts and diagrams can be rendered.
diff --git a/docs/contributing/pages/components.mdx b/docs/contributing/pages/components.mdx
index 389967d259708..38a68dedef70a 100644
--- a/docs/contributing/pages/components.mdx
+++ b/docs/contributing/pages/components.mdx
@@ -1,6 +1,7 @@
---
title: MDX Components
noindex: true
+sidebar_order: 40
---
## Alert
diff --git a/docs/contributing/pages/frontmatter.mdx b/docs/contributing/pages/frontmatter.mdx
index ec897f596a558..1a8c2171971ec 100644
--- a/docs/contributing/pages/frontmatter.mdx
+++ b/docs/contributing/pages/frontmatter.mdx
@@ -1,6 +1,7 @@
---
title: Frontmatter
noindex: true
+sidebar_order: 10
---
Frontmatter is a YAML-formatted blob defined at the top of every markdown or mdx file:
@@ -55,5 +56,3 @@ Overrides the next page shown in the bottom pagination navigation.
`previousPage` (`{ path: 'path/to/page', title: 'Page Title' }`)
Overrides the previous page shown in the bottom pagination navigation.
-
-
diff --git a/docs/contributing/pages/images.mdx b/docs/contributing/pages/images.mdx
index b79a4df53c42c..df4a071b7cf03 100644
--- a/docs/contributing/pages/images.mdx
+++ b/docs/contributing/pages/images.mdx
@@ -1,6 +1,7 @@
---
title: Images
noindex: true
+sidebar_order: 20
---
If you want to add images to your docs, you can add them in a `./img` folder in the same directory as your `.mdx` files.
diff --git a/docs/contributing/pages/redirects.mdx b/docs/contributing/pages/redirects.mdx
index a2f91550dd058..ebc01fdf4148f 100644
--- a/docs/contributing/pages/redirects.mdx
+++ b/docs/contributing/pages/redirects.mdx
@@ -1,6 +1,7 @@
---
title: Redirects
noindex: true
+sidebar_order: 60
---
Redirects allow you to automatically redirect an incoming request path to a new destination path. When you move or rename a file, you should make sure to set up a redirect from the old path to the new path, so that the old URL still takes users to the right place.
diff --git a/docs/contributing/pages/search.mdx b/docs/contributing/pages/search.mdx
index cd15df617c2c9..8e1496f840af6 100644
--- a/docs/contributing/pages/search.mdx
+++ b/docs/contributing/pages/search.mdx
@@ -1,6 +1,7 @@
---
title: Search
noindex: true
+sidebar_order: 70
---
Search is powered by Algolia, and will index all content in `/docs/` that is Markdown or MDX formatted.
diff --git a/docs/contributing/pages/variables.mdx b/docs/contributing/pages/variables.mdx
index 97e80e043ea7c..aeaf8648bdab3 100644
--- a/docs/contributing/pages/variables.mdx
+++ b/docs/contributing/pages/variables.mdx
@@ -1,6 +1,7 @@
---
title: Markdown Variables
noindex: true
+sidebar_order: 50
---
A transformation is exposed to both Markdown and MDX files which supports processing variables in a Django/Jekyll-style way.
diff --git a/src/components/mermaid.tsx b/src/components/mermaid.tsx
index f906985b187b5..dfaabe8599129 100644
--- a/src/components/mermaid.tsx
+++ b/src/components/mermaid.tsx
@@ -1,9 +1,11 @@
'use client';
import {useEffect} from 'react';
+import {useTheme} from 'next-themes';
export default function Mermaid() {
+ const {resolvedTheme} = useTheme();
useEffect(() => {
- (async function () {
+ const renderMermaid = async () => {
const escapeHTML = (str: string) => {
return str.replace(/[&<>"']/g, function (match) {
const escapeMap = {
@@ -22,7 +24,10 @@ export default function Mermaid() {
return;
}
const {default: mermaid} = await import('mermaid');
- mermaid.initialize({startOnLoad: false});
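+ // Pick mermaid's built-in palette to match the site's resolved theme.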
+ mermaid.initialize({
+ startOnLoad: false,
+ theme: resolvedTheme === 'light' ? 'default' : 'dark',
+ });
mermaidBlocks.forEach(block => {
// get rid of code highlighting
const code = block.textContent ?? '';
@@ -35,7 +40,8 @@ export default function Mermaid() {
}
});
await mermaid.run({nodes: document.querySelectorAll('.language-mermaid')});
- })();
- }, []);
+ };
+ renderMermaid();
+ }, [resolvedTheme]);
return null;
}