diff --git a/docs/extensions-plugins/azure-data-studio.mdx b/docs/extensions-plugins/azure-data-studio.mdx index a8596656f..898ca722d 100644 --- a/docs/extensions-plugins/azure-data-studio.mdx +++ b/docs/extensions-plugins/azure-data-studio.mdx @@ -135,7 +135,7 @@ This setting enables automatic copying for generated Pieces links. If enabled, y If you choose to enable auto-save, Pieces will automatically save highly re-used and potentially useful snippets while you work. This cuts down on the number of snippets you need to save manually and helps to prevent searching for code in all of your files. ### Cloud Capabilities -Here you can choose which type of ML models you'd like to use in Pieces for Azure Data Studio: Cloud, Local, or Blended. To learn more about the types of ML models and their privacy implications, [visit this section of our documentation](/product-highlights-and-benefits/privacy-security-data#our-machine-learning-models). +Here you can choose which type of ML models you'd like to use in Pieces for Azure Data Studio: Cloud, Local, or Blended. To learn more about the types of ML models and their privacy implications, [visit this section of our documentation](/privacy-and-security#our-machine-learning-models). ### Display In this section, you can choose what aspect of your snippets to display in the snippet list. diff --git a/docs/extensions-plugins/visual-studio.mdx b/docs/extensions-plugins/visual-studio.mdx index e00ebeb7a..dcfcac54f 100644 --- a/docs/extensions-plugins/visual-studio.mdx +++ b/docs/extensions-plugins/visual-studio.mdx @@ -113,7 +113,7 @@ Easily modify tags, links, content, or delete assets directly within the Pieces This setting enables automatic copying for generated Pieces links. If enabled, you won't need to copy a Pieces link before sharing it with a teammate or friend. ### Cloud Capabilities -Here you can choose which type of ML models you'd like to use in Pieces for Visual Studio: Cloud, Local, or Blended. To learn more about the types of ML models and their privacy implications, [visit this section of our documentation](/product-highlights-and-benefits/privacy-security-data#our-machine-learning-models). +Here you can choose which type of ML models you'd like to use in Pieces for Visual Studio: Cloud, Local, or Blended. To learn more about the types of ML models and their privacy implications, [visit this section of our documentation](/privacy-and-security#our-machine-learning-models). ### CodeLens To enable or disable Pieces CodeLens, go to `Tools > Options > Pieces > User Preferences` and check the `Enabled` option for CodeLens. diff --git a/docs/extensions-plugins/vscode.mdx b/docs/extensions-plugins/vscode.mdx index 878e03a85..3da8b682a 100644 --- a/docs/extensions-plugins/vscode.mdx +++ b/docs/extensions-plugins/vscode.mdx @@ -163,7 +163,7 @@ This setting enables automatic copying for generated Pieces links. If enabled, y If you choose to enable auto-save, Pieces will automatically save highly re-used and potentially useful snippets while you work. This cuts down on the number of snippets you need to save manually and helps to prevent searching for code in all of your files. ### Cloud Capabilities -Here you can choose which type of ML models you'd like to use in Pieces for VS Code: Cloud, Local, or Blended. To learn more about the types of ML models and their privacy implications, [visit this section of our documentation](/product-highlights-and-benefits/privacy-security-data#our-machine-learning-models). 
+Here you can choose which type of ML models you'd like to use in Pieces for VS Code: Cloud, Local, or Blended. To learn more about the types of ML models and their privacy implications, [visit this section of our documentation](/privacy-and-security#our-machine-learning-models). ### Display In this section, you can choose what aspect of your snippets to display in the snippet list. diff --git a/docs/features/user-settings.mdx b/docs/features/user-settings.mdx index 940a03b2a..639179728 100644 --- a/docs/features/user-settings.mdx +++ b/docs/features/user-settings.mdx @@ -273,12 +273,12 @@ You can choose to use local or blended processing to manage your code snippets. Try both modes to find what works best in your environment. -**Note:** Choosing local-only processing ensures that your code remains solely on your device. Learn more about [Privacy & Data Security](/product-highlights-and-benefits/privacy-security-data) at Pieces to determine which processing mode is best for you. +**Note:** Choosing local-only processing ensures that your code remains solely on your device. Learn more about [Privacy & Security](/privacy-and-security) at Pieces to determine which processing mode is best for you. ## Telemetry You can always adjust the amount of anonymous data that you share with us. By unchecking this box, you ensure that we will not collect data and analytics from your machine. Regardless of collection preferences, we do not collect your code snippets! They are stored on your machine. The data we collect helps us see how often particular actions are taken, what sections of the application are used by the community as a whole, and other data so that we can provide a better experience for you. Personal data is not collected and your code is not shared with any other parties, even when you generate shareable links. -Read more about our [Privacy & Data Security](/product-highlights-and-benefits/privacy-security-data) to get a full look. +Read more about our [Privacy & Security](/privacy-and-security) to get a full look. ## Information To share the version number and specific platform build of your version of the Pieces Desktop App, just press the copy button next to either of these items. These are helpful when working with Support on installation and debugging issues. diff --git a/docs/privacy-and-security.mdx b/docs/privacy-and-security.mdx new file mode 100644 index 000000000..376b7a111 --- /dev/null +++ b/docs/privacy-and-security.mdx @@ -0,0 +1,129 @@ +--- +title: Privacy, Security & Your Data +description: Let's jump in and touch on privacy, data, cloud-based features, and our ML models. +--- + +import CTAButton from "/src/components/CTAButton"; + +From day 1 of building Pieces for Developers, we've had a first principle that everything is local first and built with speed, privacy, security, and offline-productivity in mind. + +We also know that our partners **operate in highly secure and sensitive environments**, and we want to be prepared to operate in such environments (HIPAA, SOC 2, FERPA/COPPA, etc.). That being said - we can't tell you how happy we are that we have stayed true to this philosophy for cases just like yours! + +## We are SOC 2 Compliant 🔒 {#we-are-soc-2-compliant} + +SOC 2 is a prestigious benchmark in the tech industry, especially important for companies like ours. It verifies that we meet strict organizational controls and practices, enhancing our credibility and trustworthiness in the market. 
+
+This achievement is a collective triumph for our team, reflecting our dedication to maintaining a secure and reliable service. It's an essential step forward in our mission to provide world-class developer tools.
+
+
+
+## Our Machine Learning Models
+
+Pieces builds its own internal ML models to power some of our features. We have a few different models, and they are all built with privacy and security in mind.
+
+:::info Local Processing
+
+Our ML models are designed to run entirely offline and on-device. If you opt out of blended processing, the models run from within the application's binary, so no internet connection is required.
+
+:::
+
+If you have **not opted out of blended processing**, then some of the models will offload computation to the cloud. A few of our models are **only** available in the cloud, but we are working on making them local.
+
+This table shows which models are available locally and which will use cloud compute unless processing is set to local.
+
+| **Model** | **Local** | **Blended** |
+|-------------------------------- |----------- |------------- |
+| Code vs Text | ✅ | ✅ |
+| Coding language classification | ✅ | ✅ |
+| Code Similarity | ✅ | ❌ |
+| Description Generation | ✅ | ✅ |
+| Framework Detection | ✅ | ✅ |
+| Image to text (OCR) | ✅ | ✅ |
+| Link Extraction | ✅ | ❌ |
+| Neural Code Search | ✅ | ❌ |
+| Related Links | 🚧 | ✅ |
+| Suggested tags from tags | ✅ | ❌ |
+| Suggested Save | ✅ | ❌ |
+| Snippet Discovery | ✅ | ❌ |
+| Secret Detection | ✅ | ✅ |
+| Search Queries | 🚧 | ✅ |
+| Tag Generation | ✅ | ✅ |
+| Title Generation | ✅ | ✅ |
+
+Our ML models are not trained continuously. **They do not train on your data** as you use the product.
+
+## Live Context
+
+The **Live Context** feature in Pieces enhances the functionality of the Pieces Copilot by utilizing our proprietary Workstream Pattern Engine (WPE). This feature is designed with privacy and efficiency in mind, ensuring that all data processing and storage occur locally on your device.
+
+### How Live Context Works
+
+1. **On-Device Processing and Storage:** All WPE algorithms, processing, and storage take place directly on your device. This ensures that your data remains secure and private, without being transmitted over the internet unless necessary.
+2. **Querying Local Data:** When Live Context is enabled and you ask the Copilot a question, the system queries data aggregated by the WPE. This data is processed entirely on your device to find content that is relevant to your query.
+3. **Utilizing Retrieval-Augmented Generation (RAG) for Contextual Relevance:** The relevant content identified by the WPE is then used as context for the Copilot prompt.
+4. **Interaction with Language Models (LLM):**
+   - **Cloud LLM:** If you are using a cloud-based LLM, the data identified as relevant is sent to the cloud LLM for processing.
+   - **Local LLM:** If you are using a local LLM, the data remains on your device, ensuring that all processing happens locally without any data leaving your device.
+
+As noted above, our ML models run entirely offline and on-device when you **opt out of blended processing**, and they do not train on your data.
+
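+To make the flow above more concrete, here is a minimal, illustrative sketch of the retrieve-then-generate pattern described in steps 1-4. It is not Pieces source code or an API reference: `retrieveLocalContext`, `runLocalLlm`, and `callCloudLlm` are hypothetical stand-ins for on-device retrieval over WPE data and for whichever local or cloud model you have selected.
+
+```typescript
+// Illustrative sketch only; none of these functions are part of the Pieces API.
+type ContextChunk = { source: string; text: string };
+
+// Hypothetical on-device retrieval over locally aggregated workstream data:
+// score each chunk by naive term overlap and keep the best matches.
+function retrieveLocalContext(question: string, store: ContextChunk[], topK = 3): ContextChunk[] {
+  const terms = question.toLowerCase().split(/\s+/).filter(Boolean);
+  return store
+    .map((chunk) => ({
+      chunk,
+      score: terms.filter((term) => chunk.text.toLowerCase().includes(term)).length,
+    }))
+    .filter((entry) => entry.score > 0)
+    .sort((a, b) => b.score - a.score)
+    .slice(0, topK)
+    .map((entry) => entry.chunk);
+}
+
+// Hypothetical model runners. With a local LLM nothing leaves the device;
+// with a cloud LLM only the assembled prompt (question + retrieved context) is sent.
+async function runLocalLlm(prompt: string): Promise<string> {
+  return `local answer based on: ${prompt.slice(0, 60)}...`;
+}
+async function callCloudLlm(prompt: string): Promise<string> {
+  return `cloud answer based on: ${prompt.slice(0, 60)}...`;
+}
+
+async function askCopilot(question: string, store: ContextChunk[], useLocalLlm: boolean) {
+  // Steps 1-2: query the data aggregated on-device for content relevant to the question.
+  const context = retrieveLocalContext(question, store);
+  // Step 3: use the retrieved content as context for the prompt (RAG).
+  const prompt = [
+    "Context:",
+    ...context.map((chunk) => `- (${chunk.source}) ${chunk.text}`),
+    "",
+    `Question: ${question}`,
+  ].join("\n");
+  // Step 4: route the prompt to a local or cloud LLM, depending on your setting.
+  return useLocalLlm ? runLocalLlm(prompt) : callCloudLlm(prompt);
+}
+```
+
+With `useLocalLlm` set to `true`, everything in this sketch stays on your device, which mirrors the Local LLM path described above.
+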
+The data that we do collect is completely anonymous and highly secure. We also know that our partners operate in highly secure and sensitive environments, and we want to be prepared to operate in such environments (HIPAA, SOC 2, FERPA/COPPA, etc.).
+
+### Privacy Recommendations
+
+For users concerned about privacy, we strongly recommend using a Local LLM with the Pieces Copilot. Options include Mistral, Phi, and Llama, among other powerful local models. Using a local LLM ensures that all data and processing remain on your device, providing an additional layer of security and privacy.
+
+### Performance Note
+
+Please note that results may vary depending on the selected LLM. Each model has its strengths and capabilities, which can influence the effectiveness of the Live Context feature.
+
+## Saving Code Snippets in the Cloud
+
+The Pieces cloud is entirely opt-in. Authentication is managed by our enterprise-ready authentication partners at Auth0 (now owned by Okta).
+
+Even when a user signs in, they do not have a cloud until they specifically connect it in their [settings](/features/user-settings#private-cloud-beta).
+
+If a user opts into the cloud, the data is only uploaded when something is shared.
+
+When you click the "Share" icon and create a shareable link, only then is the snippet uploaded and accessible via the cloud.
+
+Finally, a note on cloud architecture for the things that are backed up:
+- There is no centralized database; each user has their own micro-database
+- There are no centralized or shared servers
+- Each user has their own Cloud Run instance, with their own unique subdomain and their own micro load balancer
+
+Every user's cloud scales up and down completely independently of other users. The cloud is only running when a shared snippet is being accessed, backed up, or updated, and we can easily port our Docker images over to an existing "Panasonic Cloud" if needed.
+
+## Telemetry & Crash Data
+
+Most importantly, all data collection is opt-out, and we give all the control to our users.
+
+The data that we do collect is completely anonymous and highly secure.
+
+### Why do we collect data?
+
+Long story short, we're an early-stage startup and the data helps us report on overall growth and hopefully earn more funding to continue building amazing products for our users.
+
+Here are some screenshots of Telemetry opt-outs from the Pieces products:
+
+#### Pieces for Developers Desktop App
+
+![](/assets/pfd_telemetry.png)
+
+#### Pieces for IntelliJ
+
+![](/assets/intellij_telemetry.png)
+
+#### Pieces for VS Code
+
+![](/assets/vscode_telemetry.png)
+
+#### Pieces for Chrome
+
+![](/assets/chrome_telemetry.png)
+
+If you have any other questions about privacy and security, please reach out! We want to make choosing Pieces as easy for you as possible.
diff --git a/docs/product-highlights-and-benefits/privacy-security-data.mdx b/docs/product-highlights-and-benefits/privacy-security-data.mdx
deleted file mode 100644
index b3f598f63..000000000
--- a/docs/product-highlights-and-benefits/privacy-security-data.mdx
+++ /dev/null
@@ -1,128 +0,0 @@
----
-title: Privacy, Security & Your Data
-description: Let's jump in and touch on privacy, data, cloud-based features, and our ML models.
----
-
-# Privacy, Security, & Your Data
-From Day 1 of engineering Pieces for Developers, we've had a first principle that everything is local first and built with speed, privacy, security, and offline-productivity in mind.
- -We also know that our partners **operate in highly secure and sensitive environments**, and we want to be prepared to operate in such environments, e.g., HIPAA, SOC 2, FERPA/COPPA, etc. That being said - we can't tell you how happy we are that we have stayed true to this philosophy for cases just like yours! - -## Where Snippets are Stored -Your snippets are stored completely locally on your device. This is what the path looks like for an installation on macOS: - -### macOS - -`/Users/[YOUR-USERNAME]/Library/com.pieces.os/` - -### Windows - -`C:/Users/[YOUR-USERNAME]/Documents/com.pieces.os/` - -### Linux - -`/Users/[YOUR-USERNAME]/Documents/com.pieces.os/` - -You can easily copy this directory to a flash drive and bring it to another computer. - - -## We are SOC 2 Compliant! - -![SOC 2 Compliance Badge](/assets/soc-2-badge.png) - -SOC 2 is a prestigious benchmark in the tech industry, especially important for companies like ours. It verifies that we meet strict organizational controls and practices, enhancing our credibility and trustworthiness in the market. - -This achievement is a collective triumph for our team, reflecting our dedication to maintaining a secure and reliable service. It's an essential step forward in our mission to provide word class developer tools. - -## Our Machine Learning Models -Our ML models can be completely local and offline (i.e., they're shipped within the application's binary & require no internet) as long as you opt out of blended processing. - -If you have **not opted out of blended processing,** then some of the models will offload computation to the cloud. A few of our models are **only** available in the cloud, but we are working on making them local. - -This table shows which models are available locally and which will use cloud compute unless processing is set to local. -| **Model** | **Local** | **Blended** | -|-------------------------------- |----------- |------------- | -| Code vs Text | ✅ | ✅ | -| Coding language classification | ✅ | ✅ | -| Code Similarity | ✅ | ❌ | -| Description Generation | ✅ | ✅ | -| Framework Detection | ✅ | ✅ | -| Image to text (OCR) | ✅ | ✅ | -| Link Extraction | ✅ | ❌ | -| Neural Code Search | ✅ | ❌ | -| Related Links | 🚧 | ✅ | -| Suggested tags from tags | ✅ | ❌ | -| Suggested Save | ✅ | ❌ | -| Snippet Discovery | ✅ | ❌ | -| Secret Detection | ✅ | ✅ | -| Search Queries | 🚧 | ✅ | -| Tag Generation | ✅ | ✅ | -| Title Generation | ✅ | ✅ | - - -Our ML models are not trained continuously. They do not train on your data as you use the product! - -## Live Context - -The Live Context feature in Pieces enhances the functionality of the Pieces Copilot by utilizing our proprietary Workstream Pattern Engine (WPE). This feature is designed with privacy and efficiency in mind, ensuring that all data processing and storage occur locally on your device. - -### How Live Context Works - -1. **On-Device Processing and Storage:** All WPE algorithms, processing, and storage take place directly on your device. This ensures that your data remains secure and private, without being transmitted over the internet unless necessary. -2. **Querying Local Data:** When Live Context is enabled, and you ask a question to the Copilot, the system queries data aggregated from the WPE. This data is processed entirely on your device to find content that is relevant to your query. -3. 
**Utilizing [Retrieval-Augmented Generation (RAG)](/build/glossary/terms/retrieval-augmented-generation) for Contextual Relevance:** The relevant content identified by the WPE is then used as context for the Copilot prompt. -4. **Interaction with Language Models (LLM):** - - **Cloud LLM:** If you are using a cloud-based LLM, the data identified as relevant is sent to the cloud LLM for processing. - - **Local LLM:** If you are using a local LLM, the data remains on your device, ensuring that all processing happens locally without any data leaving your device. - -### Privacy Recommendations - -For users concerned about privacy, we strongly recommend using a Local LLM with the Pieces Copilot. Options include Mistral, Phi-2, Llama2, among others. Using a local LLM ensures that all data and processing remain on your device, providing an additional layer of security and privacy. - -### Performance Note - -Please note that results may vary depending on the selected LLM. Each model has its strengths and capabilities, which can influence the effectiveness of the Live Context feature. - -## Saving Code Snippets in the Cloud -The Pieces cloud is entirely opt-in. Authentication is managed by our enterprise-ready authentication partners at Auth0 (now owned by Okta). - -Even when a user signs in, they do not have a cloud until they specifically connect it in their Settings. - -If a user opts into the cloud, the data is only uploaded when something is shared. - -When you click the "Share" icon and create a shareable link, only then is the snippet uploaded and accessible via the cloud. - -Finally, a note on cloud architecture for the things that are backed up: -- There is no centralized database; each user has their own micro-database -- There are no centralized or shared servers -- Each user has their own Cloud Run instance, with their own unique subdomain and their own micro load balancer - -Every user's cloud scales up and down completely independent of other users. The cloud is only running when a shared snippet is being accessed, backed up, or updated, and we can easily port our docker images over to an existing "Panasonic Cloud" if needed. - -## Telemetry & Crash Data -Most importantly, all data collection is opt-out and we give all the control to our users. - -The data that we do collect is completely anonymous and highly secure (we take data very seriously). - -### Why do we collect data? -Long story short, we're a seed stage startup and the data helps us to report on overall growth and hopefully help us earn more funding. - -Here are some screenshots of Telemetry opt-outs from the Pieces products: - -#### Pieces for Developers Desktop App - -![](/assets/pfd_telemetry.png) - -#### Pieces for IntelliJ - -![](/assets/intellij_telemetry.png) - -#### Pieces for VS Code - -![](/assets/vscode_telemetry.png) - -#### Pieces for Chrome - -![](/assets/chrome_telemetry.png) - -If you have any other questions about privacy and security, please reach out! We want to make choosing Pieces as easy for you as possible. 
diff --git a/docs/resources/live-context.mdx b/docs/resources/live-context.mdx
index 724425009..1fa0d6cdd 100644
--- a/docs/resources/live-context.mdx
+++ b/docs/resources/live-context.mdx
@@ -18,7 +18,7 @@ Live Context elevates the Pieces Copilot, allowing it to understand your workflo
 - [Live Context Documentation Page](https://docs.pieces.app/product-highlights-and-benefits/live-context)
 - [Tsavo's Excellent Demo and Discussion](https://x.com/i/broadcasts/1YpKkwQNZDmKj)
   - Demo Timestamps: 10:18-18:43
-- [Live Context Privacy and Security Documentation](https://docs.pieces.app/product-highlights-and-benefits/privacy-security-data#live-context)
+- [Live Context Privacy and Security Documentation](/privacy-and-security#live-context)
 - [The Pieces Team Magic Moments](https://www.youtube.com/watch?v=pEHdZyR83BU)
 
 ## Privacy and Security
diff --git a/docusaurus.config.ts b/docusaurus.config.ts
index 5fe2c48a0..8da7f39a5 100644
--- a/docusaurus.config.ts
+++ b/docusaurus.config.ts
@@ -190,6 +190,10 @@ const config: Config = {
         {
           title: 'Terms & Policies',
           items: [
+            {
+              label: 'Privacy & Security',
+              href: '/privacy-and-security',
+            },
             {
               label: 'Privacy Policy',
               href: '/privacy-policy',
diff --git a/sidebars.ts b/sidebars.ts
index cbe75e891..0135dec61 100644
--- a/sidebars.ts
+++ b/sidebars.ts
@@ -282,7 +282,7 @@ const sidebars: SidebarsConfig = {
         },
         {
           type: 'doc',
-          id: 'product-highlights-and-benefits/privacy-security-data',
+          id: 'privacy-and-security',
           label: 'Privacy & Security',
         },
       ]
diff --git a/vercel.json b/vercel.json
index cf4589d22..0b4a20d3d 100644
--- a/vercel.json
+++ b/vercel.json
@@ -195,6 +195,11 @@
       "source": "/ama/building-a-more-extensible-development-environment",
       "destination": "/community/events/ama/building-a-more-extensible-development-environment",
       "statusCode": 301
+    },
+    {
+      "source": "/product-highlights-and-benefits/privacy-security-data",
+      "destination": "/privacy-and-security",
+      "statusCode": 301
     }
   ]
 }