diff --git a/Makefile b/Makefile index 2655167912..142b99cc54 100644 --- a/Makefile +++ b/Makefile @@ -213,6 +213,7 @@ logs: ## display app-dev logs (follow mode) .PHONY: logs run-backend: ## Start only the backend application and all needed services + @$(COMPOSE) up --force-recreate -d docspec @$(COMPOSE) up --force-recreate -d celery-dev @$(COMPOSE) up --force-recreate -d y-provider-development @$(COMPOSE) up --force-recreate -d nginx diff --git a/compose.yml b/compose.yml index a774f11e07..55e2a8a87a 100644 --- a/compose.yml +++ b/compose.yml @@ -217,3 +217,8 @@ services: kc_postgresql: condition: service_healthy restart: true + + docspec: + image: ghcr.io/docspecio/api:2.0.0 + ports: + - "4000:4000" \ No newline at end of file diff --git a/docs/env.md b/docs/env.md index 0b3f9b3bf6..7292791828 100644 --- a/docs/env.md +++ b/docs/env.md @@ -103,6 +103,7 @@ These are the environment variables you can set for the `impress-backend` contai | USER_OIDC_ESSENTIAL_CLAIMS | Essential claims in OIDC token | [] | | Y_PROVIDER_API_BASE_URL | Y Provider url | | | Y_PROVIDER_API_KEY | Y provider API key | | +| DOCSPEC_API_URL | URL to endpoint of DocSpec conversion API | | ## impress-frontend image diff --git a/env.d/development/common b/env.d/development/common index de857d5b2a..f6e1b54982 100644 --- a/env.d/development/common +++ b/env.d/development/common @@ -67,5 +67,7 @@ DJANGO_SERVER_TO_SERVER_API_TOKENS=server-api-token Y_PROVIDER_API_BASE_URL=http://y-provider-development:4444/api/ Y_PROVIDER_API_KEY=yprovider-api-key +DOCSPEC_API_URL=http://docspec:4000/conversion + # Theme customization -THEME_CUSTOMIZATION_CACHE_TIMEOUT=15 \ No newline at end of file +THEME_CUSTOMIZATION_CACHE_TIMEOUT=15 diff --git a/env.d/development/common.e2e b/env.d/development/common.e2e index 15434a6811..6394c8b2bb 100644 --- a/env.d/development/common.e2e +++ b/env.d/development/common.e2e @@ -6,4 +6,4 @@ Y_PROVIDER_API_BASE_URL=http://y-provider:4444/api/ # Throttle 
API_DOCUMENT_THROTTLE_RATE=1000/min -API_CONFIG_THROTTLE_RATE=1000/min \ No newline at end of file +API_CONFIG_THROTTLE_RATE=1000/min diff --git a/src/backend/core/api/serializers.py b/src/backend/core/api/serializers.py index 47754efe46..9870463dca 100644 --- a/src/backend/core/api/serializers.py +++ b/src/backend/core/api/serializers.py @@ -11,6 +11,7 @@ from django.utils.text import slugify from django.utils.translation import gettext_lazy as _ +from core.services import mime_types import magic from rest_framework import serializers @@ -18,7 +19,7 @@ from core.services.ai_services import AI_ACTIONS from core.services.converter_services import ( ConversionError, - YdocConverter, + Converter, ) @@ -188,6 +189,7 @@ class DocumentSerializer(ListDocumentSerializer): content = serializers.CharField(required=False) websocket = serializers.BooleanField(required=False, write_only=True) + file = serializers.FileField(required=False, write_only=True, allow_null=True) class Meta: model = models.Document @@ -204,6 +206,7 @@ class Meta: "deleted_at", "depth", "excerpt", + "file", "is_favorite", "link_role", "link_reach", @@ -461,7 +464,11 @@ def create(self, validated_data): language = user.language or language try: - document_content = YdocConverter().convert(validated_data["content"]) + document_content = Converter().convert( + validated_data["content"], + mime_types.MARKDOWN, + mime_types.YJS + ) except ConversionError as err: raise serializers.ValidationError( {"content": ["Could not convert content"]} diff --git a/src/backend/core/api/viewsets.py b/src/backend/core/api/viewsets.py index 1c1b9ef50a..ae0fcc9a40 100644 --- a/src/backend/core/api/viewsets.py +++ b/src/backend/core/api/viewsets.py @@ -40,14 +40,12 @@ from core.services.ai_services import AIService from core.services.collaboration_services import CollaborationService from core.services.converter_services import ( + ConversionError, ServiceUnavailableError as YProviderServiceUnavailableError, -) -from 
core.services.converter_services import ( ValidationError as YProviderValidationError, + Converter, ) -from core.services.converter_services import ( - YdocConverter, -) +from core.services import mime_types from core.tasks.mail import send_ask_for_access_mail from core.utils import extract_attachments, filter_descendants @@ -504,6 +502,28 @@ def perform_create(self, serializer): "IN SHARE ROW EXCLUSIVE MODE;" ) + # Remove file from validated_data as it's not a model field + # Process it if present + uploaded_file = serializer.validated_data.pop("file", None) + + # If a file is uploaded, convert it to Yjs format and set as content + if uploaded_file: + try: + file_content = uploaded_file.read() + + converter = Converter() + converted_content = converter.convert( + file_content, + content_type=uploaded_file.content_type, + accept=mime_types.YJS + ) + serializer.validated_data["content"] = converted_content + serializer.validated_data["title"] = uploaded_file.name + except ConversionError as err: + raise drf.exceptions.ValidationError( + {"file": ["Could not convert file content"]} + ) from err + obj = models.Document.add_root( creator=self.request.user, **serializer.validated_data, @@ -1603,14 +1623,14 @@ def content(self, request, pk=None): if base64_content is not None: # Convert using the y-provider service try: - yprovider = YdocConverter() + yprovider = Converter() result = yprovider.convert( base64.b64decode(base64_content), - "application/vnd.yjs.doc", + mime_types.YJS, { - "markdown": "text/markdown", - "html": "text/html", - "json": "application/json", + "markdown": mime_types.MARKDOWN, + "html": mime_types.HTML, + "json": mime_types.JSON, }[content_format], ) content = result diff --git a/src/backend/core/services/converter_services.py b/src/backend/core/services/converter_services.py index 9c79a7192d..8790bf9ad6 100644 --- a/src/backend/core/services/converter_services.py +++ b/src/backend/core/services/converter_services.py @@ -5,7 +5,9 @@ from 
class ConverterProtocol(typing.Protocol):
    """Structural interface shared by the conversion backends.

    Any object exposing ``convert(text, content_type, accept)`` satisfies it.
    """

    def convert(self, text, content_type, accept): ...


class Converter:
    """Facade routing a conversion request to the right microservice.

    DOCX input targeting Yjs is converted in two hops: DOCX -> BlockNote JSON
    via the DocSpec service, then BlockNote -> Yjs via the y-provider service.
    Every other (content_type, accept) pair is sent straight to the
    y-provider service.
    """

    docspec: ConverterProtocol
    ydoc: ConverterProtocol

    def __init__(self):
        self.docspec = DocSpecConverter()
        self.ydoc = YdocConverter()

    def convert(self, input, content_type, accept):
        """Convert ``input`` from ``content_type`` into ``accept``.

        Args:
            input: raw payload to convert (bytes or str, depending on type).
            content_type: MIME type of ``input`` (see ``mime_types``).
            accept: MIME type requested for the result.

        Returns:
            The converted payload, as produced by the selected backend.

        Raises:
            ConversionError: propagated from the backend services
                (``ValidationError`` or ``ServiceUnavailableError``).
        """
        if content_type == mime_types.DOCX and accept == mime_types.YJS:
            # Two-hop pipeline: DOCX -> BlockNote (DocSpec), then
            # BlockNote -> Yjs (y-provider). Delegate the second hop
            # directly instead of re-entering this dispatcher.
            blocknote = self.docspec.convert(
                input, mime_types.DOCX, mime_types.BLOCKNOTE
            )
            return self.ydoc.convert(
                blocknote, mime_types.BLOCKNOTE, mime_types.YJS
            )

        return self.ydoc.convert(input, content_type, accept)


class DocSpecConverter:
    """Service class for DocSpec conversion-related operations."""

    def _request(self, url, data, content_type):
        """POST ``data`` as a multipart file upload to the DocSpec API.

        Raises ``requests.HTTPError`` (a ``RequestException``) on 4xx/5xx
        responses; callers translate that into ``ServiceUnavailableError``.
        """
        response = requests.post(
            url,
            headers={"Accept": mime_types.BLOCKNOTE},
            files={"file": ("document.docx", data, content_type)},
            timeout=settings.CONVERSION_API_TIMEOUT,
            verify=settings.CONVERSION_API_SECURE,
        )
        response.raise_for_status()
        return response

    def convert(self, data, content_type, accept):
        """Convert a DOCX document to BlockNote JSON.

        Args:
            data: raw DOCX bytes; must be non-empty.
            content_type: must be ``mime_types.DOCX``.
            accept: must be ``mime_types.BLOCKNOTE``.

        Returns:
            bytes: the BlockNote JSON payload returned by the service.

        Raises:
            ValidationError: on empty input or an unsupported type pair.
            ServiceUnavailableError: when the DocSpec service cannot be
                reached or answers with an error status.
        """
        if not data:
            raise ValidationError("Input data cannot be empty")

        if content_type != mime_types.DOCX or accept != mime_types.BLOCKNOTE:
            raise ValidationError(
                f"Conversion from {content_type} to {accept} is not supported."
            )

        try:
            return self._request(
                settings.DOCSPEC_API_URL, data, content_type
            ).content
        except requests.RequestException as err:
            raise ServiceUnavailableError(
                "Failed to connect to DocSpec conversion service",
            ) from err
"""Service class for conversion-related operations.""" + """Service class for YDoc conversion-related operations.""" @property def auth_header(self): @@ -45,7 +104,7 @@ def _request(self, url, data, content_type, accept): return response def convert( - self, text, content_type="text/markdown", accept="application/vnd.yjs.doc" + self, text, content_type=mime_types.MARKDOWN, accept=mime_types.YJS ): """Convert a Markdown text into our internal format using an external microservice.""" @@ -59,14 +118,14 @@ def convert( content_type, accept, ) - if accept == "application/vnd.yjs.doc": + if accept == mime_types.YJS: return b64encode(response.content).decode("utf-8") - if accept in {"text/markdown", "text/html"}: + if accept in {mime_types.MARKDOWN, "text/html"}: return response.text - if accept == "application/json": + if accept == mime_types.JSON: return response.json() raise ValidationError("Unsupported format") except requests.RequestException as err: raise ServiceUnavailableError( - "Failed to connect to conversion service", + f"Failed to connect to YDoc conversion service {content_type}, {accept}", ) from err diff --git a/src/backend/core/services/mime_types.py b/src/backend/core/services/mime_types.py new file mode 100644 index 0000000000..84714e7f8f --- /dev/null +++ b/src/backend/core/services/mime_types.py @@ -0,0 +1,6 @@ +BLOCKNOTE = "application/vnd.blocknote+json" +YJS = "application/vnd.yjs.doc" +MARKDOWN = "text/markdown" +JSON = "application/json" +DOCX = "application/vnd.openxmlformats-officedocument.wordprocessingml.document" +HTML = "text/html" diff --git a/src/backend/impress/settings.py b/src/backend/impress/settings.py index 2229036c8a..6d2e653472 100755 --- a/src/backend/impress/settings.py +++ b/src/backend/impress/settings.py @@ -680,6 +680,12 @@ class Base(Configuration): environ_prefix=None, ) + # DocSpec API microservice + DOCSPEC_API_URL = values.Value( + environ_name="DOCSPEC_API_URL", + environ_prefix=None + ) + # Conversion endpoint 
CONVERSION_API_ENDPOINT = values.Value( default="convert", diff --git a/src/frontend/apps/e2e/__tests__/app-impress/assets/test_import.docx b/src/frontend/apps/e2e/__tests__/app-impress/assets/test_import.docx new file mode 100644 index 0000000000..7a5aaef1c5 Binary files /dev/null and b/src/frontend/apps/e2e/__tests__/app-impress/assets/test_import.docx differ diff --git a/src/frontend/apps/e2e/__tests__/app-impress/assets/test_import.md b/src/frontend/apps/e2e/__tests__/app-impress/assets/test_import.md new file mode 100644 index 0000000000..de83f2c6b7 --- /dev/null +++ b/src/frontend/apps/e2e/__tests__/app-impress/assets/test_import.md @@ -0,0 +1,63 @@ +![473389927-e4ff1794-69f3-460a-85f8-fec993cd74d6.png](http://localhost:3000/assets/logo-suite-numerique.png)![497094770-53e5f8e2-c93e-4a0b-a82f-cd184fd03f51.svg](http://localhost:3000/assets/assets/icon-docs.svg) + +# Lorem Ipsum Markdown Document + +## Introduction + +Lorem ipsum dolor sit amet, consectetur adipiscing elit. Nullam auctor, nisl eget ultricies tincidunt, nisl nisl aliquam nisl, eget ultricies nisl nisl eget nisl. + +### Subsection 1.1 + +* **Bold text**: Lorem ipsum dolor sit amet. + +* *Italic text*: Consectetur adipiscing elit. + +* ~~Strikethrough text~~: Nullam auctor, nisl eget ultricies tincidunt. + +1. First item in an ordered list. + +2. Second item in an ordered list. + + * Indented bullet point. + + * Another indented bullet point. + +3. Third item in an ordered list. + +### Subsection 1.2 + +**Code block:** + +```python +def hello_world(): + print("Hello, world!") +``` + +**Blockquote:** + +> Lorem ipsum dolor sit amet, consectetur adipiscing elit. Nullam auctor, nisl eget ultricies tincidunt. + +**Horizontal rule:** + +*** + +**Table:** + +| Syntax | Description | +| --------- | ----------- | +| Header | Title | +| Paragraph | Text | + +**Inline code:** + +Use the `printf()` function. 
// E2E tests for the document import feature: uploading .docx and .md files
// either through the file-chooser button or by dragging them onto the grid.
import { readFileSync } from 'fs';
import path from 'path';

import { Page, expect, test } from '@playwright/test';

test.beforeEach(async ({ page }) => {
  await page.goto('/');
});

test.describe('Doc Import', () => {
  test('it imports 2 docs with the import icon', async ({ page }) => {
    // Arm the file-chooser listener before clicking, so the event is caught.
    const fileChooserPromise = page.waitForEvent('filechooser');
    await page.getByLabel('Open the upload dialog').click();

    const fileChooser = await fileChooserPromise;
    await fileChooser.setFiles(path.join(__dirname, 'assets/test_import.docx'));
    await fileChooser.setFiles(path.join(__dirname, 'assets/test_import.md'));

    await expect(
      page.getByText(
        'The document "test_import.docx" has been successfully imported',
      ),
    ).toBeVisible();
    await expect(
      page.getByText(
        'The document "test_import.md" has been successfully imported',
      ),
    ).toBeVisible();

    const docsGrid = page.getByTestId('docs-grid');
    await expect(docsGrid.getByText('test_import.docx')).toBeVisible();
    await expect(docsGrid.getByText('test_import.md')).toBeVisible();
  });

  test('it imports 2 docs with the drag and drop area', async ({ page }) => {
    const docsGrid = page.getByTestId('docs-grid');
    await expect(docsGrid).toBeVisible();

    await dragAndDropFiles(page, "[data-testid='docs-grid']", [
      {
        filePath: path.join(__dirname, 'assets/test_import.docx'),
        fileName: 'test_import.docx',
        fileType:
          'application/vnd.openxmlformats-officedocument.wordprocessingml.document',
      },
      {
        filePath: path.join(__dirname, 'assets/test_import.md'),
        fileName: 'test_import.md',
        fileType: 'text/markdown',
      },
    ]);

    // Wait for success messages
    await expect(
      page.getByText(
        'The document "test_import.docx" has been successfully imported',
      ),
    ).toBeVisible();
    await expect(
      page.getByText(
        'The document "test_import.md" has been successfully imported',
      ),
    ).toBeVisible();

    await expect(docsGrid.getByText('test_import.docx').first()).toBeVisible();
    await expect(docsGrid.getByText('test_import.md').first()).toBeVisible();
  });

  test('it imports TOTO TUTU with the import icon', async ({ page }) => {
    const fileChooserPromise = page.waitForEvent('filechooser');
    await page.getByLabel('Open the upload dialog').click();

    const fileChooser = await fileChooserPromise;
    await fileChooser.setFiles(path.join(__dirname, 'assets/toto.docx'));
    await fileChooser.setFiles(path.join(__dirname, 'assets/tutu.docx'));

    await expect(
      page.getByText('The document "toto.docx" has been successfully imported'),
    ).toBeVisible();
    await expect(
      page.getByText('The document "tutu.docx" has been successfully imported'),
    ).toBeVisible();

    const docsGrid = page.getByTestId('docs-grid');
    await expect(docsGrid.getByText('toto.docx').first()).toBeVisible();
    await expect(docsGrid.getByText('tutu.docx').first()).toBeVisible();
  });

  test('it imports TOTO TUTU with the drag and drop area', async ({ page }) => {
    const docsGrid = page.getByTestId('docs-grid');
    await expect(docsGrid).toBeVisible();

    await dragAndDropFiles(page, "[data-testid='docs-grid']", [
      {
        filePath: path.join(__dirname, 'assets/toto.docx'),
        fileName: 'toto.docx',
        fileType:
          'application/vnd.openxmlformats-officedocument.wordprocessingml.document',
      },
      {
        filePath: path.join(__dirname, 'assets/tutu.docx'),
        fileName: 'tutu.docx',
        fileType:
          'application/vnd.openxmlformats-officedocument.wordprocessingml.document',
      },
    ]);

    // Wait for success messages
    await expect(
      page.getByText('The document "toto.docx" has been successfully imported'),
    ).toBeVisible();
    await expect(
      page.getByText('The document "tutu.docx" has been successfully imported'),
    ).toBeVisible();

    await expect(docsGrid.getByText('toto.docx').first()).toBeVisible();
    await expect(docsGrid.getByText('tutu.docx').first()).toBeVisible();
  });
});

// Simulate an HTML5 drag-and-drop of local files onto `selector`.
// Files are shipped into the page as base64 data URLs, rebuilt as File
// objects inside the browser context, and dispatched via a synthetic
// `drop` event carrying a DataTransfer handle.
const dragAndDropFiles = async (
  page: Page,
  selector: string,
  files: Array<{ filePath: string; fileName: string; fileType?: string }>,
) => {
  const filesData = files.map((file) => ({
    bufferData: `data:application/octet-stream;base64,${readFileSync(file.filePath).toString('base64')}`,
    fileName: file.fileName,
    fileType: file.fileType || '',
  }));

  const dataTransfer = await page.evaluateHandle(async (filesInfo) => {
    const dt = new DataTransfer();

    for (const fileInfo of filesInfo) {
      // Decode the data URL back into binary inside the page context.
      const blobData = await fetch(fileInfo.bufferData).then((res) =>
        res.blob(),
      );
      const file = new File([blobData], fileInfo.fileName, {
        type: fileInfo.fileType,
      });
      dt.items.add(file);
    }

    return dt;
  }, filesData);

  await page.dispatchEvent(selector, 'drop', { dataTransfer });
};
9ace686abc..9d57b68015 100644 --- a/src/frontend/apps/impress/package.json +++ b/src/frontend/apps/impress/package.json @@ -62,6 +62,7 @@ "react": "*", "react-aria-components": "1.13.0", "react-dom": "*", + "react-dropzone": "14.3.8", "react-i18next": "16.3.3", "react-intersection-observer": "10.0.0", "react-resizable-panels": "3.0.6", diff --git a/src/frontend/apps/impress/src/api/helpers.tsx b/src/frontend/apps/impress/src/api/helpers.tsx index cbc4d0b3c8..991c517610 100644 --- a/src/frontend/apps/impress/src/api/helpers.tsx +++ b/src/frontend/apps/impress/src/api/helpers.tsx @@ -20,7 +20,7 @@ export type DefinedInitialDataInfiniteOptionsAPI< QueryKey, TPageParam >; - +export type UseInfiniteQueryResultAPI = InfiniteData; export type InfiniteQueryConfig = Omit< DefinedInitialDataInfiniteOptionsAPI, 'queryKey' | 'initialData' | 'getNextPageParam' | 'initialPageParam' diff --git a/src/frontend/apps/impress/src/assets/icons/doc-all.svg b/src/frontend/apps/impress/src/assets/icons/doc-all.svg new file mode 100644 index 0000000000..a4e61a5aa0 --- /dev/null +++ b/src/frontend/apps/impress/src/assets/icons/doc-all.svg @@ -0,0 +1,20 @@ + + + + + + diff --git a/src/frontend/apps/impress/src/components/Box.tsx b/src/frontend/apps/impress/src/components/Box.tsx index dd57c6fa05..3e9e907bf1 100644 --- a/src/frontend/apps/impress/src/components/Box.tsx +++ b/src/frontend/apps/impress/src/components/Box.tsx @@ -4,6 +4,8 @@ import { CSSProperties, RuleSet } from 'styled-components/dist/types'; import { MarginPadding, + Spacings, + spacingValue, stylesMargin, stylesPadding, } from '@/utils/styleBuilder'; @@ -22,7 +24,7 @@ export interface BoxProps { $display?: CSSProperties['display']; $effect?: 'show' | 'hide'; $flex?: CSSProperties['flex']; - $gap?: CSSProperties['gap']; + $gap?: Spacings; $hasTransition?: boolean | 'slow'; $height?: CSSProperties['height']; $justify?: CSSProperties['justifyContent']; @@ -70,7 +72,7 @@ export const Box = styled('div')` ${({ $display, as }) => 
`display: ${$display || (as?.match('span|input') ? 'inline-flex' : 'flex')};`} ${({ $flex }) => $flex && `flex: ${$flex};`} - ${({ $gap }) => $gap && `gap: ${$gap};`} + ${({ $gap }) => $gap && `gap: ${spacingValue($gap)};`} ${({ $height }) => $height && `height: ${$height};`} ${({ $hasTransition }) => $hasTransition && $hasTransition === 'slow' diff --git a/src/frontend/apps/impress/src/components/Icon.tsx b/src/frontend/apps/impress/src/components/Icon.tsx index 3923450578..e5c6380600 100644 --- a/src/frontend/apps/impress/src/components/Icon.tsx +++ b/src/frontend/apps/impress/src/components/Icon.tsx @@ -1,21 +1,34 @@ import clsx from 'clsx'; +import React from 'react'; import { css } from 'styled-components'; import { Text, TextType } from '@/components'; -type IconProps = TextType & { +type IconBase = TextType & { disabled?: boolean; +}; + +type IconMaterialProps = IconBase & { iconName: string; variant?: 'filled' | 'outlined' | 'symbols-outlined'; + icon?: never; +}; + +type IconSVGProps = IconBase & { + icon: React.ReactNode; + iconName?: never; + variant?: never; }; + export const Icon = ({ className, - iconName, disabled, + iconName, + icon, variant = 'outlined', $theme = 'neutral', ...textProps -}: IconProps) => { +}: IconMaterialProps | IconSVGProps) => { const hasLabel = 'aria-label' in textProps || 'aria-labelledby' in textProps; const ariaHidden = 'aria-hidden' in textProps ? textProps['aria-hidden'] : !hasLabel; @@ -24,15 +37,15 @@ export const Icon = ({ - {iconName} + {iconName ?? 
icon} ); }; diff --git a/src/frontend/apps/impress/src/features/docs/docs-grid/api/useImportDoc.tsx b/src/frontend/apps/impress/src/features/docs/docs-grid/api/useImportDoc.tsx new file mode 100644 index 0000000000..35d878e05f --- /dev/null +++ b/src/frontend/apps/impress/src/features/docs/docs-grid/api/useImportDoc.tsx @@ -0,0 +1,88 @@ +import { VariantType, useToastProvider } from '@openfun/cunningham-react'; +import { + UseMutationOptions, + useMutation, + useQueryClient, +} from '@tanstack/react-query'; +import { useTranslation } from 'react-i18next'; + +import { + APIError, + UseInfiniteQueryResultAPI, + errorCauses, + fetchAPI, +} from '@/api'; +import { Doc, DocsResponse, KEY_LIST_DOC } from '@/docs/doc-management'; + +export const importDoc = async (file: File): Promise => { + const form = new FormData(); + form.append('file', file); + + const response = await fetchAPI(`documents/`, { + method: 'POST', + body: form, + withoutContentType: true, + }); + + if (!response.ok) { + throw new APIError('Failed to import the doc', await errorCauses(response)); + } + + return response.json() as Promise; +}; + +type UseImportDocOptions = UseMutationOptions; + +export function useImportDoc(props?: UseImportDocOptions) { + const { toast } = useToastProvider(); + const queryClient = useQueryClient(); + const { t } = useTranslation(); + + return useMutation({ + mutationFn: importDoc, + ...props, + onSuccess: (...successProps) => { + queryClient.setQueriesData>( + { queryKey: [KEY_LIST_DOC] }, + (oldData) => { + if (!oldData || oldData?.pages.length === 0) { + return oldData; + } + + return { + ...oldData, + pages: oldData.pages.map((page, index) => { + // Add the new doc to the first page only + if (index === 0) { + return { + ...page, + results: [successProps[0], ...page.results], + }; + } + return page; + }), + }; + }, + ); + + toast( + t('The document "{{documentName}}" has been successfully imported', { + documentName: successProps?.[0].title || '', + }), + 
VariantType.SUCCESS, + ); + + props?.onSuccess?.(...successProps); + }, + onError: (...errorProps) => { + toast( + t(`The document "{{documentName}}" import has failed`, { + documentName: errorProps?.[1].name || '', + }), + VariantType.ERROR, + ); + + props?.onError?.(...errorProps); + }, + }); +} diff --git a/src/frontend/apps/impress/src/features/docs/docs-grid/components/DocsGrid.tsx b/src/frontend/apps/impress/src/features/docs/docs-grid/components/DocsGrid.tsx index 625d994643..51c0a282d6 100644 --- a/src/frontend/apps/impress/src/features/docs/docs-grid/components/DocsGrid.tsx +++ b/src/frontend/apps/impress/src/features/docs/docs-grid/components/DocsGrid.tsx @@ -1,14 +1,21 @@ -import { Button } from '@openfun/cunningham-react'; -import { useMemo } from 'react'; +import { + Button, + VariantType, + useToastProvider, +} from '@openfun/cunningham-react'; +import { useMemo, useState } from 'react'; +import { useDropzone } from 'react-dropzone'; import { useTranslation } from 'react-i18next'; import { InView } from 'react-intersection-observer'; import { css } from 'styled-components'; -import { Box, Card, Text } from '@/components'; +import AllDocs from '@/assets/icons/doc-all.svg'; +import { Box, Card, Icon, Text } from '@/components'; import { DocDefaultFilter, useInfiniteDocs } from '@/docs/doc-management'; import { useResponsiveStore } from '@/stores'; import { useInfiniteDocsTrashbin } from '../api'; +import { useImportDoc } from '../api/useImportDoc'; import { useResponsiveDocGrid } from '../hooks/useResponsiveDocGrid'; import { @@ -24,6 +31,41 @@ export const DocsGrid = ({ target = DocDefaultFilter.ALL_DOCS, }: DocsGridProps) => { const { t } = useTranslation(); + const [isDragOver, setIsDragOver] = useState(false); + const { toast } = useToastProvider(); + const { getRootProps, getInputProps, open } = useDropzone({ + accept: { + 'application/vnd.openxmlformats-officedocument.wordprocessingml.document': + ['.docx'], + 'text/markdown': ['.md'], + }, + 
onDrop(acceptedFiles) { + setIsDragOver(false); + for (const file of acceptedFiles) { + importDoc(file); + } + }, + onDragEnter: () => { + setIsDragOver(true); + }, + onDragLeave: () => { + setIsDragOver(false); + }, + onDropRejected(fileRejections) { + toast( + t( + `The document "{{documentName}}" import has failed (only .docx and .md files are allowed)`, + { + documentName: fileRejections?.[0].file.name || '', + }, + ), + VariantType.ERROR, + ); + }, + noClick: true, + }); + const { mutate: importDoc } = useImportDoc(); + const withUpload = target === DocDefaultFilter.ALL_DOCS; const { isDesktop } = useResponsiveStore(); const { flexLeft, flexRight } = useResponsiveDocGrid(); @@ -60,21 +102,6 @@ export const DocsGrid = ({ void fetchNextPage(); }; - let title = t('All docs'); - switch (target) { - case DocDefaultFilter.MY_DOCS: - title = t('My docs'); - break; - case DocDefaultFilter.SHARED_WITH_ME: - title = t('Shared with me'); - break; - case DocDefaultFilter.TRASHBIN: - title = t('Trashbin'); - break; - default: - title = t('All docs'); - } - return ( - - {title} - + {withUpload && } + {!hasDocs && !loading && ( @@ -110,7 +145,11 @@ export const DocsGrid = ({ )} {hasDocs && ( - + void; + withUpload: boolean; +}) => { + const { t } = useTranslation(); + const { isDesktop } = useResponsiveStore(); + + let title = t('All docs'); + let icon = } />; + switch (target) { + case DocDefaultFilter.MY_DOCS: + icon = ; + title = t('My docs'); + break; + case DocDefaultFilter.SHARED_WITH_ME: + icon = ; + title = t('Shared with me'); + break; + case DocDefaultFilter.TRASHBIN: + icon = ; + title = t('Trashbin'); + break; + default: + title = t('All docs'); + } + + return ( + + + {icon} + + {title} + + + {withUpload && ( + + )} + + ); +}; + const useDocsQuery = (target: DocDefaultFilter) => { const trashbinQuery = useInfiniteDocsTrashbin( { diff --git a/src/frontend/apps/impress/src/features/left-panel/components/LefPanelTargetFilters.tsx 
b/src/frontend/apps/impress/src/features/left-panel/components/LefPanelTargetFilters.tsx index 75f13cb688..26dded8afc 100644 --- a/src/frontend/apps/impress/src/features/left-panel/components/LefPanelTargetFilters.tsx +++ b/src/frontend/apps/impress/src/features/left-panel/components/LefPanelTargetFilters.tsx @@ -2,6 +2,7 @@ import { usePathname, useSearchParams } from 'next/navigation'; import { useTranslation } from 'react-i18next'; import { css } from 'styled-components'; +import AllDocs from '@/assets/icons/doc-all.svg'; import { Box, Icon, StyledLink, Text } from '@/components'; import { useCunninghamTheme } from '@/cunningham'; import { DocDefaultFilter } from '@/docs/doc-management'; @@ -21,22 +22,22 @@ export const LeftPanelTargetFilters = () => { const defaultQueries = [ { - icon: 'apps', + icon: } />, label: t('All docs'), targetQuery: DocDefaultFilter.ALL_DOCS, }, { - icon: 'lock', + icon: , label: t('My docs'), targetQuery: DocDefaultFilter.MY_DOCS, }, { - icon: 'group', + icon: , label: t('Shared with me'), targetQuery: DocDefaultFilter.SHARED_WITH_ME, }, { - icon: 'delete', + icon: , label: t('Trashbin'), targetQuery: DocDefaultFilter.TRASHBIN, }, @@ -96,7 +97,7 @@ export const LeftPanelTargetFilters = () => { } `} > - + {query.icon} {query.label} ); diff --git a/src/frontend/servers/y-provider/__tests__/convert.test.ts b/src/frontend/servers/y-provider/__tests__/convert.test.ts index 44c21c2870..a31f4b5893 100644 --- a/src/frontend/servers/y-provider/__tests__/convert.test.ts +++ b/src/frontend/servers/y-provider/__tests__/convert.test.ts @@ -69,7 +69,7 @@ describe('Server Tests', () => { const response = await request(app) .post('/api/convert') .set('origin', origin) - .set('authorization', 'wrong-api-key') + .set('authorization', `Bearer wrong-api-key`) .set('content-type', 'application/json'); expect(response.status).toBe(401); @@ -99,7 +99,7 @@ describe('Server Tests', () => { const response = await request(app) .post('/api/convert') 
.set('origin', origin) - .set('authorization', apiKey) + .set('authorization', `Bearer ${apiKey}`) .set('content-type', 'application/json'); expect(response.status).toBe(400); @@ -114,7 +114,7 @@ describe('Server Tests', () => { const response = await request(app) .post('/api/convert') .set('origin', origin) - .set('authorization', apiKey) + .set('authorization', `Bearer ${apiKey}`) .set('content-type', 'application/json') .send(''); @@ -129,9 +129,10 @@ describe('Server Tests', () => { const response = await request(app) .post('/api/convert') .set('origin', origin) - .set('authorization', apiKey) + .set('authorization', `Bearer ${apiKey}`) .set('content-type', 'image/png') .send('randomdata'); + expect(response.status).toBe(415); expect(response.body).toStrictEqual({ error: 'Unsupported Content-Type' }); }); @@ -141,38 +142,73 @@ describe('Server Tests', () => { const response = await request(app) .post('/api/convert') .set('origin', origin) - .set('authorization', apiKey) + .set('authorization', `Bearer ${apiKey}`) .set('content-type', 'text/markdown') .set('accept', 'image/png') .send('# Header'); + expect(response.status).toBe(406); expect(response.body).toStrictEqual({ error: 'Unsupported format' }); }); - test.each([[apiKey], [`Bearer ${apiKey}`]])( - 'POST /api/convert with correct content with Authorization: %s', - async (authHeader) => { - const app = initApp(); + test('POST /api/convert BlockNote to Markdown', async () => { + const app = initApp(); + const response = await request(app) + .post('/api/convert') + .set('origin', origin) + .set('authorization', `Bearer ${apiKey}`) + .set('content-type', 'application/vnd.blocknote+json') + .set('accept', 'text/markdown') + .send(expectedBlocks); - const response = await request(app) - .post('/api/convert') - .set('Origin', origin) - .set('Authorization', authHeader) - .set('content-type', 'text/markdown') - .set('accept', 'application/vnd.yjs.doc') - .send(expectedMarkdown); + 
expect(response.status).toBe(200); + expect(response.header['content-type']).toBe( + 'text/markdown; charset=utf-8', + ); + expect(typeof response.text).toBe('string'); + expect(response.text.trim()).toBe(expectedMarkdown); + }); - expect(response.status).toBe(200); - expect(response.body).toBeInstanceOf(Buffer); + test('POST /api/convert BlockNote to Yjs', async () => { + const app = initApp(); + const editor = ServerBlockNoteEditor.create(); + const blocks = await editor.tryParseMarkdownToBlocks(expectedMarkdown); + const response = await request(app) + .post('/api/convert') + .set('origin', origin) + .set('authorization', `Bearer ${apiKey}`) + .set('content-type', 'application/vnd.blocknote+json') + .set('accept', 'application/vnd.yjs.doc') + .send(blocks) + .responseType('blob'); - const editor = ServerBlockNoteEditor.create(); - const doc = new Y.Doc(); - Y.applyUpdate(doc, response.body); - const blocks = editor.yDocToBlocks(doc, 'document-store'); + expect(response.status).toBe(200); + expect(response.header['content-type']).toBe('application/vnd.yjs.doc'); - expect(blocks).toStrictEqual(expectedBlocks); - }, - ); + // Decode the Yjs response and verify it contains the correct blocks + const responseBuffer = Buffer.from(response.body as Buffer); + const ydoc = new Y.Doc(); + Y.applyUpdate(ydoc, responseBuffer); + const decodedBlocks = editor.yDocToBlocks(ydoc, 'document-store'); + + expect(decodedBlocks).toStrictEqual(expectedBlocks); + }); + + test('POST /api/convert BlockNote to HTML', async () => { + const app = initApp(); + const response = await request(app) + .post('/api/convert') + .set('origin', origin) + .set('authorization', `Bearer ${apiKey}`) + .set('content-type', 'application/vnd.blocknote+json') + .set('accept', 'text/html') + .send(expectedBlocks); + + expect(response.status).toBe(200); + expect(response.header['content-type']).toBe('text/html; charset=utf-8'); + expect(typeof response.text).toBe('string'); + 
expect(response.text).toBe(expectedHTML); + }); test('POST /api/convert Yjs to HTML', async () => { const app = initApp(); @@ -183,10 +219,11 @@ describe('Server Tests', () => { const response = await request(app) .post('/api/convert') .set('origin', origin) - .set('authorization', apiKey) + .set('authorization', `Bearer ${apiKey}`) .set('content-type', 'application/vnd.yjs.doc') .set('accept', 'text/html') .send(Buffer.from(yjsUpdate)); + expect(response.status).toBe(200); expect(response.header['content-type']).toBe('text/html; charset=utf-8'); expect(typeof response.text).toBe('string'); @@ -202,10 +239,11 @@ describe('Server Tests', () => { const response = await request(app) .post('/api/convert') .set('origin', origin) - .set('authorization', apiKey) + .set('authorization', `Bearer ${apiKey}`) .set('content-type', 'application/vnd.yjs.doc') .set('accept', 'text/markdown') .send(Buffer.from(yjsUpdate)); + expect(response.status).toBe(200); expect(response.header['content-type']).toBe( 'text/markdown; charset=utf-8', @@ -223,15 +261,16 @@ describe('Server Tests', () => { const response = await request(app) .post('/api/convert') .set('origin', origin) - .set('authorization', apiKey) + .set('authorization', `Bearer ${apiKey}`) .set('content-type', 'application/vnd.yjs.doc') .set('accept', 'application/json') .send(Buffer.from(yjsUpdate)); + expect(response.status).toBe(200); expect(response.header['content-type']).toBe( 'application/json; charset=utf-8', ); - expect(Array.isArray(response.body)).toBe(true); + expect(response.body).toBeInstanceOf(Array); expect(response.body).toStrictEqual(expectedBlocks); }); @@ -240,15 +279,16 @@ describe('Server Tests', () => { const response = await request(app) .post('/api/convert') .set('origin', origin) - .set('authorization', apiKey) + .set('authorization', `Bearer ${apiKey}`) .set('content-type', 'text/markdown') .set('accept', 'application/json') .send(expectedMarkdown); + expect(response.status).toBe(200); 
expect(response.header['content-type']).toBe( 'application/json; charset=utf-8', ); - expect(Array.isArray(response.body)).toBe(true); + expect(response.body).toBeInstanceOf(Array); expect(response.body).toStrictEqual(expectedBlocks); }); @@ -257,11 +297,12 @@ describe('Server Tests', () => { const response = await request(app) .post('/api/convert') .set('origin', origin) - .set('authorization', apiKey) + .set('authorization', `Bearer ${apiKey}`) .set('content-type', 'application/vnd.yjs.doc') .set('accept', 'application/json') .send(Buffer.from('notvalidyjs')); + expect(response.status).toBe(400); - expect(response.body).toStrictEqual({ error: 'Invalid Yjs content' }); + expect(response.body).toStrictEqual({ error: 'Invalid content' }); }); }); diff --git a/src/frontend/servers/y-provider/src/handlers/convertHandler.ts b/src/frontend/servers/y-provider/src/handlers/convertHandler.ts index bdfbd2c8a9..0452724c54 100644 --- a/src/frontend/servers/y-provider/src/handlers/convertHandler.ts +++ b/src/frontend/servers/y-provider/src/handlers/convertHandler.ts @@ -14,27 +14,115 @@ interface ErrorResponse { error: string; } +type ConversionResponseBody = Uint8Array | string | object | ErrorResponse; + +interface InputReader { + supportedContentTypes: string[]; + read(data: Buffer): Promise; +} + +interface OutputWriter { + supportedContentTypes: string[]; + write(blocks: PartialBlock[]): Promise; +} + const editor = ServerBlockNoteEditor.create< DefaultBlockSchema, DefaultInlineContentSchema, DefaultStyleSchema >(); +const ContentTypes = { + XMarkdown: 'text/x-markdown', + Markdown: 'text/markdown', + YJS: 'application/vnd.yjs.doc', + FormUrlEncoded: 'application/x-www-form-urlencoded', + OctetStream: 'application/octet-stream', + HTML: 'text/html', + BlockNote: 'application/vnd.blocknote+json', + JSON: 'application/json', +} as const; + +const createYDocument = (blocks: PartialBlock[]) => + editor.blocksToYDoc(blocks, 'document-store'); + +const readers: InputReader[] = 
[ + { + // application/x-www-form-urlencoded is interpreted as Markdown for backward compatibility + supportedContentTypes: [ + ContentTypes.Markdown, + ContentTypes.XMarkdown, + ContentTypes.FormUrlEncoded, + ], + read: (data) => editor.tryParseMarkdownToBlocks(data.toString()), + }, + { + supportedContentTypes: [ContentTypes.YJS, ContentTypes.OctetStream], + read: async (data) => { + const ydoc = new Y.Doc(); + Y.applyUpdate(ydoc, data); + return editor.yDocToBlocks(ydoc, 'document-store') as PartialBlock[]; + }, + }, + { + supportedContentTypes: [ContentTypes.BlockNote], + read: async (data) => JSON.parse(data.toString()), + }, +]; + +const writers: OutputWriter[] = [ + { + supportedContentTypes: [ContentTypes.BlockNote, ContentTypes.JSON], + write: async (blocks) => blocks, + }, + { + supportedContentTypes: [ContentTypes.YJS, ContentTypes.OctetStream], + write: async (blocks) => Y.encodeStateAsUpdate(createYDocument(blocks)), + }, + { + supportedContentTypes: [ContentTypes.Markdown, ContentTypes.XMarkdown], + write: (blocks) => editor.blocksToMarkdownLossy(blocks), + }, + { + supportedContentTypes: [ContentTypes.HTML], + write: (blocks) => editor.blocksToHTMLLossy(blocks), + }, +]; + +const normalizeContentType = (value: string) => value.split(';')[0]; + export const convertHandler = async ( req: Request, - res: Response, + res: Response, ) => { if (!req.body || req.body.length === 0) { res.status(400).json({ error: 'Invalid request: missing content' }); return; } - const contentType = (req.header('content-type') || 'text/markdown').split( - ';', - )[0]; - const accept = (req.header('accept') || 'application/vnd.yjs.doc').split( - ';', - )[0]; + const contentType = normalizeContentType( + req.header('content-type') || ContentTypes.Markdown, + ); + + const reader = readers.find((reader) => + reader.supportedContentTypes.includes(contentType), + ); + + if (!reader) { + res.status(415).json({ error: 'Unsupported Content-Type' }); + return; + } + + const accept = 
normalizeContentType(req.header('accept') || ContentTypes.YJS); + + const writer = writers.find((writer) => + writer.supportedContentTypes.includes(accept), + ); + + if (!writer) { + res.status(406).json({ error: 'Unsupported format' }); + return; + } let blocks: | PartialBlock< @@ -44,63 +132,23 @@ export const convertHandler = async ( >[] | null = null; try { - // First, convert from the input format to blocks - // application/x-www-form-urlencoded is interpreted as Markdown for backward compatibility - if ( - contentType === 'text/markdown' || - contentType === 'application/x-www-form-urlencoded' - ) { - blocks = await editor.tryParseMarkdownToBlocks(req.body.toString()); - } else if ( - contentType === 'application/vnd.yjs.doc' || - contentType === 'application/octet-stream' - ) { - try { - const ydoc = new Y.Doc(); - Y.applyUpdate(ydoc, req.body); - blocks = editor.yDocToBlocks(ydoc, 'document-store') as PartialBlock[]; - } catch (e) { - logger('Invalid Yjs content:', e); - res.status(400).json({ error: 'Invalid Yjs content' }); - return; - } - } else { - res.status(415).json({ error: 'Unsupported Content-Type' }); + try { + blocks = await reader.read(req.body); + } catch (e) { + logger('Invalid content:', e); + res.status(400).json({ error: 'Invalid content' }); return; } + if (!blocks || blocks.length === 0) { res.status(500).json({ error: 'No valid blocks were generated' }); return; } - // Then, convert from blocks to the output format - if (accept === 'application/json') { - res.status(200).json(blocks); - } else { - const yDocument = editor.blocksToYDoc(blocks, 'document-store'); - - if ( - accept === 'application/vnd.yjs.doc' || - accept === 'application/octet-stream' - ) { - res - .status(200) - .setHeader('content-type', 'application/octet-stream') - .send(Y.encodeStateAsUpdate(yDocument)); - } else if (accept === 'text/markdown') { - res - .status(200) - .setHeader('content-type', 'text/markdown') - .send(await editor.blocksToMarkdownLossy(blocks)); - 
} else if (accept === 'text/html') { - res - .status(200) - .setHeader('content-type', 'text/html') - .send(await editor.blocksToHTMLLossy(blocks)); - } else { - res.status(406).json({ error: 'Unsupported format' }); - } - } + res + .status(200) + .setHeader('content-type', accept) + .send(await writer.write(blocks)); } catch (e) { logger('conversion failed:', e); res.status(500).json({ error: 'An error occurred' }); diff --git a/src/frontend/yarn.lock b/src/frontend/yarn.lock index 5479ba1218..6b1a740e31 100644 --- a/src/frontend/yarn.lock +++ b/src/frontend/yarn.lock @@ -1613,16 +1613,11 @@ dependencies: eslint-visitor-keys "^3.4.3" -"@eslint-community/regexpp@^4.10.0": +"@eslint-community/regexpp@^4.10.0", "@eslint-community/regexpp@^4.12.1": version "4.12.2" resolved "https://registry.yarnpkg.com/@eslint-community/regexpp/-/regexpp-4.12.2.tgz#bccdf615bcf7b6e8db830ec0b8d21c9a25de597b" integrity sha512-EriSTlt5OC9/7SXkRSCAhfSxxoSUgBm33OH+IkwbdpgoqsSsUg7y3uh+IICI/Qg4BBWr3U2i39RpmycbxMq4ew== -"@eslint-community/regexpp@^4.12.1": - version "4.12.1" - resolved "https://registry.yarnpkg.com/@eslint-community/regexpp/-/regexpp-4.12.1.tgz#cfc6cffe39df390a3841cde2abccf92eaa7ae0e0" - integrity sha512-CCZCDJuduB9OUkFkY2IgppNZMi2lBQgD2qzwXkEia16cge2pijY/aXi96CJMquDMn3nJdlPV1A5KrJEXwfLNzQ== - "@eslint/config-array@^0.21.1": version "0.21.1" resolved "https://registry.yarnpkg.com/@eslint/config-array/-/config-array-0.21.1.tgz#7d1b0060fea407f8301e932492ba8c18aff29713" @@ -6948,6 +6943,11 @@ at-least-node@^1.0.0: resolved "https://registry.yarnpkg.com/at-least-node/-/at-least-node-1.0.0.tgz#602cd4b46e844ad4effc92a8011a3c46e0238dc2" integrity sha512-+q/t7Ekv1EDY2l6Gda6LLiX14rU9TV20Wa3ofeQmwPFZbOMo9DXrLbOjFaaclkXKWidIaopwAObQDqwWtGUjqg== +attr-accept@^2.2.4: + version "2.2.5" + resolved "https://registry.yarnpkg.com/attr-accept/-/attr-accept-2.2.5.tgz#d7061d958e6d4f97bf8665c68b75851a0713ab5e" + integrity 
sha512-0bDNnY/u6pPwHDMoF0FieU354oBi0a8rD9FcsLwzcGWbc8KS8KPIi7y+s13OlVY+gMWc/9xEMUgNE6Qm8ZllYQ== + available-typed-arrays@^1.0.7: version "1.0.7" resolved "https://registry.yarnpkg.com/available-typed-arrays/-/available-typed-arrays-1.0.7.tgz#a5cc375d6a03c2efc87a553f3e0b1522def14846" @@ -8954,6 +8954,13 @@ file-entry-cache@^8.0.0: dependencies: flat-cache "^4.0.0" +file-selector@^2.1.0: + version "2.1.2" + resolved "https://registry.yarnpkg.com/file-selector/-/file-selector-2.1.2.tgz#fe7c7ee9e550952dfbc863d73b14dc740d7de8b4" + integrity sha512-QgXo+mXTe8ljeqUFaX3QVHc5osSItJ/Km+xpocx0aSqWGMSCf6qYs/VnzZgS864Pjn5iceMRFigeAV7AfTlaig== + dependencies: + tslib "^2.7.0" + filelist@^1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/filelist/-/filelist-1.0.4.tgz#f78978a1e944775ff9e62e744424f215e58352b5" @@ -12907,6 +12914,15 @@ react-dom@*, react-dom@19.2.0: dependencies: scheduler "^0.27.0" +react-dropzone@14.3.8: + version "14.3.8" + resolved "https://registry.yarnpkg.com/react-dropzone/-/react-dropzone-14.3.8.tgz#a7eab118f8a452fe3f8b162d64454e81ba830582" + integrity sha512-sBgODnq+lcA4P296DY4wacOZz3JFpD99fp+hb//iBO2HHnyeZU3FwWyXJ6salNpqQdsZrgMrotuko/BdJMV8Ug== + dependencies: + attr-accept "^2.2.4" + file-selector "^2.1.0" + prop-types "^15.8.1" + react-i18next@16.3.3: version "16.3.3" resolved "https://registry.yarnpkg.com/react-i18next/-/react-i18next-16.3.3.tgz#098ff5443d0436a78692ca76303b2219aca32989" @@ -14686,7 +14702,7 @@ tslib@2.6.2: resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.6.2.tgz#703ac29425e7b37cd6fd456e92404d46d1f3e4ae" integrity sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q== -tslib@^2.0.0, tslib@^2.0.3, tslib@^2.1.0, tslib@^2.4.0, tslib@^2.6.2, tslib@^2.8.0: +tslib@^2.0.0, tslib@^2.0.3, tslib@^2.1.0, tslib@^2.4.0, tslib@^2.6.2, tslib@^2.7.0, tslib@^2.8.0: version "2.8.1" resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.8.1.tgz#612efe4ed235d567e8aba5f2a5fab70280ade83f" integrity 
sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==