diff --git a/.gitignore b/.gitignore index 161fa3f..0baae0f 100644 --- a/.gitignore +++ b/.gitignore @@ -1,5 +1,31 @@ # Python-generated files __pycache__/ *.cpython*.pyc -evolutionapi.egg-info/ -dist/ \ No newline at end of file +*.pyc +*.pyo +*.pyd + +# Distribution / packaging +dist/ +build/ +*.egg-info/ +.eggs/ + +# Virtual environments +.venv/ +venv/ +env/ +ENV/ + +# uv +.uv/ + +# Ruff +.ruff_cache/ + +# IDE +.vscode/ +.idea/ +*.swp +*.swo +*~ \ No newline at end of file diff --git a/README.md b/README.md index 6810a2f..934f03c 100644 --- a/README.md +++ b/README.md @@ -1242,4 +1242,60 @@ The WebSocket Manager has robust error handling: 3. Use logging for debugging and monitoring 4. Consider implementing a heartbeat mechanism if needed 5. Keep your API token secure and don't expose it in logs -6. Keep your API token secure and don't expose it in logs + +## Contributing + +This project uses Python >= 3.8 and uv: + +### Setup Development Environment + +```bash +curl -LsSf https://astral.sh/uv/install.sh | sh + +git clone https://github.com/EvolutionAPI/evolution-client-python.git +cd evolution-client-python + +uv sync +``` + +### Code Quality + +This project uses [ruff](https://docs.astral.sh/ruff/) for linting and formatting: + +```bash +# Format code +uv run ruff format + +# Check linting +uv run ruff check + +# Fix auto-fixable issues +uv run ruff check --fix +``` + +### Building and Publishing + +```bash +# Build the package +uv build + +# Publish to PyPI (requires credentials) +uv publish + +# Or use the provided script +./publish.sh +``` + +### Adding Dependencies + +```bash +# Add a runtime dependency +uv add package-name + +# Add a development dependency +uv add --dev package-name +``` + +## License + +MIT diff --git a/build/lib/evolution/__init__.py b/build/lib/evolution/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/build/lib/evolution/client.py b/build/lib/evolution/client.py deleted file mode 100644 index 991d756..0000000 --- a/build/lib/evolution/client.py +++ /dev/null @@ -1,81 +0,0 @@ -import requests -from .exceptions import EvolutionAuthenticationError, EvolutionNotFoundError, EvolutionAPIError -from .services.instance import InstanceService -from .services.instance_operations import InstanceOperationsService -from .services.message import MessageService -from .services.call import CallService -from .services.chat import ChatService -from .services.label import LabelService -from .services.profile import ProfileService -from .services.group import GroupService -class EvolutionClient: - """ - Cliente para interagir com a API Evolution. - - Args: - base_url (str): A URL base do servidor da API Evolution. - api_token (str): O token de autenticação para acessar a API. 
- """ - - def __init__(self, base_url: str, api_token: str): - self.base_url = base_url.rstrip('/') - self.api_token = api_token - self.instances = InstanceService(self) - self.instance_operations = InstanceOperationsService(self) - self.messages = MessageService(self) - self.calls = CallService(self) - self.chat = ChatService(self) - self.label = LabelService(self) - self.profile = ProfileService(self) - self.group = GroupService(self) - - def _get_headers(self, instance_token: str = None): - return { - 'apikey': instance_token or self.api_token, - 'Content-Type': 'application/json' - } - - def _get_full_url(self, endpoint): - return f'{self.base_url}/{endpoint}' - - def _handle_response(self, response): - if response.status_code == 401: - raise EvolutionAuthenticationError('Falha na autenticação.') - elif response.status_code == 404: - raise EvolutionNotFoundError('Recurso não encontrado.') - elif response.ok: - try: - return response.json() - except ValueError: - return response.content - else: - error_detail = '' - try: - error_detail = f' - {response.json()}' - except: - error_detail = f' - {response.text}' - raise EvolutionAPIError(f'Erro na requisição: {response.status_code}{error_detail}') - - def get(self, endpoint: str, instance_token: str = None): - """Faz uma requisição GET.""" - url = self._get_full_url(endpoint) - response = requests.get(url, headers=self._get_headers(instance_token)) - return self._handle_response(response) - - def post(self, endpoint: str, data: dict = None, instance_token: str = None): - """Faz uma requisição POST.""" - url = self._get_full_url(endpoint) - response = requests.post(url, headers=self._get_headers(instance_token), json=data) - return self._handle_response(response) - - def put(self, endpoint, data=None): - """Faz uma requisição PUT.""" - url = self._get_full_url(endpoint) - response = requests.put(url, headers=self.headers, json=data) - return self._handle_response(response) - - def delete(self, endpoint: str, instance_token: str = None): - """Faz uma requisição DELETE.""" - url = self._get_full_url(endpoint) - response = requests.delete(url, headers=self._get_headers(instance_token)) - return self._handle_response(response) diff --git a/build/lib/evolution/exceptions.py b/build/lib/evolution/exceptions.py deleted file mode 100644 index ee7e177..0000000 --- a/build/lib/evolution/exceptions.py +++ /dev/null @@ -1,11 +0,0 @@ -class EvolutionAPIError(Exception): - """Erro genérico da API Evolution.""" - pass - -class EvolutionAuthenticationError(EvolutionAPIError): - """Erro de autenticação com a API Evolution.""" - pass - -class EvolutionNotFoundError(EvolutionAPIError): - """Recurso não encontrado na API Evolution.""" - pass diff --git a/build/lib/evolution/models/__init__.py b/build/lib/evolution/models/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/build/lib/evolution/models/call.py b/build/lib/evolution/models/call.py deleted file mode 100644 index 991463b..0000000 --- a/build/lib/evolution/models/call.py +++ /dev/null @@ -1,16 +0,0 @@ -class BaseCall: - def __init__(self, **kwargs): - self.__dict__.update({k: v for k, v in kwargs.items() if v is not None}) - -class FakeCall(BaseCall): - def __init__( - self, - number: str, - isVideo: bool, - callDuration: int - ): - super().__init__( - number=number, - isVideo=isVideo, - callDuration=callDuration - ) \ No newline at end of file diff --git a/build/lib/evolution/models/chat.py b/build/lib/evolution/models/chat.py deleted file mode 100644 index 4e985b3..0000000 --- 
a/build/lib/evolution/models/chat.py +++ /dev/null @@ -1,93 +0,0 @@ -from typing import List, Optional, Dict, Any - -class BaseChat: - def __init__(self, **kwargs): - self.__dict__.update({k: v for k, v in kwargs.items() if v is not None}) - -class CheckIsWhatsappNumber(BaseChat): - def __init__( - self, - numbers: List[str] - ): - super().__init__( - numbers=numbers - ) - -class MessageKey: - def __init__( - self, - remote_jid: str, - from_me: bool, - id: str, - participant: Optional[str] = None - ): - self.remoteJid = remote_jid - self.fromMe = from_me - self.id = id - self.participant = participant - -class ReadMessage: - def __init__( - self, - remote_jid: str, - from_me: bool, - id: str - ): - self.remoteJid = remote_jid - self.fromMe = from_me - self.id = id - -class ArchiveChat: - def __init__( - self, - last_message: Dict[str, Any], - chat: str, - archive: bool - ): - self.lastMessage = last_message - self.chat = chat - self.archive = archive - -class UnreadChat: - def __init__( - self, - last_message: Dict[str, Any], - chat: str - ): - self.lastMessage = last_message - self.chat = chat - -class ProfilePicture: - def __init__(self, number: str): - self.number = number - -class MediaMessage: - def __init__( - self, - message: Dict[str, Any], - convert_to_mp4: bool = False - ): - self.message = message - self.convertToMp4 = convert_to_mp4 - -class UpdateMessage: - def __init__( - self, - number: str, - key: Dict[str, Any], - text: str - ): - self.number = number - self.key = key - self.text = text - -class Presence: - def __init__( - self, - number: str, - delay: int, - presence: str - ): - self.number = number - self.delay = delay - self.presence = presence \ No newline at end of file diff --git a/build/lib/evolution/models/group.py b/build/lib/evolution/models/group.py deleted file mode 100644 index 5a21460..0000000 --- a/build/lib/evolution/models/group.py +++ /dev/null @@ -1,39 +0,0 @@ -from typing import List, Optional, Literal -from dataclasses import dataclass - -@dataclass -class CreateGroup: - subject: str - participants: List[str] - description: Optional[str] = None - -@dataclass -class GroupPicture: - image: str - -@dataclass -class GroupSubject: - subject: str - -@dataclass -class GroupDescription: - description: str - -@dataclass -class GroupInvite: - groupJid: str - description: str - numbers: List[str] - -@dataclass -class UpdateParticipant: - action: Literal["add", "remove", "promote", "demote"] - participants: List[str] - -@dataclass -class UpdateSetting: - action: Literal["announcement", "not_announcement", "locked", "unlocked"] - -@dataclass -class ToggleEphemeral: - expiration: int \ No newline at end of file diff --git a/build/lib/evolution/models/instance.py b/build/lib/evolution/models/instance.py deleted file mode 100644 index 102debc..0000000 --- a/build/lib/evolution/models/instance.py +++ /dev/null @@ -1,59 +0,0 @@ -from typing import Optional, List, Dict - -class WebhookConfig: - def __init__(self, url: str = None, byEvents: bool = False, base64: bool = True, - headers: Dict = None, events: List[str] = None): - self.url = url - self.byEvents = byEvents - self.base64 = base64 - self.headers = headers - self.events = events - -class EventsConfig: - def __init__(self, enabled: bool = True, events: List[str] = None): - self.enabled = enabled - self.events = events - -class ChatwootConfig: - def __init__(self, accountId: str = None, token: str = None, url: str = None, - signMsg: bool = True, reopenConversation: bool = True, - conversationPending: bool = False, 
importContacts: bool = True, - nameInbox: str = "evolution", mergeBrazilContacts: bool = True, - importMessages: bool = True, daysLimitImportMessages: int = 3, - organization: str = "Evolution Bot", - logo: str = "https://evolution-api.com/files/evolution-api-favicon.png"): - self.chatwootAccountId = accountId - self.chatwootToken = token - self.chatwootUrl = url - self.chatwootSignMsg = signMsg - self.chatwootReopenConversation = reopenConversation - self.chatwootConversationPending = conversationPending - self.chatwootImportContacts = importContacts - self.chatwootNameInbox = nameInbox - self.chatwootMergeBrazilContacts = mergeBrazilContacts - self.chatwootImportMessages = importMessages - self.chatwootDaysLimitImportMessages = daysLimitImportMessages - self.chatwootOrganization = organization - self.chatwootLogo = logo - -class InstanceConfig: - def __init__( - self, - instanceName: str, - integration: str = None, - token: str = None, - number: str = None, - qrcode: bool = None, - rejectCall: bool = None, - msgCall: str = None, - groupsIgnore: bool = None, - alwaysOnline: bool = None, - readMessages: bool = None, - readStatus: bool = None, - syncFullHistory: bool = None - ): - self.__dict__['instanceName'] = instanceName - - for key, value in locals().items(): - if key != 'self' and key != 'instanceName' and value is not None: - self.__dict__[key] = value \ No newline at end of file diff --git a/build/lib/evolution/models/label.py b/build/lib/evolution/models/label.py deleted file mode 100644 index cc59334..0000000 --- a/build/lib/evolution/models/label.py +++ /dev/null @@ -1,21 +0,0 @@ -from typing import Literal - -class BaseLabel: - def __init__(self, **kwargs): - self.__dict__.update({k: v for k, v in kwargs.items() if v is not None}) - -class HandleLabel(BaseLabel): - def __init__( - self, - number: str, - label_id: str, - action: Literal["add", "remove"] - ): - if action not in ["add", "remove"]: - raise ValueError("action deve ser 'add' ou 'remove'") - - super().__init__( - number=number, - labelId=label_id, - action=action - ) \ No newline at end of file diff --git a/build/lib/evolution/models/message.py b/build/lib/evolution/models/message.py deleted file mode 100644 index 739460f..0000000 --- a/build/lib/evolution/models/message.py +++ /dev/null @@ -1,254 +0,0 @@ -from enum import Enum -from typing import List, Optional, Union -from dataclasses import dataclass - -class MediaType(Enum): - IMAGE = "image" - VIDEO = "video" - DOCUMENT = "document" - -class StatusType(Enum): - TEXT = "text" - IMAGE = "image" - VIDEO = "video" - AUDIO = "audio" - -class FontType(Enum): - SERIF = 1 - NORICAN_REGULAR = 2 - BRYNDAN_WRITE = 3 - BEBASNEUE_REGULAR = 4 - OSWALD_HEAVY = 5 - -class BaseMessage: - def __init__(self, **kwargs): - self.__dict__.update({k: v for k, v in kwargs.items() if v is not None}) - -class QuotedMessage(BaseMessage): - def __init__(self, key: dict, message: Optional[dict] = None): - super().__init__(key=key, message=message) - -class TextMessage(BaseMessage): - def __init__( - self, - number: str, - text: str, - delay: Optional[int] = None, - quoted: Optional[QuotedMessage] = None, - linkPreview: Optional[bool] = None, - mentionsEveryOne: Optional[bool] = None, - mentioned: Optional[List[str]] = None - ): - super().__init__( - number=number, - text=text, - delay=delay, - quoted=quoted.__dict__ if quoted else None, - linkPreview=linkPreview, - mentionsEveryOne=mentionsEveryOne, - mentioned=mentioned - ) - -class MediaMessage(BaseMessage): - def __init__( - self, - number: 
str, - mediatype: str, - mimetype: str, - caption: str, - media: str, - fileName: str, - delay: Optional[int] = None, - quoted: Optional[QuotedMessage] = None, - mentionsEveryOne: Optional[bool] = None, - mentioned: Optional[List[str]] = None - ): - super().__init__( - number=number, - mediatype=mediatype, - mimetype=mimetype, - caption=caption, - media=media, - fileName=fileName, - delay=delay, - quoted=quoted.__dict__ if quoted else None, - mentionsEveryOne=mentionsEveryOne, - mentioned=mentioned - ) - -class StatusMessage(BaseMessage): - def __init__( - self, - type: StatusType, - content: str, - caption: Optional[str] = None, - backgroundColor: Optional[str] = None, - font: Optional[FontType] = None, - allContacts: bool = False, - statusJidList: Optional[List[str]] = None - ): - super().__init__( - type=type.value, - content=content, - caption=caption, - backgroundColor=backgroundColor, - font=font.value if font else None, - allContacts=allContacts, - statusJidList=statusJidList - ) - -class LocationMessage(BaseMessage): - def __init__( - self, - number: str, - name: str, - address: str, - latitude: float, - longitude: float, - delay: Optional[int] = None, - quoted: Optional[QuotedMessage] = None - ): - super().__init__( - number=number, - name=name, - address=address, - latitude=latitude, - longitude=longitude, - delay=delay, - quoted=quoted.__dict__ if quoted else None - ) - -class Contact(BaseMessage): - def __init__( - self, - fullName: str, - wuid: str, - phoneNumber: str, - organization: Optional[str] = None, - email: Optional[str] = None, - url: Optional[str] = None - ): - super().__init__( - fullName=fullName, - wuid=wuid, - phoneNumber=phoneNumber, - organization=organization, - email=email, - url=url - ) - -class ContactMessage(BaseMessage): - def __init__(self, number: str, contact: List[Contact]): - super().__init__( - number=number, - contact=[c.__dict__ for c in contact] - ) - -class ReactionMessage(BaseMessage): - def __init__(self, key: dict, reaction: str): - super().__init__(key=key, reaction=reaction) - -class PollMessage(BaseMessage): - def __init__( - self, - number: str, - name: str, - selectableCount: int, - values: List[str], - delay: Optional[int] = None, - quoted: Optional[QuotedMessage] = None - ): - super().__init__( - number=number, - name=name, - selectableCount=selectableCount, - values=values, - delay=delay, - quoted=quoted.__dict__ if quoted else None - ) - -class ListRow(BaseMessage): - def __init__(self, title: str, description: str, rowId: str): - super().__init__( - title=title, - description=description, - rowId=rowId - ) - -class ListSection(BaseMessage): - def __init__(self, title: str, rows: List[ListRow]): - super().__init__( - title=title, - rows=[r.__dict__ for r in rows] - ) - -class ListMessage(BaseMessage): - def __init__( - self, - number: str, - title: str, - description: str, - buttonText: str, - footerText: str, - sections: List[ListSection], - delay: Optional[int] = None, - quoted: Optional[QuotedMessage] = None - ): - super().__init__( - number=number, - title=title, - description=description, - buttonText=buttonText, - footerText=footerText, - sections=[s.__dict__ for s in sections], - delay=delay, - quoted=quoted.__dict__ if quoted else None - ) - -class Button(BaseMessage): - def __init__( - self, - type: str, - displayText: str, - id: Optional[str] = None, - copyCode: Optional[str] = None, - url: Optional[str] = None, - phoneNumber: Optional[str] = None, - currency: Optional[str] = None, - name: Optional[str] = None, - keyType: 
Optional[str] = None, - key: Optional[str] = None - ): - super().__init__( - type=type, - displayText=displayText, - id=id, - copyCode=copyCode, - url=url, - phoneNumber=phoneNumber, - currency=currency, - name=name, - keyType=keyType, - key=key - ) - -class ButtonMessage(BaseMessage): - def __init__( - self, - number: str, - title: str, - description: str, - footer: str, - buttons: List[Button], - delay: Optional[int] = None, - quoted: Optional[QuotedMessage] = None - ): - super().__init__( - number=number, - title=title, - description=description, - footer=footer, - buttons=[b.__dict__ for b in buttons], - delay=delay, - quoted=quoted.__dict__ if quoted else None - ) \ No newline at end of file diff --git a/build/lib/evolution/models/presence.py b/build/lib/evolution/models/presence.py deleted file mode 100644 index d245b57..0000000 --- a/build/lib/evolution/models/presence.py +++ /dev/null @@ -1,9 +0,0 @@ -from enum import Enum - -class PresenceStatus(Enum): - AVAILABLE = "available" - UNAVAILABLE = "unavailable" - -class PresenceConfig: - def __init__(self, presence: PresenceStatus): - self.presence = presence.value \ No newline at end of file diff --git a/build/lib/evolution/models/profile.py b/build/lib/evolution/models/profile.py deleted file mode 100644 index 39441b3..0000000 --- a/build/lib/evolution/models/profile.py +++ /dev/null @@ -1,60 +0,0 @@ -from typing import Literal - -class BaseProfile: - def __init__(self, **kwargs): - self.__dict__.update({k: v for k, v in kwargs.items() if v is not None}) - -class FetchProfile(BaseProfile): - def __init__( - self, - number: str, - ): - super().__init__( - number=number, - ) - -class ProfileName(BaseProfile): - def __init__( - self, - name: str, - ): - super().__init__( - name=name, - ) - -class ProfileStatus(BaseProfile): - def __init__( - self, - status: str, - ): - super().__init__( - status=status, - ) - -class ProfilePicture(BaseProfile): - def __init__( - self, - picture: str, - ): - super().__init__( - picture=picture, - ) - -class PrivacySettings(BaseProfile): - def __init__( - self, - readreceipts: Literal["all", "none"], - profile: Literal["all", "contacts", "contact_blacklist", "none"], - status: Literal["all", "contacts", "contact_blacklist", "none"], - online: Literal["all", "match_last_seen"], - last: Literal["all", "contacts", "contact_blacklist", "none"], - groupadd: Literal["all", "contacts", "contact_blacklist"], - ): - super().__init__( - readreceipts=readreceipts, - profile=profile, - status=status, - online=online, - last=last, - groupadd=groupadd, - ) \ No newline at end of file diff --git a/build/lib/evolution/services/__init__.py b/build/lib/evolution/services/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/build/lib/evolution/services/call.py b/build/lib/evolution/services/call.py deleted file mode 100644 index 4e1b2bd..0000000 --- a/build/lib/evolution/services/call.py +++ /dev/null @@ -1,13 +0,0 @@ -from typing import Union, BinaryIO -from ..models.call import * - -class CallService: - def __init__(self, client): - self.client = client - - def fake_call(self, instance_id: str, data: FakeCall, instance_token: str): - return self.client.post( - f'call/offer/{instance_id}', - data=data.__dict__, - instance_token=instance_token - ) \ No newline at end of file diff --git a/build/lib/evolution/services/chat.py b/build/lib/evolution/services/chat.py deleted file mode 100644 index b2af456..0000000 --- a/build/lib/evolution/services/chat.py +++ /dev/null @@ -1,69 +0,0 @@ -from typing import 
Union, BinaryIO -from ..models.chat import * - -class ChatService: - def __init__(self, client): - self.client = client - - def check_is_whatsapp_numbers(self, instance_id: str, data: CheckIsWhatsappNumber, instance_token: str): - return self.client.post( - f'chat/checkIsWhatsappNumber/{instance_id}', - data=data.__dict__, - instance_token=instance_token - ) - - def mark_message_as_read(self, instance_id: str, messages: List[ReadMessage], instance_token: str): - return self.client.post( - f'chat/markMessageAsRead/{instance_id}', - data={"readMessages": [m.__dict__ for m in messages]}, - instance_token=instance_token - ) - - def archive_chat(self, instance_id: str, data: ArchiveChat, instance_token: str): - return self.client.post( - f'chat/archiveChat/{instance_id}', - data=data.__dict__, - instance_token=instance_token - ) - - def mark_chat_unread(self, instance_id: str, data: UnreadChat, instance_token: str): - return self.client.post( - f'chat/markChatUnread/{instance_id}', - data=data.__dict__, - instance_token=instance_token - ) - - def delete_message_for_everyone(self, instance_id: str, data: MessageKey, instance_token: str): - return self.client.delete( - f'chat/deleteMessageForEveryone/{instance_id}', - data=data.__dict__, - instance_token=instance_token - ) - - def fetch_profile_picture_url(self, instance_id: str, data: ProfilePicture, instance_token: str): - return self.client.post( - f'chat/fetchProfilePictureUrl/{instance_id}', - data=data.__dict__, - instance_token=instance_token - ) - - def get_base64_from_media_message(self, instance_id: str, data: MediaMessage, instance_token: str): - return self.client.post( - f'chat/getBase64FromMediaMessage/{instance_id}', - data=data.__dict__, - instance_token=instance_token - ) - - def update_message(self, instance_id: str, data: UpdateMessage, instance_token: str): - return self.client.post( - f'chat/updateMessage/{instance_id}', - data=data.__dict__, - instance_token=instance_token - ) - - def send_presence(self, instance_id: str, data: Presence, instance_token: str): - return self.client.post( - f'chat/sendPresence/{instance_id}', - data=data.__dict__, - instance_token=instance_token - ) \ No newline at end of file diff --git a/build/lib/evolution/services/group.py b/build/lib/evolution/services/group.py deleted file mode 100644 index 6bfbcfe..0000000 --- a/build/lib/evolution/services/group.py +++ /dev/null @@ -1,117 +0,0 @@ -from typing import Optional -from ..models.group import * - -class GroupService: - def __init__(self, client): - self.client = client - - def create_group(self, instance_id: str, data: CreateGroup, instance_token: str): - return self.client.post( - f'group/create/{instance_id}', - data=data.__dict__, - instance_token=instance_token - ) - - def update_group_picture(self, instance_id: str, group_jid: str, data: GroupPicture, instance_token: str): - return self.client.post( - f'group/updateGroupPicture/{instance_id}', - params={'groupJid': group_jid}, - data=data.__dict__, - instance_token=instance_token - ) - - def update_group_subject(self, instance_id: str, group_jid: str, data: GroupSubject, instance_token: str): - return self.client.post( - f'group/updateGroupSubject/{instance_id}', - params={'groupJid': group_jid}, - data=data.__dict__, - instance_token=instance_token - ) - - def update_group_description(self, instance_id: str, group_jid: str, data: GroupDescription, instance_token: str): - return self.client.post( - f'group/updateGroupDescription/{instance_id}', - params={'groupJid': group_jid}, - 
data=data.__dict__, - instance_token=instance_token - ) - - def get_invite_code(self, instance_id: str, group_jid: str, instance_token: str): - return self.client.get( - f'group/inviteCode/{instance_id}', - params={'groupJid': group_jid}, - instance_token=instance_token - ) - - def revoke_invite_code(self, instance_id: str, group_jid: str, instance_token: str): - return self.client.post( - f'group/revokeInviteCode/{instance_id}', - params={'groupJid': group_jid}, - instance_token=instance_token - ) - - def send_invite(self, instance_id: str, data: GroupInvite, instance_token: str): - return self.client.post( - f'group/sendInvite/{instance_id}', - data=data.__dict__, - instance_token=instance_token - ) - - def get_invite_info(self, instance_id: str, invite_code: str, instance_token: str): - return self.client.get( - f'group/inviteInfo/{instance_id}', - params={'inviteCode': invite_code}, - instance_token=instance_token - ) - - def get_group_info(self, instance_id: str, group_jid: str, instance_token: str): - return self.client.get( - f'group/findGroupInfos/{instance_id}', - params={'groupJid': group_jid}, - instance_token=instance_token - ) - - def fetch_all_groups(self, instance_id: str, instance_token: str, get_participants: bool = False): - return self.client.get( - f'group/fetchAllGroups/{instance_id}', - params={'getParticipants': get_participants}, - instance_token=instance_token - ) - - def get_participants(self, instance_id: str, group_jid: str, instance_token: str): - return self.client.get( - f'group/participants/{instance_id}', - params={'groupJid': group_jid}, - instance_token=instance_token - ) - - def update_participant(self, instance_id: str, group_jid: str, data: UpdateParticipant, instance_token: str): - return self.client.post( - f'group/updateParticipant/{instance_id}', - params={'groupJid': group_jid}, - data=data.__dict__, - instance_token=instance_token - ) - - def update_setting(self, instance_id: str, group_jid: str, data: UpdateSetting, instance_token: str): - return self.client.post( - f'group/updateSetting/{instance_id}', - params={'groupJid': group_jid}, - data=data.__dict__, - instance_token=instance_token - ) - - def toggle_ephemeral(self, instance_id: str, group_jid: str, data: ToggleEphemeral, instance_token: str): - return self.client.post( - f'group/toggleEphemeral/{instance_id}', - params={'groupJid': group_jid}, - data=data.__dict__, - instance_token=instance_token - ) - - def leave_group(self, instance_id: str, group_jid: str, instance_token: str): - return self.client.delete( - f'group/leaveGroup/{instance_id}', - params={'groupJid': group_jid}, - instance_token=instance_token - ) \ No newline at end of file diff --git a/build/lib/evolution/services/instance.py b/build/lib/evolution/services/instance.py deleted file mode 100644 index 099c8bb..0000000 --- a/build/lib/evolution/services/instance.py +++ /dev/null @@ -1,9 +0,0 @@ -class InstanceService: - def __init__(self, client): - self.client = client - - def fetch_instances(self): - return self.client.get('instance/fetchInstances') - - def create_instance(self, config): - return self.client.post('instance/create', data=config.__dict__) \ No newline at end of file diff --git a/build/lib/evolution/services/instance_operations.py b/build/lib/evolution/services/instance_operations.py deleted file mode 100644 index c59016f..0000000 --- a/build/lib/evolution/services/instance_operations.py +++ /dev/null @@ -1,28 +0,0 @@ -from ..models.presence import PresenceStatus, PresenceConfig - -class 
InstanceOperationsService: - def __init__(self, client): - self.client = client - - def connect(self, instance_id: str, instance_token: str): - return self.client.get(f'instance/connect/{instance_id}', instance_token) - - def restart(self, instance_id: str, instance_token: str): - return self.client.post(f'instance/restart/{instance_id}', instance_token=instance_token) - - def set_presence(self, instance_id: str, presence: PresenceStatus, instance_token: str): - config = PresenceConfig(presence) - return self.client.post( - f'instance/setPresence/{instance_id}', - data=config.__dict__, - instance_token=instance_token - ) - - def get_connection_state(self, instance_id: str, instance_token: str): - return self.client.get(f'instance/connectionState/{instance_id}', instance_token) - - def logout(self, instance_id: str, instance_token: str): - return self.client.delete(f'instance/logout/{instance_id}', instance_token) - - def delete(self, instance_id: str, instance_token: str): - return self.client.delete(f'instance/delete/{instance_id}', instance_token) diff --git a/build/lib/evolution/services/label.py b/build/lib/evolution/services/label.py deleted file mode 100644 index ed87c91..0000000 --- a/build/lib/evolution/services/label.py +++ /dev/null @@ -1,19 +0,0 @@ -from typing import Union, BinaryIO -from ..models.label import * - -class LabelService: - def __init__(self, client): - self.client = client - - def find_labels(self, instance_id: str, instance_token: str): - return self.client.get( - f'label/findLabels/{instance_id}', - instance_token=instance_token - ) - - def handle_label(self, instance_id: str, data: HandleLabel, instance_token: str): - return self.client.post( - f'label/handleLabel/{instance_id}', - data=data.__dict__, - instance_token=instance_token - ) \ No newline at end of file diff --git a/build/lib/evolution/services/message.py b/build/lib/evolution/services/message.py deleted file mode 100644 index 02bd3c4..0000000 --- a/build/lib/evolution/services/message.py +++ /dev/null @@ -1,111 +0,0 @@ -from typing import Union, BinaryIO -from ..models.message import * - -class MessageService: - def __init__(self, client): - self.client = client - - def send_text(self, instance_id: str, message: TextMessage, instance_token: str): - return self.client.post( - f'message/sendText/{instance_id}', - data=message.__dict__, - instance_token=instance_token - ) - - def send_media(self, instance_id: str, message: MediaMessage, instance_token: str, file: BinaryIO = None): - payload = { - 'data': message.__dict__, - 'instance_token': instance_token - } - - if file: - payload['files'] = {'file': file} - - return self.client.post( - f'message/sendMedia/{instance_id}', - **payload - ) - - def send_ptv(self, instance_id: str, message: dict, instance_token: str, file: BinaryIO = None): - payload = { - 'data': message, - 'instance_token': instance_token - } - - if file: - payload['files'] = {'file': file} - - return self.client.post( - f'message/sendPtv/{instance_id}', - **payload - ) - - def send_whatsapp_audio(self, instance_id: str, message: dict, instance_token: str, file: BinaryIO = None): - payload = { - 'data': message, - 'instance_token': instance_token - } - - if file: - payload['files'] = {'file': file} - - return self.client.post( - f'message/sendWhatsAppAudio/{instance_id}', - **payload - ) - - def send_status(self, instance_id: str, message: StatusMessage, instance_token: str): - return self.client.post( - f'message/sendStatus/{instance_id}', - data=message.__dict__, - 
instance_token=instance_token - ) - - def send_sticker(self, instance_id: str, message: dict, instance_token: str): - return self.client.post( - f'message/sendSticker/{instance_id}', - data=message, - instance_token=instance_token - ) - - def send_location(self, instance_id: str, message: LocationMessage, instance_token: str): - return self.client.post( - f'message/sendLocation/{instance_id}', - data=message.__dict__, - instance_token=instance_token - ) - - def send_contact(self, instance_id: str, message: ContactMessage, instance_token: str): - return self.client.post( - f'message/sendContact/{instance_id}', - data=message.__dict__, - instance_token=instance_token - ) - - def send_reaction(self, instance_id: str, message: ReactionMessage, instance_token: str): - return self.client.post( - f'message/sendReaction/{instance_id}', - data=message.__dict__, - instance_token=instance_token - ) - - def send_poll(self, instance_id: str, message: PollMessage, instance_token: str): - return self.client.post( - f'message/sendPoll/{instance_id}', - data=message.__dict__, - instance_token=instance_token - ) - - def send_list(self, instance_id: str, message: ListMessage, instance_token: str): - return self.client.post( - f'message/sendList/{instance_id}', - data=message.__dict__, - instance_token=instance_token - ) - - def send_buttons(self, instance_id: str, message: ButtonMessage, instance_token: str): - return self.client.post( - f'message/sendButtons/{instance_id}', - data=message.__dict__, - instance_token=instance_token - ) \ No newline at end of file diff --git a/build/lib/evolution/services/profile.py b/build/lib/evolution/services/profile.py deleted file mode 100644 index b0da8ab..0000000 --- a/build/lib/evolution/services/profile.py +++ /dev/null @@ -1,60 +0,0 @@ -from typing import Union, BinaryIO -from ..models.profile import * - -class ProfileService: - def __init__(self, client): - self.client = client - - def fetch_business_profile(self, instance_id: str, data: FetchProfile, instance_token: str): - return self.client.post( - f'chat/fetchBusinessProfile/{instance_id}', - data=data.__dict__, - instance_token=instance_token - ) - - def fetch_profile(self, instance_id: str, data: FetchProfile, instance_token: str): - return self.client.post( - f'chat/fetchProfile/{instance_id}', - data=data.__dict__, - instance_token=instance_token - ) - - def update_profile_name(self, instance_id: str, data: ProfileName, instance_token: str): - return self.client.post( - f'chat/updateProfileName/{instance_id}', - data=data.__dict__, - instance_token=instance_token - ) - - def update_profile_status(self, instance_id: str, data: ProfileStatus, instance_token: str): - return self.client.post( - f'chat/updateProfileStatus/{instance_id}', - data=data.__dict__, - instance_token=instance_token - ) - - def update_profile_picture(self, instance_id: str, data: ProfilePicture, instance_token: str): - return self.client.post( - f'chat/updateProfilePicture/{instance_id}', - data=data.__dict__, - instance_token=instance_token - ) - - def remove_profile_picture(self, instance_id: str, instance_token: str): - return self.client.delete( - f'chat/removeProfilePicture/{instance_id}', - instance_token=instance_token - ) - - def fetch_privacy_settings(self, instance_id: str, instance_token: str): - return self.client.get( - f'chat/fetchPrivacySettings/{instance_id}', - instance_token=instance_token - ) - - def update_privacy_settings(self, instance_id: str, data: PrivacySettings, instance_token: str): - return self.client.post( 
- f'chat/updatePrivacySettings/{instance_id}', - data=data.__dict__, - instance_token=instance_token - ) \ No newline at end of file diff --git a/build/lib/evolutionapi/__init__.py b/build/lib/evolutionapi/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/build/lib/evolutionapi/client.py b/build/lib/evolutionapi/client.py deleted file mode 100644 index 237f925..0000000 --- a/build/lib/evolutionapi/client.py +++ /dev/null @@ -1,138 +0,0 @@ -import requests -from requests_toolbelt import MultipartEncoder -from .exceptions import EvolutionAuthenticationError, EvolutionNotFoundError, EvolutionAPIError -from .services.instance import InstanceService -from .services.instance_operations import InstanceOperationsService -from .services.message import MessageService -from .services.call import CallService -from .services.chat import ChatService -from .services.label import LabelService -from .services.profile import ProfileService -from .services.group import GroupService -from .services.websocket import WebSocketService, WebSocketManager - -class EvolutionClient: - """ - Cliente para interagir com a API Evolution. - - Args: - base_url (str): A URL base do servidor da API Evolution. - api_token (str): O token de autenticação para acessar a API. - """ - - def __init__(self, base_url: str, api_token: str): - self.base_url = base_url.rstrip('/') - self.api_token = api_token - self.instances = InstanceService(self) - self.instance_operations = InstanceOperationsService(self) - self.messages = MessageService(self) - self.calls = CallService(self) - self.chat = ChatService(self) - self.label = LabelService(self) - self.profile = ProfileService(self) - self.group = GroupService(self) - self.websocket = WebSocketService(self) - - def _get_headers(self, instance_token: str = None): - return { - 'apikey': instance_token or self.api_token, - 'Content-Type': 'application/json' - } - - def _get_full_url(self, endpoint): - return f'{self.base_url}/{endpoint}' - - def _handle_response(self, response): - if response.status_code == 401: - raise EvolutionAuthenticationError('Falha na autenticação.') - elif response.status_code == 404: - raise EvolutionNotFoundError('Recurso não encontrado.') - elif response.ok: - try: - return response.json() - except ValueError: - return response.content - else: - error_detail = '' - try: - error_detail = f' - {response.json()}' - except: - error_detail = f' - {response.text}' - raise EvolutionAPIError(f'Erro na requisição: {response.status_code}{error_detail}') - - def get(self, endpoint: str, instance_token: str = None): - """Faz uma requisição GET.""" - url = self._get_full_url(endpoint) - response = requests.get(url, headers=self._get_headers(instance_token)) - return self._handle_response(response) - - def post(self, endpoint: str, data: dict = None, instance_token: str = None, files: dict = None): - url = f'{self.base_url}/{endpoint}' - headers = self._get_headers(instance_token) - - if files: - # Remove o Content-Type do header quando enviando arquivos - if 'Content-Type' in headers: - del headers['Content-Type'] - - # Prepara os campos do multipart - fields = {} - - # Adiciona os campos do data - for key, value in data.items(): - fields[key] = str(value) if not isinstance(value, (int, float)) else (None, str(value), 'text/plain') - - # Adiciona o arquivo - file_tuple = files['file'] - fields['file'] = (file_tuple[0], file_tuple[1], file_tuple[2]) - - # Cria o multipart encoder - multipart = MultipartEncoder(fields=fields) - headers['Content-Type'] = 
multipart.content_type - - response = requests.post( - url, - headers=headers, - data=multipart - ) - else: - response = requests.post( - url, - headers=headers, - json=data - ) - - return response.json() - - def put(self, endpoint, data=None): - """Faz uma requisição PUT.""" - url = self._get_full_url(endpoint) - response = requests.put(url, headers=self.headers, json=data) - return self._handle_response(response) - - def delete(self, endpoint: str, instance_token: str = None): - """Faz uma requisição DELETE.""" - url = self._get_full_url(endpoint) - response = requests.delete(url, headers=self._get_headers(instance_token)) - return self._handle_response(response) - - def create_websocket(self, instance_id: str, api_token: str, max_retries: int = 5, retry_delay: float = 1.0) -> WebSocketManager: - """ - Create a WebSocket manager for the specified instance. - - Args: - instance_id (str): The instance ID - api_token (str): The API token - max_retries (int): Maximum number of reconnection attempts - retry_delay (float): Initial delay between attempts in seconds - - Returns: - WebSocketManager: The WebSocket manager instance - """ - return WebSocketManager( - base_url=self.base_url, - instance_id=instance_id, - api_token=api_token, - max_retries=max_retries, - retry_delay=retry_delay - ) diff --git a/build/lib/evolutionapi/exceptions.py b/build/lib/evolutionapi/exceptions.py deleted file mode 100644 index ee7e177..0000000 --- a/build/lib/evolutionapi/exceptions.py +++ /dev/null @@ -1,11 +0,0 @@ -class EvolutionAPIError(Exception): - """Erro genérico da API Evolution.""" - pass - -class EvolutionAuthenticationError(EvolutionAPIError): - """Erro de autenticação com a API Evolution.""" - pass - -class EvolutionNotFoundError(EvolutionAPIError): - """Recurso não encontrado na API Evolution.""" - pass diff --git a/build/lib/evolutionapi/models/__init__.py b/build/lib/evolutionapi/models/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/build/lib/evolutionapi/models/call.py b/build/lib/evolutionapi/models/call.py deleted file mode 100644 index 991463b..0000000 --- a/build/lib/evolutionapi/models/call.py +++ /dev/null @@ -1,16 +0,0 @@ -class BaseCall: - def __init__(self, **kwargs): - self.__dict__.update({k: v for k, v in kwargs.items() if v is not None}) - -class FakeCall(BaseCall): - def __init__( - self, - number: str, - isVideo: bool, - callDuration: int - ): - super().__init__( - number=number, - isVideo=isVideo, - callDuration=callDuration - ) \ No newline at end of file diff --git a/build/lib/evolutionapi/models/chat.py b/build/lib/evolutionapi/models/chat.py deleted file mode 100644 index 4e985b3..0000000 --- a/build/lib/evolutionapi/models/chat.py +++ /dev/null @@ -1,93 +0,0 @@ -from typing import List, Optional, Dict, Any - -class BaseChat: - def __init__(self, **kwargs): - self.__dict__.update({k: v for k, v in kwargs.items() if v is not None}) - -class CheckIsWhatsappNumber(BaseChat): - def __init__( - self, - numbers: List[str] - ): - super().__init__( - numbers=numbers - ) - -class MessageKey: - def __init__( - self, - remote_jid: str, - from_me: bool, - id: str, - participant: Optional[str] = None - ): - self.remoteJid = remote_jid - self.fromMe = from_me - self.id = id - self.participant = participant - -class ReadMessage: - def __init__( - self, - remote_jid: str, - from_me: bool, - id: str - ): - self.remoteJid = remote_jid - self.fromMe = from_me - self.id = id - -class ArchiveChat: - def __init__( - self, - last_message: Dict[str, Any], - chat: str, - 
archive: bool - ): - self.lastMessage = last_message - self.chat = chat - self.archive = archive - -class UnreadChat: - def __init__( - self, - last_message: Dict[str, Any], - chat: str - ): - self.lastMessage = last_message - self.chat = chat - -class ProfilePicture: - def __init__(self, number: str): - self.number = number - -class MediaMessage: - def __init__( - self, - message: Dict[str, Any], - convert_to_mp4: bool = False - ): - self.message = message - self.convertToMp4 = convert_to_mp4 - -class UpdateMessage: - def __init__( - self, - number: str, - key: Dict[str, Any], - text: str - ): - self.number = number - self.key = key - self.text = text - -class Presence: - def __init__( - self, - number: str, - delay: int, - presence: str - ): - self.number = number - self.delay = delay - self.presence = presence \ No newline at end of file diff --git a/build/lib/evolutionapi/models/group.py b/build/lib/evolutionapi/models/group.py deleted file mode 100644 index 5a21460..0000000 --- a/build/lib/evolutionapi/models/group.py +++ /dev/null @@ -1,39 +0,0 @@ -from typing import List, Optional, Literal -from dataclasses import dataclass - -@dataclass -class CreateGroup: - subject: str - participants: List[str] - description: Optional[str] = None - -@dataclass -class GroupPicture: - image: str - -@dataclass -class GroupSubject: - subject: str - -@dataclass -class GroupDescription: - description: str - -@dataclass -class GroupInvite: - groupJid: str - description: str - numbers: List[str] - -@dataclass -class UpdateParticipant: - action: Literal["add", "remove", "promote", "demote"] - participants: List[str] - -@dataclass -class UpdateSetting: - action: Literal["announcement", "not_announcement", "locked", "unlocked"] - -@dataclass -class ToggleEphemeral: - expiration: int \ No newline at end of file diff --git a/build/lib/evolutionapi/models/instance.py b/build/lib/evolutionapi/models/instance.py deleted file mode 100644 index 102debc..0000000 --- a/build/lib/evolutionapi/models/instance.py +++ /dev/null @@ -1,59 +0,0 @@ -from typing import Optional, List, Dict - -class WebhookConfig: - def __init__(self, url: str = None, byEvents: bool = False, base64: bool = True, - headers: Dict = None, events: List[str] = None): - self.url = url - self.byEvents = byEvents - self.base64 = base64 - self.headers = headers - self.events = events - -class EventsConfig: - def __init__(self, enabled: bool = True, events: List[str] = None): - self.enabled = enabled - self.events = events - -class ChatwootConfig: - def __init__(self, accountId: str = None, token: str = None, url: str = None, - signMsg: bool = True, reopenConversation: bool = True, - conversationPending: bool = False, importContacts: bool = True, - nameInbox: str = "evolution", mergeBrazilContacts: bool = True, - importMessages: bool = True, daysLimitImportMessages: int = 3, - organization: str = "Evolution Bot", - logo: str = "https://evolution-api.com/files/evolution-api-favicon.png"): - self.chatwootAccountId = accountId - self.chatwootToken = token - self.chatwootUrl = url - self.chatwootSignMsg = signMsg - self.chatwootReopenConversation = reopenConversation - self.chatwootConversationPending = conversationPending - self.chatwootImportContacts = importContacts - self.chatwootNameInbox = nameInbox - self.chatwootMergeBrazilContacts = mergeBrazilContacts - self.chatwootImportMessages = importMessages - self.chatwootDaysLimitImportMessages = daysLimitImportMessages - self.chatwootOrganization = organization - self.chatwootLogo = logo - -class 
InstanceConfig: - def __init__( - self, - instanceName: str, - integration: str = None, - token: str = None, - number: str = None, - qrcode: bool = None, - rejectCall: bool = None, - msgCall: str = None, - groupsIgnore: bool = None, - alwaysOnline: bool = None, - readMessages: bool = None, - readStatus: bool = None, - syncFullHistory: bool = None - ): - self.__dict__['instanceName'] = instanceName - - for key, value in locals().items(): - if key != 'self' and key != 'instanceName' and value is not None: - self.__dict__[key] = value \ No newline at end of file diff --git a/build/lib/evolutionapi/models/label.py b/build/lib/evolutionapi/models/label.py deleted file mode 100644 index cc59334..0000000 --- a/build/lib/evolutionapi/models/label.py +++ /dev/null @@ -1,21 +0,0 @@ -from typing import Literal - -class BaseLabel: - def __init__(self, **kwargs): - self.__dict__.update({k: v for k, v in kwargs.items() if v is not None}) - -class HandleLabel(BaseLabel): - def __init__( - self, - number: str, - label_id: str, - action: Literal["add", "remove"] - ): - if action not in ["add", "remove"]: - raise ValueError("action deve ser 'add' ou 'remove'") - - super().__init__( - number=number, - labelId=label_id, - action=action - ) \ No newline at end of file diff --git a/build/lib/evolutionapi/models/message.py b/build/lib/evolutionapi/models/message.py deleted file mode 100644 index cd4a82a..0000000 --- a/build/lib/evolutionapi/models/message.py +++ /dev/null @@ -1,260 +0,0 @@ -from enum import Enum -from typing import List, Optional, Union -from dataclasses import dataclass - -class MediaType(Enum): - IMAGE = "image" - VIDEO = "video" - DOCUMENT = "document" - -class StatusType(Enum): - TEXT = "text" - IMAGE = "image" - VIDEO = "video" - AUDIO = "audio" - -class FontType(Enum): - SERIF = 1 - NORICAN_REGULAR = 2 - BRYNDAN_WRITE = 3 - BEBASNEUE_REGULAR = 4 - OSWALD_HEAVY = 5 - -class BaseMessage: - def __init__(self, **kwargs): - self.__dict__.update({k: v for k, v in kwargs.items() if v is not None}) - -class QuotedMessage(BaseMessage): - def __init__(self, key: dict, message: Optional[dict] = None): - super().__init__(key=key, message=message) - -class TextMessage(BaseMessage): - def __init__( - self, - number: str, - text: str, - delay: Optional[int] = None, - quoted: Optional[QuotedMessage] = None, - linkPreview: Optional[bool] = None, - mentionsEveryOne: Optional[bool] = None, - mentioned: Optional[List[str]] = None - ): - super().__init__( - number=number, - text=text, - delay=delay, - quoted=quoted.__dict__ if quoted else None, - linkPreview=linkPreview, - mentionsEveryOne=mentionsEveryOne, - mentioned=mentioned - ) - -class MediaMessage(BaseMessage): - def __init__( - self, - number: str, - media: dict = None, - mediatype: Optional[str] = None, - caption: str = None, - mimetype: str = None, - fileName: str = None, - delay: Optional[Union[int, float, str]] = None, - quoted: Optional[QuotedMessage] = None, - mentionsEveryOne: Optional[bool] = None, - mentioned: Optional[List[str]] = None - ): - data = { - 'number': number, - 'mediatype': mediatype, - 'caption': caption, - 'mimetype': mimetype, - 'fileName': fileName, - 'quoted': quoted.__dict__ if quoted else None, - 'mentionsEveryOne': mentionsEveryOne, - 'mentioned': mentioned - } - - if delay is not None: - data['delay'] = delay - - if media and media != {}: - data['media'] = media - - super().__init__(**{k: v for k, v in data.items() if v is not None}) - -class StatusMessage(BaseMessage): - def __init__( - self, - type: StatusType, - 
content: str, - caption: Optional[str] = None, - backgroundColor: Optional[str] = None, - font: Optional[FontType] = None, - allContacts: bool = False, - statusJidList: Optional[List[str]] = None - ): - super().__init__( - type=type.value, - content=content, - caption=caption, - backgroundColor=backgroundColor, - font=font.value if font else None, - allContacts=allContacts, - statusJidList=statusJidList - ) - -class LocationMessage(BaseMessage): - def __init__( - self, - number: str, - name: str, - address: str, - latitude: float, - longitude: float, - delay: Optional[int] = None, - quoted: Optional[QuotedMessage] = None - ): - super().__init__( - number=number, - name=name, - address=address, - latitude=latitude, - longitude=longitude, - delay=delay, - quoted=quoted.__dict__ if quoted else None - ) - -class Contact(BaseMessage): - def __init__( - self, - fullName: str, - wuid: str, - phoneNumber: str, - organization: Optional[str] = None, - email: Optional[str] = None, - url: Optional[str] = None - ): - super().__init__( - fullName=fullName, - wuid=wuid, - phoneNumber=phoneNumber, - organization=organization, - email=email, - url=url - ) - -class ContactMessage(BaseMessage): - def __init__(self, number: str, contact: List[Contact]): - super().__init__( - number=number, - contact=[c.__dict__ for c in contact] - ) - -class ReactionMessage(BaseMessage): - def __init__(self, key: dict, reaction: str): - super().__init__(key=key, reaction=reaction) - -class PollMessage(BaseMessage): - def __init__( - self, - number: str, - name: str, - selectableCount: int, - values: List[str], - delay: Optional[int] = None, - quoted: Optional[QuotedMessage] = None - ): - super().__init__( - number=number, - name=name, - selectableCount=selectableCount, - values=values, - delay=delay, - quoted=quoted.__dict__ if quoted else None - ) - -class ListRow(BaseMessage): - def __init__(self, title: str, description: str, rowId: str): - super().__init__( - title=title, - description=description, - rowId=rowId - ) - -class ListSection(BaseMessage): - def __init__(self, title: str, rows: List[ListRow]): - super().__init__( - title=title, - rows=[r.__dict__ for r in rows] - ) - -class ListMessage(BaseMessage): - def __init__( - self, - number: str, - title: str, - description: str, - buttonText: str, - footerText: str, - sections: List[ListSection], - delay: Optional[int] = None, - quoted: Optional[QuotedMessage] = None - ): - super().__init__( - number=number, - title=title, - description=description, - buttonText=buttonText, - footerText=footerText, - sections=[s.__dict__ for s in sections], - delay=delay, - quoted=quoted.__dict__ if quoted else None - ) - -class Button(BaseMessage): - def __init__( - self, - type: str, - displayText: str, - id: Optional[str] = None, - copyCode: Optional[str] = None, - url: Optional[str] = None, - phoneNumber: Optional[str] = None, - currency: Optional[str] = None, - name: Optional[str] = None, - keyType: Optional[str] = None, - key: Optional[str] = None - ): - super().__init__( - type=type, - displayText=displayText, - id=id, - copyCode=copyCode, - url=url, - phoneNumber=phoneNumber, - currency=currency, - name=name, - keyType=keyType, - key=key - ) - -class ButtonMessage(BaseMessage): - def __init__( - self, - number: str, - title: str, - description: str, - footer: str, - buttons: List[Button], - delay: Optional[int] = None, - quoted: Optional[QuotedMessage] = None - ): - super().__init__( - number=number, - title=title, - description=description, - footer=footer, - 
buttons=[b.__dict__ for b in buttons], - delay=delay, - quoted=quoted.__dict__ if quoted else None - ) \ No newline at end of file diff --git a/build/lib/evolutionapi/models/presence.py b/build/lib/evolutionapi/models/presence.py deleted file mode 100644 index d245b57..0000000 --- a/build/lib/evolutionapi/models/presence.py +++ /dev/null @@ -1,9 +0,0 @@ -from enum import Enum - -class PresenceStatus(Enum): - AVAILABLE = "available" - UNAVAILABLE = "unavailable" - -class PresenceConfig: - def __init__(self, presence: PresenceStatus): - self.presence = presence.value \ No newline at end of file diff --git a/build/lib/evolutionapi/models/profile.py b/build/lib/evolutionapi/models/profile.py deleted file mode 100644 index 39441b3..0000000 --- a/build/lib/evolutionapi/models/profile.py +++ /dev/null @@ -1,60 +0,0 @@ -from typing import Literal - -class BaseProfile: - def __init__(self, **kwargs): - self.__dict__.update({k: v for k, v in kwargs.items() if v is not None}) - -class FetchProfile(BaseProfile): - def __init__( - self, - number: str, - ): - super().__init__( - number=number, - ) - -class ProfileName(BaseProfile): - def __init__( - self, - name: str, - ): - super().__init__( - name=name, - ) - -class ProfileStatus(BaseProfile): - def __init__( - self, - status: str, - ): - super().__init__( - status=status, - ) - -class ProfilePicture(BaseProfile): - def __init__( - self, - picture: str, - ): - super().__init__( - picture=picture, - ) - -class PrivacySettings(BaseProfile): - def __init__( - self, - readreceipts: Literal["all", "none"], - profile: Literal["all", "contacts", "contact_blacklist", "none"], - status: Literal["all", "contacts", "contact_blacklist", "none"], - online: Literal["all", "match_last_seen"], - last: Literal["all", "contacts", "contact_blacklist", "none"], - groupadd: Literal["all", "contacts", "contact_blacklist"], - ): - super().__init__( - readreceipts=readreceipts, - profile=profile, - status=status, - online=online, - last=last, - groupadd=groupadd, - ) \ No newline at end of file diff --git a/build/lib/evolutionapi/models/websocket.py b/build/lib/evolutionapi/models/websocket.py deleted file mode 100644 index 56fbea3..0000000 --- a/build/lib/evolutionapi/models/websocket.py +++ /dev/null @@ -1,20 +0,0 @@ -from typing import List, Optional -from dataclasses import dataclass - -@dataclass -class WebSocketConfig: - enabled: bool - events: List[str] - - def __init__(self, enabled: bool, events: List[str]): - self.enabled = enabled - self.events = events - -@dataclass -class WebSocketInfo: - enabled: bool - events: List[str] - - def __init__(self, **kwargs): - self.enabled = kwargs.get('enabled', False) - self.events = kwargs.get('events', []) \ No newline at end of file diff --git a/build/lib/evolutionapi/services/__init__.py b/build/lib/evolutionapi/services/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/build/lib/evolutionapi/services/call.py b/build/lib/evolutionapi/services/call.py deleted file mode 100644 index 4e1b2bd..0000000 --- a/build/lib/evolutionapi/services/call.py +++ /dev/null @@ -1,13 +0,0 @@ -from typing import Union, BinaryIO -from ..models.call import * - -class CallService: - def __init__(self, client): - self.client = client - - def fake_call(self, instance_id: str, data: FakeCall, instance_token: str): - return self.client.post( - f'call/offer/{instance_id}', - data=data.__dict__, - instance_token=instance_token - ) \ No newline at end of file diff --git a/build/lib/evolutionapi/services/chat.py 
b/build/lib/evolutionapi/services/chat.py deleted file mode 100644 index f1e06a5..0000000 --- a/build/lib/evolutionapi/services/chat.py +++ /dev/null @@ -1,122 +0,0 @@ -from typing import Union, BinaryIO, Optional -from ..models.chat import * - -class ChatService: - def __init__(self, client): - self.client = client - - def check_is_whatsapp_numbers(self, instance_id: str, data: CheckIsWhatsappNumber, instance_token: str): - return self.client.post( - f'chat/checkIsWhatsappNumber/{instance_id}', - data=data.__dict__, - instance_token=instance_token - ) - - def mark_message_as_read(self, instance_id: str, messages: List[ReadMessage], instance_token: str): - return self.client.post( - f'chat/markMessageAsRead/{instance_id}', - data={"readMessages": [m.__dict__ for m in messages]}, - instance_token=instance_token - ) - - def archive_chat(self, instance_id: str, data: ArchiveChat, instance_token: str): - return self.client.post( - f'chat/archiveChat/{instance_id}', - data=data.__dict__, - instance_token=instance_token - ) - - def mark_chat_unread(self, instance_id: str, data: UnreadChat, instance_token: str): - return self.client.post( - f'chat/markChatUnread/{instance_id}', - data=data.__dict__, - instance_token=instance_token - ) - - def delete_message_for_everyone(self, instance_id: str, data: MessageKey, instance_token: str): - return self.client.delete( - f'chat/deleteMessageForEveryone/{instance_id}', - data=data.__dict__, - instance_token=instance_token - ) - - def fetch_profile_picture_url(self, instance_id: str, data: ProfilePicture, instance_token: str): - return self.client.post( - f'chat/fetchProfilePictureUrl/{instance_id}', - data=data.__dict__, - instance_token=instance_token - ) - - def get_base64_from_media_message(self, instance_id: str, data: MediaMessage, instance_token: str): - return self.client.post( - f'chat/getBase64FromMediaMessage/{instance_id}', - data=data.__dict__, - instance_token=instance_token - ) - - def update_message(self, instance_id: str, data: UpdateMessage, instance_token: str): - return self.client.post( - f'chat/updateMessage/{instance_id}', - data=data.__dict__, - instance_token=instance_token - ) - - def send_presence(self, instance_id: str, data: Presence, instance_token: str): - return self.client.post( - f'chat/sendPresence/{instance_id}', - data=data.__dict__, - instance_token=instance_token - ) - - def get_messages( - self, - instance_id: str, - remote_jid: str, - instance_token: str, - message_id: Optional[str] = None, - whatsapp_message_id: Optional[str] = None, - from_me: Optional[bool] = None, - message_type: Optional[str] = None, - source: Optional[str] = None, - timestamp_start: Optional[str] = None, - timestamp_end: Optional[str] = None, - page: int = 1, - offset: int = 50 - ): - ''' - Obtém mensagens de um chat com filtros opcionais - - Args: - timestamp_start: Data inicial no formato ISO (ex: "2025-01-16T00:00:00Z") - timestamp_end: Data final no formato ISO (ex: "2025-01-16T23:59:59Z") - ''' - where = {"key": {"remoteJid": remote_jid}} - - if message_id: - where["id"] = message_id - if whatsapp_message_id: - where["key"]["id"] = whatsapp_message_id - if from_me is not None: - where["key"]["fromMe"] = from_me - if message_type: - where["messageType"] = message_type - if source: - where["source"] = source - if timestamp_start or timestamp_end: - where["messageTimestamp"] = {} - if timestamp_start: - where["messageTimestamp"]["gte"] = timestamp_start - if timestamp_end: - where["messageTimestamp"]["lte"] = timestamp_end - - payload = { - 
"where": where, - "page": page, - "offset": offset, - } - - return self.client.post( - f'chat/findMessages/{instance_id}', - data=payload, - instance_token=instance_token, - ) diff --git a/build/lib/evolutionapi/services/group.py b/build/lib/evolutionapi/services/group.py deleted file mode 100644 index 3b27006..0000000 --- a/build/lib/evolutionapi/services/group.py +++ /dev/null @@ -1,105 +0,0 @@ -from typing import Optional -from ..models.group import * - -class GroupService: - def __init__(self, client): - self.client = client - - def create_group(self, instance_id: str, data: CreateGroup, instance_token: str): - return self.client.post( - f'group/create/{instance_id}', - data=data.__dict__, - instance_token=instance_token - ) - - def update_group_picture(self, instance_id: str, group_jid: str, data: GroupPicture, instance_token: str): - return self.client.post( - f'group/updateGroupPicture/{instance_id}?groupJid={group_jid}', - data=data.__dict__, - instance_token=instance_token - ) - - def update_group_subject(self, instance_id: str, group_jid: str, data: GroupSubject, instance_token: str): - return self.client.post( - f'group/updateGroupSubject/{instance_id}?groupJid={group_jid}', - data=data.__dict__, - instance_token=instance_token - ) - - def update_group_description(self, instance_id: str, group_jid: str, data: GroupDescription, instance_token: str): - return self.client.post( - f'group/updateGroupDescription/{instance_id}?groupJid={group_jid}', - data=data.__dict__, - instance_token=instance_token - ) - - def get_invite_code(self, instance_id: str, group_jid: str, instance_token: str): - return self.client.get( - f'group/inviteCode/{instance_id}?groupJid={group_jid}', - instance_token=instance_token - ) - - def revoke_invite_code(self, instance_id: str, group_jid: str, instance_token: str): - return self.client.post( - f'group/revokeInviteCode/{instance_id}?groupJid={group_jid}', - instance_token=instance_token - ) - - def send_invite(self, instance_id: str, data: GroupInvite, instance_token: str): - return self.client.post( - f'group/sendInvite/{instance_id}', - data=data.__dict__, - instance_token=instance_token - ) - - def get_invite_info(self, instance_id: str, invite_code: str, instance_token: str): - return self.client.get( - f'group/inviteInfo/{instance_id}?inviteCode={invite_code}', - instance_token=instance_token - ) - - def get_group_info(self, instance_id: str, group_jid: str, instance_token: str): - return self.client.get( - f'group/findGroupInfos/{instance_id}?groupJid={group_jid}', - instance_token=instance_token - ) - - def fetch_all_groups(self, instance_id: str, instance_token: str, get_participants: bool = False): - url = f'group/fetchAllGroups/{instance_id}?getParticipants={str(get_participants).lower()}' - return self.client.get( - url, - instance_token=instance_token - ) - - def get_participants(self, instance_id: str, group_jid: str, instance_token: str): - return self.client.get( - f'group/participants/{instance_id}?groupJid={group_jid}', - instance_token=instance_token - ) - - def update_participant(self, instance_id: str, group_jid: str, data: UpdateParticipant, instance_token: str): - return self.client.post( - f'group/updateParticipant/{instance_id}?groupJid={group_jid}', - data=data.__dict__, - instance_token=instance_token - ) - - def update_setting(self, instance_id: str, group_jid: str, data: UpdateSetting, instance_token: str): - return self.client.post( - f'group/updateSetting/{instance_id}?groupJid={group_jid}', - data=data.__dict__, - 
instance_token=instance_token - ) - - def toggle_ephemeral(self, instance_id: str, group_jid: str, data: ToggleEphemeral, instance_token: str): - return self.client.post( - f'group/toggleEphemeral/{instance_id}?groupJid={group_jid}', - data=data.__dict__, - instance_token=instance_token - ) - - def leave_group(self, instance_id: str, group_jid: str, instance_token: str): - return self.client.delete( - f'group/leaveGroup/{instance_id}?groupJid={group_jid}', - instance_token=instance_token - ) \ No newline at end of file diff --git a/build/lib/evolutionapi/services/instance.py b/build/lib/evolutionapi/services/instance.py deleted file mode 100644 index 099c8bb..0000000 --- a/build/lib/evolutionapi/services/instance.py +++ /dev/null @@ -1,9 +0,0 @@ -class InstanceService: - def __init__(self, client): - self.client = client - - def fetch_instances(self): - return self.client.get('instance/fetchInstances') - - def create_instance(self, config): - return self.client.post('instance/create', data=config.__dict__) \ No newline at end of file diff --git a/build/lib/evolutionapi/services/instance_operations.py b/build/lib/evolutionapi/services/instance_operations.py deleted file mode 100644 index c59016f..0000000 --- a/build/lib/evolutionapi/services/instance_operations.py +++ /dev/null @@ -1,28 +0,0 @@ -from ..models.presence import PresenceStatus, PresenceConfig - -class InstanceOperationsService: - def __init__(self, client): - self.client = client - - def connect(self, instance_id: str, instance_token: str): - return self.client.get(f'instance/connect/{instance_id}', instance_token) - - def restart(self, instance_id: str, instance_token: str): - return self.client.post(f'instance/restart/{instance_id}', instance_token=instance_token) - - def set_presence(self, instance_id: str, presence: PresenceStatus, instance_token: str): - config = PresenceConfig(presence) - return self.client.post( - f'instance/setPresence/{instance_id}', - data=config.__dict__, - instance_token=instance_token - ) - - def get_connection_state(self, instance_id: str, instance_token: str): - return self.client.get(f'instance/connectionState/{instance_id}', instance_token) - - def logout(self, instance_id: str, instance_token: str): - return self.client.delete(f'instance/logout/{instance_id}', instance_token) - - def delete(self, instance_id: str, instance_token: str): - return self.client.delete(f'instance/delete/{instance_id}', instance_token) diff --git a/build/lib/evolutionapi/services/label.py b/build/lib/evolutionapi/services/label.py deleted file mode 100644 index ed87c91..0000000 --- a/build/lib/evolutionapi/services/label.py +++ /dev/null @@ -1,19 +0,0 @@ -from typing import Union, BinaryIO -from ..models.label import * - -class LabelService: - def __init__(self, client): - self.client = client - - def find_labels(self, instance_id: str, instance_token: str): - return self.client.get( - f'label/findLabels/{instance_id}', - instance_token=instance_token - ) - - def handle_label(self, instance_id: str, data: HandleLabel, instance_token: str): - return self.client.post( - f'label/handleLabel/{instance_id}', - data=data.__dict__, - instance_token=instance_token - ) \ No newline at end of file diff --git a/build/lib/evolutionapi/services/message.py b/build/lib/evolutionapi/services/message.py deleted file mode 100644 index 79efaae..0000000 --- a/build/lib/evolutionapi/services/message.py +++ /dev/null @@ -1,205 +0,0 @@ -from typing import Union, BinaryIO -from ..models.message import * -from requests_toolbelt import 
MultipartEncoder -import mimetypes -import requests - -class MessageService: - def __init__(self, client): - self.client = client - - def send_text(self, instance_id: str, message: TextMessage, instance_token: str): - # Preparar os dados como JSON - data = { - 'number': message.number, - 'text': message.text - } - - if hasattr(message, 'delay') and message.delay is not None: - data['delay'] = message.delay - - # Usar o método post do cliente que já trata JSON corretamente - return self.client.post( - f'message/sendText/{instance_id}', - data=data, - instance_token=instance_token - ) - - def send_media(self, instance_id: str, message: MediaMessage, instance_token: str, file: Union[BinaryIO, str] = None): - # Preparar os dados do formulário - fields = { - 'number': (None, message.number, 'text/plain'), - 'mediatype': (None, message.mediatype, 'text/plain'), - 'mimetype': (None, message.mimetype, 'text/plain'), - 'caption': (None, message.caption, 'text/plain'), - 'fileName': (None, message.fileName, 'text/plain'), - } - - # Adicionar delay apenas se existir - if hasattr(message, 'delay') and message.delay is not None: - fields['delay'] = (None, str(message.delay), 'text/plain; type=number') - - # Adicionar o arquivo se fornecido - if file: - if isinstance(file, str): - mime_type = mimetypes.guess_type(file)[0] or 'application/octet-stream' - fields['file'] = ('file', open(file, 'rb'), mime_type) - else: - fields['file'] = ('file', file, 'application/octet-stream') - - # Criar o multipart encoder - multipart = MultipartEncoder(fields=fields) - - # Preparar os headers - headers = self.client._get_headers(instance_token) - headers['Content-Type'] = multipart.content_type - - # Fazer a requisição diretamente - url = f'{self.client.base_url}/message/sendMedia/{instance_id}' - response = requests.post( - url, - headers=headers, - data=multipart - ) - - return response.json() - - def send_ptv(self, instance_id: str, message: dict, instance_token: str, file: Union[BinaryIO, str] = None): - fields = {} - - # Adiciona todos os campos do message como text/plain - for key, value in message.items(): - if key == 'delay' and value is not None: - fields[key] = (None, str(value), 'text/plain; type=number') - else: - fields[key] = (None, str(value), 'text/plain') - - if file: - if isinstance(file, str): - mime_type = mimetypes.guess_type(file)[0] or 'application/octet-stream' - fields['file'] = ('file', open(file, 'rb'), mime_type) - else: - fields['file'] = ('file', file, 'application/octet-stream') - - multipart = MultipartEncoder(fields=fields) - headers = self.client._get_headers(instance_token) - headers['Content-Type'] = multipart.content_type - - url = f'{self.client.base_url}/message/sendPtv/{instance_id}' - response = requests.post(url, headers=headers, data=multipart) - return response.json() - - def send_whatsapp_audio(self, instance_id: str, message: dict, instance_token: str, file: Union[BinaryIO, str] = None): - fields = {} - - # Adiciona todos os campos do message como text/plain - for key, value in message.items(): - if key == 'delay' and value is not None: - fields[key] = (None, str(value), 'text/plain; type=number') - else: - fields[key] = (None, str(value), 'text/plain') - - if file: - if isinstance(file, str): - mime_type = mimetypes.guess_type(file)[0] or 'application/octet-stream' - fields['file'] = ('file', open(file, 'rb'), mime_type) - else: - fields['file'] = ('file', file, 'application/octet-stream') - - multipart = MultipartEncoder(fields=fields) - headers = 
self.client._get_headers(instance_token) - headers['Content-Type'] = multipart.content_type - - url = f'{self.client.base_url}/message/sendWhatsAppAudio/{instance_id}' - response = requests.post(url, headers=headers, data=multipart) - return response.json() - - def send_status(self, instance_id: str, message: StatusMessage, instance_token: str): - return self.client.post( - f'message/sendStatus/{instance_id}', - data=message.__dict__, - instance_token=instance_token - ) - - def send_sticker(self, instance_id: str, message: dict, instance_token: str, file: Union[BinaryIO, str] = None): - fields = {} - - # Adiciona todos os campos do message como text/plain - for key, value in message.items(): - if key == 'delay' and value is not None: - fields[key] = (None, str(value), 'text/plain; type=number') - else: - fields[key] = (None, str(value), 'text/plain') - - if file: - if isinstance(file, str): - mime_type = mimetypes.guess_type(file)[0] or 'application/octet-stream' - fields['file'] = ('file', open(file, 'rb'), mime_type) - else: - fields['file'] = ('file', file, 'application/octet-stream') - - multipart = MultipartEncoder(fields=fields) - headers = self.client._get_headers(instance_token) - headers['Content-Type'] = multipart.content_type - - url = f'{self.client.base_url}/message/sendSticker/{instance_id}' - response = requests.post(url, headers=headers, data=multipart) - return response.json() - - def send_location(self, instance_id: str, message: LocationMessage, instance_token: str): - data = message.__dict__.copy() - if 'delay' in data and data['delay'] is not None: - data['delay'] = int(data['delay']) - - return self.client.post( - f'message/sendLocation/{instance_id}', - data=data, - instance_token=instance_token - ) - - def send_contact(self, instance_id: str, message: ContactMessage, instance_token: str): - return self.client.post( - f'message/sendContact/{instance_id}', - data=message.__dict__, - instance_token=instance_token - ) - - def send_reaction(self, instance_id: str, message: ReactionMessage, instance_token: str): - return self.client.post( - f'message/sendReaction/{instance_id}', - data=message.__dict__, - instance_token=instance_token - ) - - def send_poll(self, instance_id: str, message: PollMessage, instance_token: str): - data = message.__dict__.copy() - if 'delay' in data and data['delay'] is not None: - data['delay'] = int(data['delay']) - - return self.client.post( - f'message/sendPoll/{instance_id}', - data=data, - instance_token=instance_token - ) - - def send_list(self, instance_id: str, message: ListMessage, instance_token: str): - data = message.__dict__.copy() - if 'delay' in data and data['delay'] is not None: - data['delay'] = int(data['delay']) - - return self.client.post( - f'message/sendList/{instance_id}', - data=data, - instance_token=instance_token - ) - - def send_buttons(self, instance_id: str, message: ButtonMessage, instance_token: str): - data = message.__dict__.copy() - if 'delay' in data and data['delay'] is not None: - data['delay'] = int(data['delay']) - - return self.client.post( - f'message/sendButtons/{instance_id}', - data=data, - instance_token=instance_token - ) \ No newline at end of file diff --git a/build/lib/evolutionapi/services/profile.py b/build/lib/evolutionapi/services/profile.py deleted file mode 100644 index b0da8ab..0000000 --- a/build/lib/evolutionapi/services/profile.py +++ /dev/null @@ -1,60 +0,0 @@ -from typing import Union, BinaryIO -from ..models.profile import * - -class ProfileService: - def __init__(self, client): - 
self.client = client - - def fetch_business_profile(self, instance_id: str, data: FetchProfile, instance_token: str): - return self.client.post( - f'chat/fetchBusinessProfile/{instance_id}', - data=data.__dict__, - instance_token=instance_token - ) - - def fetch_profile(self, instance_id: str, data: FetchProfile, instance_token: str): - return self.client.post( - f'chat/fetchProfile/{instance_id}', - data=data.__dict__, - instance_token=instance_token - ) - - def update_profile_name(self, instance_id: str, data: ProfileName, instance_token: str): - return self.client.post( - f'chat/updateProfileName/{instance_id}', - data=data.__dict__, - instance_token=instance_token - ) - - def update_profile_status(self, instance_id: str, data: ProfileStatus, instance_token: str): - return self.client.post( - f'chat/updateProfileStatus/{instance_id}', - data=data.__dict__, - instance_token=instance_token - ) - - def update_profile_picture(self, instance_id: str, data: ProfilePicture, instance_token: str): - return self.client.post( - f'chat/updateProfilePicture/{instance_id}', - data=data.__dict__, - instance_token=instance_token - ) - - def remove_profile_picture(self, instance_id: str, instance_token: str): - return self.client.delete( - f'chat/removeProfilePicture/{instance_id}', - instance_token=instance_token - ) - - def fetch_privacy_settings(self, instance_id: str, instance_token: str): - return self.client.get( - f'chat/fetchPrivacySettings/{instance_id}', - instance_token=instance_token - ) - - def update_privacy_settings(self, instance_id: str, data: PrivacySettings, instance_token: str): - return self.client.post( - f'chat/updatePrivacySettings/{instance_id}', - data=data.__dict__, - instance_token=instance_token - ) \ No newline at end of file diff --git a/build/lib/evolutionapi/services/websocket.py b/build/lib/evolutionapi/services/websocket.py deleted file mode 100644 index 6839881..0000000 --- a/build/lib/evolutionapi/services/websocket.py +++ /dev/null @@ -1,174 +0,0 @@ -import socketio -from typing import Callable, Dict, Any -import logging -import time -from typing import Optional -from ..models.websocket import WebSocketConfig, WebSocketInfo - -class WebSocketService: - def __init__(self, client): - self.client = client - - def set_websocket(self, instance_id: str, config: WebSocketConfig, instance_token: str): - """ - Configure WebSocket settings for an instance - - Args: - instance_id (str): The instance ID - config (WebSocketConfig): The WebSocket configuration - instance_token (str): The instance token - - Returns: - dict: The response from the API - """ - return self.client.post( - f'websocket/set/{instance_id}', - data=config.__dict__, - instance_token=instance_token - ) - - def find_websocket(self, instance_id: str, instance_token: str) -> WebSocketInfo: - """ - Get WebSocket settings for an instance - - Args: - instance_id (str): The instance ID - instance_token (str): The instance token - - Returns: - WebSocketInfo: The WebSocket information - """ - response = self.client.get( - f'websocket/find/{instance_id}', - instance_token=instance_token - ) - return WebSocketInfo(**response) - -class WebSocketManager: - def __init__(self, base_url: str, instance_id: str, api_token: str, max_retries: int = 5, retry_delay: float = 1.0): - """ - Initialize the WebSocket manager - - Args: - base_url (str): Base URL of the API - instance_id (str): Instance ID - api_token (str): API authentication token - max_retries (int): Maximum number of reconnection attempts - retry_delay (float): 
Initial delay between attempts in seconds - """ - self.base_url = base_url.rstrip('/') - self.instance_id = instance_id - self.api_token = api_token - self.max_retries = max_retries - self.retry_delay = retry_delay - self.retry_count = 0 - self.should_reconnect = True - - # Socket.IO configuration - self.sio = socketio.Client( - ssl_verify=False, # For local development - logger=False, - engineio_logger=False, - request_timeout=30 - ) - - # Configure class logger to INFO - self.logger = logging.getLogger(__name__) - self.logger.setLevel(logging.INFO) - - # Dictionary to store registered handlers - self._handlers = {} - - # Configure event handlers - self.sio.on('connect', self._on_connect) - self.sio.on('disconnect', self._on_disconnect) - self.sio.on('error', self._on_error) - - # Register global handler in instance-specific namespace - self.sio.on('*', self._handle_event, namespace=f'/{self.instance_id}') - - def _on_connect(self): - """Handler for connection event""" - self.logger.info("Socket.IO connected") - self.retry_count = 0 # Reset retry counter after successful connection - - def _on_disconnect(self): - """Handler for disconnection event""" - self.logger.warning(f"Socket.IO disconnected. Attempt {self.retry_count + 1}/{self.max_retries}") - if self.should_reconnect and self.retry_count < self.max_retries: - self._attempt_reconnect() - else: - self.logger.error("Maximum number of reconnection attempts reached") - - def _on_error(self, error): - """Handler for error events""" - self.logger.error(f"Socket.IO error: {str(error)}", exc_info=True) - - def _attempt_reconnect(self): - """Attempt to reconnect with exponential backoff""" - try: - delay = self.retry_delay * (2 ** self.retry_count) # Exponential backoff - self.logger.info(f"Attempting to reconnect in {delay:.2f} seconds...") - time.sleep(delay) - self.connect() - self.retry_count += 1 - except Exception as e: - self.logger.error(f"Error during reconnection attempt: {str(e)}", exc_info=True) - if self.retry_count < self.max_retries: - self._attempt_reconnect() - else: - self.logger.error("All reconnection attempts failed") - - def _handle_event(self, event, *args): - """Global handler for all events""" - # Only process registered events - if event in self._handlers: - self.logger.debug(f"Event received in namespace /{self.instance_id}: {event}") - self.logger.debug(f"Event data: {args}") - - try: - # Extract event data - raw_data = args[0] if args else {} - - # Ensure we're passing the correct object to the callback - if isinstance(raw_data, dict): - self.logger.debug(f"Calling handler for {event} with data: {raw_data}") - self._handlers[event](raw_data) - else: - self.logger.error(f"Invalid data received for event {event}: {raw_data}") - except Exception as e: - self.logger.error(f"Error processing event {event}: {str(e)}", exc_info=True) - - def connect(self): - """Connect to Socket.IO server""" - try: - # Connect only to instance namespace with authentication header - self.sio.connect( - f"{self.base_url}?apikey={self.api_token}", - transports=['websocket'], - namespaces=[f'/{self.instance_id}'], - wait_timeout=30 - ) - - # Join instance-specific room - self.sio.emit('subscribe', {'instance': self.instance_id}, namespace=f'/{self.instance_id}') - - except Exception as e: - self.logger.error(f"Error connecting to Socket.IO: {str(e)}", exc_info=True) - raise - - def disconnect(self): - """Disconnect from Socket.IO server""" - self.should_reconnect = False # Prevent reconnection attempts - if self.sio.connected: - 
self.sio.disconnect() - - def on(self, event: str, callback: Callable): - """ - Register a callback for a specific event - - Args: - event (str): Event name - callback (Callable): Function to be called when the event occurs - """ - self._handlers[event] = callback \ No newline at end of file diff --git a/env/bin/Activate.ps1 b/env/bin/Activate.ps1 deleted file mode 100644 index b49d77b..0000000 --- a/env/bin/Activate.ps1 +++ /dev/null @@ -1,247 +0,0 @@ -<# -.Synopsis -Activate a Python virtual environment for the current PowerShell session. - -.Description -Pushes the python executable for a virtual environment to the front of the -$Env:PATH environment variable and sets the prompt to signify that you are -in a Python virtual environment. Makes use of the command line switches as -well as the `pyvenv.cfg` file values present in the virtual environment. - -.Parameter VenvDir -Path to the directory that contains the virtual environment to activate. The -default value for this is the parent of the directory that the Activate.ps1 -script is located within. - -.Parameter Prompt -The prompt prefix to display when this virtual environment is activated. By -default, this prompt is the name of the virtual environment folder (VenvDir) -surrounded by parentheses and followed by a single space (ie. '(.venv) '). - -.Example -Activate.ps1 -Activates the Python virtual environment that contains the Activate.ps1 script. - -.Example -Activate.ps1 -Verbose -Activates the Python virtual environment that contains the Activate.ps1 script, -and shows extra information about the activation as it executes. - -.Example -Activate.ps1 -VenvDir C:\Users\MyUser\Common\.venv -Activates the Python virtual environment located in the specified location. - -.Example -Activate.ps1 -Prompt "MyPython" -Activates the Python virtual environment that contains the Activate.ps1 script, -and prefixes the current prompt with the specified string (surrounded in -parentheses) while the virtual environment is active. - -.Notes -On Windows, it may be required to enable this Activate.ps1 script by setting the -execution policy for the user. You can do this by issuing the following PowerShell -command: - -PS C:\> Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser - -For more information on Execution Policies: -https://go.microsoft.com/fwlink/?LinkID=135170 - -#> -Param( - [Parameter(Mandatory = $false)] - [String] - $VenvDir, - [Parameter(Mandatory = $false)] - [String] - $Prompt -) - -<# Function declarations --------------------------------------------------- #> - -<# -.Synopsis -Remove all shell session elements added by the Activate script, including the -addition of the virtual environment's Python executable from the beginning of -the PATH variable. - -.Parameter NonDestructive -If present, do not remove this function from the global namespace for the -session. 
- -#> -function global:deactivate ([switch]$NonDestructive) { - # Revert to original values - - # The prior prompt: - if (Test-Path -Path Function:_OLD_VIRTUAL_PROMPT) { - Copy-Item -Path Function:_OLD_VIRTUAL_PROMPT -Destination Function:prompt - Remove-Item -Path Function:_OLD_VIRTUAL_PROMPT - } - - # The prior PYTHONHOME: - if (Test-Path -Path Env:_OLD_VIRTUAL_PYTHONHOME) { - Copy-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME -Destination Env:PYTHONHOME - Remove-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME - } - - # The prior PATH: - if (Test-Path -Path Env:_OLD_VIRTUAL_PATH) { - Copy-Item -Path Env:_OLD_VIRTUAL_PATH -Destination Env:PATH - Remove-Item -Path Env:_OLD_VIRTUAL_PATH - } - - # Just remove the VIRTUAL_ENV altogether: - if (Test-Path -Path Env:VIRTUAL_ENV) { - Remove-Item -Path env:VIRTUAL_ENV - } - - # Just remove VIRTUAL_ENV_PROMPT altogether. - if (Test-Path -Path Env:VIRTUAL_ENV_PROMPT) { - Remove-Item -Path env:VIRTUAL_ENV_PROMPT - } - - # Just remove the _PYTHON_VENV_PROMPT_PREFIX altogether: - if (Get-Variable -Name "_PYTHON_VENV_PROMPT_PREFIX" -ErrorAction SilentlyContinue) { - Remove-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Scope Global -Force - } - - # Leave deactivate function in the global namespace if requested: - if (-not $NonDestructive) { - Remove-Item -Path function:deactivate - } -} - -<# -.Description -Get-PyVenvConfig parses the values from the pyvenv.cfg file located in the -given folder, and returns them in a map. - -For each line in the pyvenv.cfg file, if that line can be parsed into exactly -two strings separated by `=` (with any amount of whitespace surrounding the =) -then it is considered a `key = value` line. The left hand string is the key, -the right hand is the value. - -If the value starts with a `'` or a `"` then the first and last character is -stripped from the value before being captured. - -.Parameter ConfigDir -Path to the directory that contains the `pyvenv.cfg` file. -#> -function Get-PyVenvConfig( - [String] - $ConfigDir -) { - Write-Verbose "Given ConfigDir=$ConfigDir, obtain values in pyvenv.cfg" - - # Ensure the file exists, and issue a warning if it doesn't (but still allow the function to continue). - $pyvenvConfigPath = Join-Path -Resolve -Path $ConfigDir -ChildPath 'pyvenv.cfg' -ErrorAction Continue - - # An empty map will be returned if no config file is found. - $pyvenvConfig = @{ } - - if ($pyvenvConfigPath) { - - Write-Verbose "File exists, parse `key = value` lines" - $pyvenvConfigContent = Get-Content -Path $pyvenvConfigPath - - $pyvenvConfigContent | ForEach-Object { - $keyval = $PSItem -split "\s*=\s*", 2 - if ($keyval[0] -and $keyval[1]) { - $val = $keyval[1] - - # Remove extraneous quotations around a string value. 
- if ("'""".Contains($val.Substring(0, 1))) { - $val = $val.Substring(1, $val.Length - 2) - } - - $pyvenvConfig[$keyval[0]] = $val - Write-Verbose "Adding Key: '$($keyval[0])'='$val'" - } - } - } - return $pyvenvConfig -} - - -<# Begin Activate script --------------------------------------------------- #> - -# Determine the containing directory of this script -$VenvExecPath = Split-Path -Parent $MyInvocation.MyCommand.Definition -$VenvExecDir = Get-Item -Path $VenvExecPath - -Write-Verbose "Activation script is located in path: '$VenvExecPath'" -Write-Verbose "VenvExecDir Fullname: '$($VenvExecDir.FullName)" -Write-Verbose "VenvExecDir Name: '$($VenvExecDir.Name)" - -# Set values required in priority: CmdLine, ConfigFile, Default -# First, get the location of the virtual environment, it might not be -# VenvExecDir if specified on the command line. -if ($VenvDir) { - Write-Verbose "VenvDir given as parameter, using '$VenvDir' to determine values" -} -else { - Write-Verbose "VenvDir not given as a parameter, using parent directory name as VenvDir." - $VenvDir = $VenvExecDir.Parent.FullName.TrimEnd("\\/") - Write-Verbose "VenvDir=$VenvDir" -} - -# Next, read the `pyvenv.cfg` file to determine any required value such -# as `prompt`. -$pyvenvCfg = Get-PyVenvConfig -ConfigDir $VenvDir - -# Next, set the prompt from the command line, or the config file, or -# just use the name of the virtual environment folder. -if ($Prompt) { - Write-Verbose "Prompt specified as argument, using '$Prompt'" -} -else { - Write-Verbose "Prompt not specified as argument to script, checking pyvenv.cfg value" - if ($pyvenvCfg -and $pyvenvCfg['prompt']) { - Write-Verbose " Setting based on value in pyvenv.cfg='$($pyvenvCfg['prompt'])'" - $Prompt = $pyvenvCfg['prompt']; - } - else { - Write-Verbose " Setting prompt based on parent's directory's name. (Is the directory name passed to venv module when creating the virtual environment)" - Write-Verbose " Got leaf-name of $VenvDir='$(Split-Path -Path $venvDir -Leaf)'" - $Prompt = Split-Path -Path $venvDir -Leaf - } -} - -Write-Verbose "Prompt = '$Prompt'" -Write-Verbose "VenvDir='$VenvDir'" - -# Deactivate any currently active virtual environment, but leave the -# deactivate function in place. -deactivate -nondestructive - -# Now set the environment variable VIRTUAL_ENV, used by many tools to determine -# that there is an activated venv. 
-$env:VIRTUAL_ENV = $VenvDir - -if (-not $Env:VIRTUAL_ENV_DISABLE_PROMPT) { - - Write-Verbose "Setting prompt to '$Prompt'" - - # Set the prompt to include the env name - # Make sure _OLD_VIRTUAL_PROMPT is global - function global:_OLD_VIRTUAL_PROMPT { "" } - Copy-Item -Path function:prompt -Destination function:_OLD_VIRTUAL_PROMPT - New-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Description "Python virtual environment prompt prefix" -Scope Global -Option ReadOnly -Visibility Public -Value $Prompt - - function global:prompt { - Write-Host -NoNewline -ForegroundColor Green "($_PYTHON_VENV_PROMPT_PREFIX) " - _OLD_VIRTUAL_PROMPT - } - $env:VIRTUAL_ENV_PROMPT = $Prompt -} - -# Clear PYTHONHOME -if (Test-Path -Path Env:PYTHONHOME) { - Copy-Item -Path Env:PYTHONHOME -Destination Env:_OLD_VIRTUAL_PYTHONHOME - Remove-Item -Path Env:PYTHONHOME -} - -# Add the venv to the PATH -Copy-Item -Path Env:PATH -Destination Env:_OLD_VIRTUAL_PATH -$Env:PATH = "$VenvExecDir$([System.IO.Path]::PathSeparator)$Env:PATH" diff --git a/env/bin/activate b/env/bin/activate deleted file mode 100644 index 23ea076..0000000 --- a/env/bin/activate +++ /dev/null @@ -1,69 +0,0 @@ -# This file must be used with "source bin/activate" *from bash* -# you cannot run it directly - -deactivate () { - # reset old environment variables - if [ -n "${_OLD_VIRTUAL_PATH:-}" ] ; then - PATH="${_OLD_VIRTUAL_PATH:-}" - export PATH - unset _OLD_VIRTUAL_PATH - fi - if [ -n "${_OLD_VIRTUAL_PYTHONHOME:-}" ] ; then - PYTHONHOME="${_OLD_VIRTUAL_PYTHONHOME:-}" - export PYTHONHOME - unset _OLD_VIRTUAL_PYTHONHOME - fi - - # This should detect bash and zsh, which have a hash command that must - # be called to get it to forget past commands. Without forgetting - # past commands the $PATH changes we made may not be respected - if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then - hash -r 2> /dev/null - fi - - if [ -n "${_OLD_VIRTUAL_PS1:-}" ] ; then - PS1="${_OLD_VIRTUAL_PS1:-}" - export PS1 - unset _OLD_VIRTUAL_PS1 - fi - - unset VIRTUAL_ENV - unset VIRTUAL_ENV_PROMPT - if [ ! "${1:-}" = "nondestructive" ] ; then - # Self destruct! - unset -f deactivate - fi -} - -# unset irrelevant variables -deactivate nondestructive - -VIRTUAL_ENV="/home/davidson/Projects/evolution_client/python/env" -export VIRTUAL_ENV - -_OLD_VIRTUAL_PATH="$PATH" -PATH="$VIRTUAL_ENV/bin:$PATH" -export PATH - -# unset PYTHONHOME if set -# this will fail if PYTHONHOME is set to the empty string (which is bad anyway) -# could use `if (set -u; : $PYTHONHOME) ;` in bash -if [ -n "${PYTHONHOME:-}" ] ; then - _OLD_VIRTUAL_PYTHONHOME="${PYTHONHOME:-}" - unset PYTHONHOME -fi - -if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT:-}" ] ; then - _OLD_VIRTUAL_PS1="${PS1:-}" - PS1="(env) ${PS1:-}" - export PS1 - VIRTUAL_ENV_PROMPT="(env) " - export VIRTUAL_ENV_PROMPT -fi - -# This should detect bash and zsh, which have a hash command that must -# be called to get it to forget past commands. Without forgetting -# past commands the $PATH changes we made may not be respected -if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then - hash -r 2> /dev/null -fi diff --git a/env/bin/activate.csh b/env/bin/activate.csh deleted file mode 100644 index c17b58b..0000000 --- a/env/bin/activate.csh +++ /dev/null @@ -1,26 +0,0 @@ -# This file must be used with "source bin/activate.csh" *from csh*. -# You cannot run it directly. -# Created by Davide Di Blasi . 
-# Ported to Python 3.3 venv by Andrew Svetlov - -alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; unsetenv VIRTUAL_ENV_PROMPT; test "\!:*" != "nondestructive" && unalias deactivate' - -# Unset irrelevant variables. -deactivate nondestructive - -setenv VIRTUAL_ENV "/home/davidson/Projects/evolution_client/python/env" - -set _OLD_VIRTUAL_PATH="$PATH" -setenv PATH "$VIRTUAL_ENV/bin:$PATH" - - -set _OLD_VIRTUAL_PROMPT="$prompt" - -if (! "$?VIRTUAL_ENV_DISABLE_PROMPT") then - set prompt = "(env) $prompt" - setenv VIRTUAL_ENV_PROMPT "(env) " -endif - -alias pydoc python -m pydoc - -rehash diff --git a/env/bin/activate.fish b/env/bin/activate.fish deleted file mode 100644 index dbc64df..0000000 --- a/env/bin/activate.fish +++ /dev/null @@ -1,69 +0,0 @@ -# This file must be used with "source /bin/activate.fish" *from fish* -# (https://fishshell.com/); you cannot run it directly. - -function deactivate -d "Exit virtual environment and return to normal shell environment" - # reset old environment variables - if test -n "$_OLD_VIRTUAL_PATH" - set -gx PATH $_OLD_VIRTUAL_PATH - set -e _OLD_VIRTUAL_PATH - end - if test -n "$_OLD_VIRTUAL_PYTHONHOME" - set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME - set -e _OLD_VIRTUAL_PYTHONHOME - end - - if test -n "$_OLD_FISH_PROMPT_OVERRIDE" - set -e _OLD_FISH_PROMPT_OVERRIDE - # prevents error when using nested fish instances (Issue #93858) - if functions -q _old_fish_prompt - functions -e fish_prompt - functions -c _old_fish_prompt fish_prompt - functions -e _old_fish_prompt - end - end - - set -e VIRTUAL_ENV - set -e VIRTUAL_ENV_PROMPT - if test "$argv[1]" != "nondestructive" - # Self-destruct! - functions -e deactivate - end -end - -# Unset irrelevant variables. -deactivate nondestructive - -set -gx VIRTUAL_ENV "/home/davidson/Projects/evolution_client/python/env" - -set -gx _OLD_VIRTUAL_PATH $PATH -set -gx PATH "$VIRTUAL_ENV/bin" $PATH - -# Unset PYTHONHOME if set. -if set -q PYTHONHOME - set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME - set -e PYTHONHOME -end - -if test -z "$VIRTUAL_ENV_DISABLE_PROMPT" - # fish uses a function instead of an env var to generate the prompt. - - # Save the current fish_prompt function as the function _old_fish_prompt. - functions -c fish_prompt _old_fish_prompt - - # With the original prompt function renamed, we can override with our own. - function fish_prompt - # Save the return status of the last command. - set -l old_status $status - - # Output the venv prompt; color taken from the blue of the Python logo. - printf "%s%s%s" (set_color 4B8BBE) "(env) " (set_color normal) - - # Restore the return status of the previous command. - echo "exit $old_status" | . - # Output the original/"old" prompt. 
- _old_fish_prompt - end - - set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV" - set -gx VIRTUAL_ENV_PROMPT "(env) " -end diff --git a/env/bin/docutils b/env/bin/docutils deleted file mode 100755 index 92273b3..0000000 --- a/env/bin/docutils +++ /dev/null @@ -1,8 +0,0 @@ -#!/home/davidson/Projects/evolution_client/python/env/bin/python -# -*- coding: utf-8 -*- -import re -import sys -from docutils.__main__ import main -if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) - sys.exit(main()) diff --git a/env/bin/keyring b/env/bin/keyring deleted file mode 100755 index 3d157c1..0000000 --- a/env/bin/keyring +++ /dev/null @@ -1,8 +0,0 @@ -#!/home/davidson/Projects/evolution_client/python/env/bin/python -# -*- coding: utf-8 -*- -import re -import sys -from keyring.cli import main -if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) - sys.exit(main()) diff --git a/env/bin/markdown-it b/env/bin/markdown-it deleted file mode 100755 index 81dd709..0000000 --- a/env/bin/markdown-it +++ /dev/null @@ -1,8 +0,0 @@ -#!/home/davidson/Projects/evolution_client/python/env/bin/python -# -*- coding: utf-8 -*- -import re -import sys -from markdown_it.cli.parse import main -if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) - sys.exit(main()) diff --git a/env/bin/normalizer b/env/bin/normalizer deleted file mode 100755 index 86c04b3..0000000 --- a/env/bin/normalizer +++ /dev/null @@ -1,8 +0,0 @@ -#!/home/davidson/Projects/evolution_client/python/env/bin/python -# -*- coding: utf-8 -*- -import re -import sys -from charset_normalizer.cli import cli_detect -if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) - sys.exit(cli_detect()) diff --git a/env/bin/pip b/env/bin/pip deleted file mode 100755 index 8e80bbe..0000000 --- a/env/bin/pip +++ /dev/null @@ -1,8 +0,0 @@ -#!/home/davidson/Projects/evolution_client/python/env/bin/python -# -*- coding: utf-8 -*- -import re -import sys -from pip._internal.cli.main import main -if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) - sys.exit(main()) diff --git a/env/bin/pip3 b/env/bin/pip3 deleted file mode 100755 index 8e80bbe..0000000 --- a/env/bin/pip3 +++ /dev/null @@ -1,8 +0,0 @@ -#!/home/davidson/Projects/evolution_client/python/env/bin/python -# -*- coding: utf-8 -*- -import re -import sys -from pip._internal.cli.main import main -if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) - sys.exit(main()) diff --git a/env/bin/pip3.10 b/env/bin/pip3.10 deleted file mode 100755 index 8e80bbe..0000000 --- a/env/bin/pip3.10 +++ /dev/null @@ -1,8 +0,0 @@ -#!/home/davidson/Projects/evolution_client/python/env/bin/python -# -*- coding: utf-8 -*- -import re -import sys -from pip._internal.cli.main import main -if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) - sys.exit(main()) diff --git a/env/bin/pkginfo b/env/bin/pkginfo deleted file mode 100755 index a3413de..0000000 --- a/env/bin/pkginfo +++ /dev/null @@ -1,8 +0,0 @@ -#!/home/davidson/Projects/evolution_client/python/env/bin/python -# -*- coding: utf-8 -*- -import re -import sys -from pkginfo.commandline import main -if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) - sys.exit(main()) diff --git a/env/bin/pygmentize b/env/bin/pygmentize deleted file mode 100755 index f515992..0000000 --- 
a/env/bin/pygmentize +++ /dev/null @@ -1,8 +0,0 @@ -#!/home/davidson/Projects/evolution_client/python/env/bin/python -# -*- coding: utf-8 -*- -import re -import sys -from pygments.cmdline import main -if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) - sys.exit(main()) diff --git a/env/bin/python b/env/bin/python deleted file mode 120000 index acd4152..0000000 --- a/env/bin/python +++ /dev/null @@ -1 +0,0 @@ -/usr/bin/python \ No newline at end of file diff --git a/env/bin/python3 b/env/bin/python3 deleted file mode 120000 index d8654aa..0000000 --- a/env/bin/python3 +++ /dev/null @@ -1 +0,0 @@ -python \ No newline at end of file diff --git a/env/bin/python3.10 b/env/bin/python3.10 deleted file mode 120000 index d8654aa..0000000 --- a/env/bin/python3.10 +++ /dev/null @@ -1 +0,0 @@ -python \ No newline at end of file diff --git a/env/bin/rst2html b/env/bin/rst2html deleted file mode 100755 index 63d001d..0000000 --- a/env/bin/rst2html +++ /dev/null @@ -1,8 +0,0 @@ -#!/home/davidson/Projects/evolution_client/python/env/bin/python -# -*- coding: utf-8 -*- -import re -import sys -from docutils.core import rst2html -if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) - sys.exit(rst2html()) diff --git a/env/bin/rst2html4 b/env/bin/rst2html4 deleted file mode 100755 index 7027254..0000000 --- a/env/bin/rst2html4 +++ /dev/null @@ -1,8 +0,0 @@ -#!/home/davidson/Projects/evolution_client/python/env/bin/python -# -*- coding: utf-8 -*- -import re -import sys -from docutils.core import rst2html4 -if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) - sys.exit(rst2html4()) diff --git a/env/bin/rst2html5 b/env/bin/rst2html5 deleted file mode 100755 index ef6f5e5..0000000 --- a/env/bin/rst2html5 +++ /dev/null @@ -1,8 +0,0 @@ -#!/home/davidson/Projects/evolution_client/python/env/bin/python -# -*- coding: utf-8 -*- -import re -import sys -from docutils.core import rst2html5 -if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) - sys.exit(rst2html5()) diff --git a/env/bin/rst2latex b/env/bin/rst2latex deleted file mode 100755 index e2ae11e..0000000 --- a/env/bin/rst2latex +++ /dev/null @@ -1,8 +0,0 @@ -#!/home/davidson/Projects/evolution_client/python/env/bin/python -# -*- coding: utf-8 -*- -import re -import sys -from docutils.core import rst2latex -if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) - sys.exit(rst2latex()) diff --git a/env/bin/rst2man b/env/bin/rst2man deleted file mode 100755 index 8435de8..0000000 --- a/env/bin/rst2man +++ /dev/null @@ -1,8 +0,0 @@ -#!/home/davidson/Projects/evolution_client/python/env/bin/python -# -*- coding: utf-8 -*- -import re -import sys -from docutils.core import rst2man -if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) - sys.exit(rst2man()) diff --git a/env/bin/rst2odt b/env/bin/rst2odt deleted file mode 100755 index c500c16..0000000 --- a/env/bin/rst2odt +++ /dev/null @@ -1,8 +0,0 @@ -#!/home/davidson/Projects/evolution_client/python/env/bin/python -# -*- coding: utf-8 -*- -import re -import sys -from docutils.core import rst2odt -if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) - sys.exit(rst2odt()) diff --git a/env/bin/rst2pseudoxml b/env/bin/rst2pseudoxml deleted file mode 100755 index 421bc37..0000000 --- a/env/bin/rst2pseudoxml +++ /dev/null @@ -1,8 +0,0 @@ 
-#!/home/davidson/Projects/evolution_client/python/env/bin/python -# -*- coding: utf-8 -*- -import re -import sys -from docutils.core import rst2pseudoxml -if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) - sys.exit(rst2pseudoxml()) diff --git a/env/bin/rst2s5 b/env/bin/rst2s5 deleted file mode 100755 index 7d77888..0000000 --- a/env/bin/rst2s5 +++ /dev/null @@ -1,8 +0,0 @@ -#!/home/davidson/Projects/evolution_client/python/env/bin/python -# -*- coding: utf-8 -*- -import re -import sys -from docutils.core import rst2s5 -if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) - sys.exit(rst2s5()) diff --git a/env/bin/rst2xetex b/env/bin/rst2xetex deleted file mode 100755 index febd617..0000000 --- a/env/bin/rst2xetex +++ /dev/null @@ -1,8 +0,0 @@ -#!/home/davidson/Projects/evolution_client/python/env/bin/python -# -*- coding: utf-8 -*- -import re -import sys -from docutils.core import rst2xetex -if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) - sys.exit(rst2xetex()) diff --git a/env/bin/rst2xml b/env/bin/rst2xml deleted file mode 100755 index ddb772a..0000000 --- a/env/bin/rst2xml +++ /dev/null @@ -1,8 +0,0 @@ -#!/home/davidson/Projects/evolution_client/python/env/bin/python -# -*- coding: utf-8 -*- -import re -import sys -from docutils.core import rst2xml -if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) - sys.exit(rst2xml()) diff --git a/env/bin/twine b/env/bin/twine deleted file mode 100755 index 14cab11..0000000 --- a/env/bin/twine +++ /dev/null @@ -1,8 +0,0 @@ -#!/home/davidson/Projects/evolution_client/python/env/bin/python -# -*- coding: utf-8 -*- -import re -import sys -from twine.__main__ import main -if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) - sys.exit(main()) diff --git a/env/bin/wheel b/env/bin/wheel deleted file mode 100755 index a92e3be..0000000 --- a/env/bin/wheel +++ /dev/null @@ -1,8 +0,0 @@ -#!/home/davidson/Projects/evolution_client/python/env/bin/python -# -*- coding: utf-8 -*- -import re -import sys -from wheel.cli import main -if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) - sys.exit(main()) diff --git a/env/bin/wsdump b/env/bin/wsdump deleted file mode 100755 index cffc8dc..0000000 --- a/env/bin/wsdump +++ /dev/null @@ -1,8 +0,0 @@ -#!/home/davidson/Projects/evolution_client/python/env/bin/python -# -*- coding: utf-8 -*- -import re -import sys -from websocket._wsdump import main -if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) - sys.exit(main()) diff --git a/env/lib/python3.10/site-packages/SecretStorage-3.3.3.dist-info/INSTALLER b/env/lib/python3.10/site-packages/SecretStorage-3.3.3.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/env/lib/python3.10/site-packages/SecretStorage-3.3.3.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python3.10/site-packages/SecretStorage-3.3.3.dist-info/LICENSE b/env/lib/python3.10/site-packages/SecretStorage-3.3.3.dist-info/LICENSE deleted file mode 100644 index a343a62..0000000 --- a/env/lib/python3.10/site-packages/SecretStorage-3.3.3.dist-info/LICENSE +++ /dev/null @@ -1,25 +0,0 @@ -Copyright 2012-2018 Dmitry Shachnev -All rights reserved. 
- -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - -1. Redistributions of source code must retain the above copyright notice, this - list of conditions and the following disclaimer. -2. Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. -3. Neither the name of the University nor the names of its contributors may be - used to endorse or promote products derived from this software without - specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE FOR ANY -DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON -ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/env/lib/python3.10/site-packages/SecretStorage-3.3.3.dist-info/METADATA b/env/lib/python3.10/site-packages/SecretStorage-3.3.3.dist-info/METADATA deleted file mode 100644 index f0323fd..0000000 --- a/env/lib/python3.10/site-packages/SecretStorage-3.3.3.dist-info/METADATA +++ /dev/null @@ -1,114 +0,0 @@ -Metadata-Version: 2.1 -Name: SecretStorage -Version: 3.3.3 -Summary: Python bindings to FreeDesktop.org Secret Service API -Home-page: https://github.com/mitya57/secretstorage -Author: Dmitry Shachnev -Author-email: mitya57@gmail.com -License: BSD 3-Clause License -Platform: Linux -Classifier: Development Status :: 5 - Production/Stable -Classifier: License :: OSI Approved :: BSD License -Classifier: Operating System :: POSIX -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 3 :: Only -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Topic :: Security -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Requires-Python: >=3.6 -Description-Content-Type: text/x-rst -License-File: LICENSE -Requires-Dist: cryptography (>=2.0) -Requires-Dist: jeepney (>=0.6) - -.. image:: https://github.com/mitya57/secretstorage/workflows/tests/badge.svg - :target: https://github.com/mitya57/secretstorage/actions - :alt: GitHub Actions status -.. image:: https://codecov.io/gh/mitya57/secretstorage/branch/master/graph/badge.svg - :target: https://codecov.io/gh/mitya57/secretstorage - :alt: Coverage status -.. image:: https://readthedocs.org/projects/secretstorage/badge/?version=latest - :target: https://secretstorage.readthedocs.io/en/latest/ - :alt: ReadTheDocs status - -Module description -================== - -This module provides a way for securely storing passwords and other secrets. - -It uses D-Bus `Secret Service`_ API that is supported by GNOME Keyring, -KWallet (since version 5.97) and KeePassXC. 
- -The main classes provided are ``secretstorage.Item``, representing a secret -item (that has a *label*, a *secret* and some *attributes*) and -``secretstorage.Collection``, a place items are stored in. - -SecretStorage supports most of the functions provided by Secret Service, -including creating and deleting items and collections, editing items, -locking and unlocking collections (asynchronous unlocking is also supported). - -The documentation can be found on `secretstorage.readthedocs.io`_. - -.. _`Secret Service`: https://specifications.freedesktop.org/secret-service/ -.. _`secretstorage.readthedocs.io`: https://secretstorage.readthedocs.io/en/latest/ - -Building the module -=================== - -.. note:: - SecretStorage 3.x supports Python 3.6 and newer versions. - If you have an older version of Python, install SecretStorage 2.x:: - - pip install "SecretStorage < 3" - -SecretStorage requires these packages to work: - -* Jeepney_ -* `python-cryptography`_ - -To build SecretStorage, use this command:: - - python3 setup.py build - -If you have Sphinx_ installed, you can also build the documentation:: - - python3 setup.py build_sphinx - -.. _Jeepney: https://pypi.org/project/jeepney/ -.. _`python-cryptography`: https://pypi.org/project/cryptography/ -.. _Sphinx: http://sphinx-doc.org/ - -Testing the module -================== - -First, make sure that you have the Secret Service daemon installed. -The `GNOME Keyring`_ is the reference server-side implementation for the -Secret Service specification. - -.. _`GNOME Keyring`: https://download.gnome.org/sources/gnome-keyring/ - -Then, start the daemon and unlock the ``default`` collection, if needed. -The testsuite will fail to run if the ``default`` collection exists and is -locked. If it does not exist, the testsuite can also use the temporary -``session`` collection, as provided by the GNOME Keyring. - -Then, run the Python unittest module:: - - python3 -m unittest discover -s tests - -If you want to run the tests in an isolated or headless environment, run -this command in a D-Bus session:: - - dbus-run-session -- python3 -m unittest discover -s tests - -Get the code -============ - -SecretStorage is available under BSD license. The source code can be found -on GitHub_. - -.. 
_GitHub: https://github.com/mitya57/secretstorage diff --git a/env/lib/python3.10/site-packages/SecretStorage-3.3.3.dist-info/RECORD b/env/lib/python3.10/site-packages/SecretStorage-3.3.3.dist-info/RECORD deleted file mode 100644 index 5e371bc..0000000 --- a/env/lib/python3.10/site-packages/SecretStorage-3.3.3.dist-info/RECORD +++ /dev/null @@ -1,21 +0,0 @@ -SecretStorage-3.3.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -SecretStorage-3.3.3.dist-info/LICENSE,sha256=cPa_yndjPDXvohgyjtpUhtcFTCkU1hggmA43h5dSCiU,1504 -SecretStorage-3.3.3.dist-info/METADATA,sha256=ZScD5voEgjme04wlw9OZigESMxLa2xG_eaIeZ_IMqJI,4027 -SecretStorage-3.3.3.dist-info/RECORD,, -SecretStorage-3.3.3.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92 -SecretStorage-3.3.3.dist-info/top_level.txt,sha256=hveSi1OWGaEt3kEVbjmZ0M-ASPxi6y-nTPVa-d3c0B4,14 -secretstorage/__init__.py,sha256=W1p-HB1Qh12Dv6K8ml0Wj_MzN09_dEeezJjQZAHf-O4,3364 -secretstorage/__pycache__/__init__.cpython-310.pyc,, -secretstorage/__pycache__/collection.cpython-310.pyc,, -secretstorage/__pycache__/defines.cpython-310.pyc,, -secretstorage/__pycache__/dhcrypto.cpython-310.pyc,, -secretstorage/__pycache__/exceptions.cpython-310.pyc,, -secretstorage/__pycache__/item.cpython-310.pyc,, -secretstorage/__pycache__/util.cpython-310.pyc,, -secretstorage/collection.py,sha256=lHwSOkFO5sRspgcUBoBI8ZG2au2bcUSO6X64ksVdnsQ,9436 -secretstorage/defines.py,sha256=DzUrEWzSvBlN8kK2nVXnLGlCZv7HWNyfN1AYqRmjKGE,807 -secretstorage/dhcrypto.py,sha256=BiuDoNvNmd8i7Ul4ENouRnbqFE3SUmTUSAn6RVvn7Tg,2578 -secretstorage/exceptions.py,sha256=1uUZXTua4jRZf4PKDIT2SVWcSKP2lP97s8r3eJZudio,1655 -secretstorage/item.py,sha256=3niFSjOzwrB2hV1jrg78RXgBsTrpw44852VpZHXUpeE,5813 -secretstorage/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -secretstorage/util.py,sha256=vHu01QaooMQ5sRdRDFX9pg7rrzfJWF9vg0plm3Zg0Po,6755 diff --git a/env/lib/python3.10/site-packages/SecretStorage-3.3.3.dist-info/WHEEL b/env/lib/python3.10/site-packages/SecretStorage-3.3.3.dist-info/WHEEL deleted file mode 100644 index becc9a6..0000000 --- a/env/lib/python3.10/site-packages/SecretStorage-3.3.3.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.37.1) -Root-Is-Purelib: true -Tag: py3-none-any - diff --git a/env/lib/python3.10/site-packages/SecretStorage-3.3.3.dist-info/top_level.txt b/env/lib/python3.10/site-packages/SecretStorage-3.3.3.dist-info/top_level.txt deleted file mode 100644 index 0ec6ae8..0000000 --- a/env/lib/python3.10/site-packages/SecretStorage-3.3.3.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -secretstorage diff --git a/env/lib/python3.10/site-packages/__pycache__/typing_extensions.cpython-310.pyc b/env/lib/python3.10/site-packages/__pycache__/typing_extensions.cpython-310.pyc deleted file mode 100644 index 083bb59..0000000 Binary files a/env/lib/python3.10/site-packages/__pycache__/typing_extensions.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/_cffi_backend.cpython-310-x86_64-linux-gnu.so b/env/lib/python3.10/site-packages/_cffi_backend.cpython-310-x86_64-linux-gnu.so deleted file mode 100755 index 24944c8..0000000 Binary files a/env/lib/python3.10/site-packages/_cffi_backend.cpython-310-x86_64-linux-gnu.so and /dev/null differ diff --git a/env/lib/python3.10/site-packages/_distutils_hack/__init__.py b/env/lib/python3.10/site-packages/_distutils_hack/__init__.py deleted file mode 100644 index f707416..0000000 --- 
a/env/lib/python3.10/site-packages/_distutils_hack/__init__.py +++ /dev/null @@ -1,132 +0,0 @@ -import sys -import os -import re -import importlib -import warnings - - -is_pypy = '__pypy__' in sys.builtin_module_names - - -warnings.filterwarnings('ignore', - r'.+ distutils\b.+ deprecated', - DeprecationWarning) - - -def warn_distutils_present(): - if 'distutils' not in sys.modules: - return - if is_pypy and sys.version_info < (3, 7): - # PyPy for 3.6 unconditionally imports distutils, so bypass the warning - # https://foss.heptapod.net/pypy/pypy/-/blob/be829135bc0d758997b3566062999ee8b23872b4/lib-python/3/site.py#L250 - return - warnings.warn( - "Distutils was imported before Setuptools, but importing Setuptools " - "also replaces the `distutils` module in `sys.modules`. This may lead " - "to undesirable behaviors or errors. To avoid these issues, avoid " - "using distutils directly, ensure that setuptools is installed in the " - "traditional way (e.g. not an editable install), and/or make sure " - "that setuptools is always imported before distutils.") - - -def clear_distutils(): - if 'distutils' not in sys.modules: - return - warnings.warn("Setuptools is replacing distutils.") - mods = [name for name in sys.modules if re.match(r'distutils\b', name)] - for name in mods: - del sys.modules[name] - - -def enabled(): - """ - Allow selection of distutils by environment variable. - """ - which = os.environ.get('SETUPTOOLS_USE_DISTUTILS', 'stdlib') - return which == 'local' - - -def ensure_local_distutils(): - clear_distutils() - - # With the DistutilsMetaFinder in place, - # perform an import to cause distutils to be - # loaded from setuptools._distutils. Ref #2906. - add_shim() - importlib.import_module('distutils') - remove_shim() - - # check that submodules load as expected - core = importlib.import_module('distutils.core') - assert '_distutils' in core.__file__, core.__file__ - - -def do_override(): - """ - Ensure that the local copy of distutils is preferred over stdlib. - - See https://github.com/pypa/setuptools/issues/417#issuecomment-392298401 - for more motivation. - """ - if enabled(): - warn_distutils_present() - ensure_local_distutils() - - -class DistutilsMetaFinder: - def find_spec(self, fullname, path, target=None): - if path is not None: - return - - method_name = 'spec_for_{fullname}'.format(**locals()) - method = getattr(self, method_name, lambda: None) - return method() - - def spec_for_distutils(self): - import importlib.abc - import importlib.util - - class DistutilsLoader(importlib.abc.Loader): - - def create_module(self, spec): - return importlib.import_module('setuptools._distutils') - - def exec_module(self, module): - pass - - return importlib.util.spec_from_loader('distutils', DistutilsLoader()) - - def spec_for_pip(self): - """ - Ensure stdlib distutils when running under pip. - See pypa/pip#8761 for rationale. - """ - if self.pip_imported_during_build(): - return - clear_distutils() - self.spec_for_distutils = lambda: None - - @staticmethod - def pip_imported_during_build(): - """ - Detect if pip is being imported in a build script. Ref #2355. 
- """ - import traceback - return any( - frame.f_globals['__file__'].endswith('setup.py') - for frame, line in traceback.walk_stack(None) - ) - - -DISTUTILS_FINDER = DistutilsMetaFinder() - - -def add_shim(): - sys.meta_path.insert(0, DISTUTILS_FINDER) - - -def remove_shim(): - try: - sys.meta_path.remove(DISTUTILS_FINDER) - except ValueError: - pass diff --git a/env/lib/python3.10/site-packages/_distutils_hack/__pycache__/__init__.cpython-310.pyc b/env/lib/python3.10/site-packages/_distutils_hack/__pycache__/__init__.cpython-310.pyc deleted file mode 100644 index acc7ccb..0000000 Binary files a/env/lib/python3.10/site-packages/_distutils_hack/__pycache__/__init__.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/_distutils_hack/__pycache__/override.cpython-310.pyc b/env/lib/python3.10/site-packages/_distutils_hack/__pycache__/override.cpython-310.pyc deleted file mode 100644 index 7b7a5ca..0000000 Binary files a/env/lib/python3.10/site-packages/_distutils_hack/__pycache__/override.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/_distutils_hack/override.py b/env/lib/python3.10/site-packages/_distutils_hack/override.py deleted file mode 100644 index 2cc433a..0000000 --- a/env/lib/python3.10/site-packages/_distutils_hack/override.py +++ /dev/null @@ -1 +0,0 @@ -__import__('_distutils_hack').do_override() diff --git a/env/lib/python3.10/site-packages/backports.tarfile-1.2.0.dist-info/INSTALLER b/env/lib/python3.10/site-packages/backports.tarfile-1.2.0.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/env/lib/python3.10/site-packages/backports.tarfile-1.2.0.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python3.10/site-packages/backports.tarfile-1.2.0.dist-info/LICENSE b/env/lib/python3.10/site-packages/backports.tarfile-1.2.0.dist-info/LICENSE deleted file mode 100644 index 1bb5a44..0000000 --- a/env/lib/python3.10/site-packages/backports.tarfile-1.2.0.dist-info/LICENSE +++ /dev/null @@ -1,17 +0,0 @@ -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to -deal in the Software without restriction, including without limitation the -rights to use, copy, modify, merge, publish, distribute, sublicense, and/or -sell copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS -IN THE SOFTWARE. diff --git a/env/lib/python3.10/site-packages/backports.tarfile-1.2.0.dist-info/METADATA b/env/lib/python3.10/site-packages/backports.tarfile-1.2.0.dist-info/METADATA deleted file mode 100644 index db0a2dc..0000000 --- a/env/lib/python3.10/site-packages/backports.tarfile-1.2.0.dist-info/METADATA +++ /dev/null @@ -1,46 +0,0 @@ -Metadata-Version: 2.1 -Name: backports.tarfile -Version: 1.2.0 -Summary: Backport of CPython tarfile module -Author-email: "Jason R. 
Coombs" -Project-URL: Homepage, https://github.com/jaraco/backports.tarfile -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3 :: Only -Requires-Python: >=3.8 -Description-Content-Type: text/x-rst -License-File: LICENSE -Provides-Extra: docs -Requires-Dist: sphinx >=3.5 ; extra == 'docs' -Requires-Dist: jaraco.packaging >=9.3 ; extra == 'docs' -Requires-Dist: rst.linker >=1.9 ; extra == 'docs' -Requires-Dist: furo ; extra == 'docs' -Requires-Dist: sphinx-lint ; extra == 'docs' -Provides-Extra: testing -Requires-Dist: pytest !=8.1.*,>=6 ; extra == 'testing' -Requires-Dist: pytest-checkdocs >=2.4 ; extra == 'testing' -Requires-Dist: pytest-cov ; extra == 'testing' -Requires-Dist: pytest-enabler >=2.2 ; extra == 'testing' -Requires-Dist: jaraco.test ; extra == 'testing' -Requires-Dist: pytest !=8.0.* ; extra == 'testing' - -.. image:: https://img.shields.io/pypi/v/backports.tarfile.svg - :target: https://pypi.org/project/backports.tarfile - -.. image:: https://img.shields.io/pypi/pyversions/backports.tarfile.svg - -.. image:: https://github.com/jaraco/backports.tarfile/actions/workflows/main.yml/badge.svg - :target: https://github.com/jaraco/backports.tarfile/actions?query=workflow%3A%22tests%22 - :alt: tests - -.. image:: https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/charliermarsh/ruff/main/assets/badge/v2.json - :target: https://github.com/astral-sh/ruff - :alt: Ruff - -.. .. image:: https://readthedocs.org/projects/backportstarfile/badge/?version=latest -.. :target: https://backportstarfile.readthedocs.io/en/latest/?badge=latest - -.. 
image:: https://img.shields.io/badge/skeleton-2024-informational - :target: https://blog.jaraco.com/skeleton diff --git a/env/lib/python3.10/site-packages/backports.tarfile-1.2.0.dist-info/RECORD b/env/lib/python3.10/site-packages/backports.tarfile-1.2.0.dist-info/RECORD deleted file mode 100644 index be28997..0000000 --- a/env/lib/python3.10/site-packages/backports.tarfile-1.2.0.dist-info/RECORD +++ /dev/null @@ -1,16 +0,0 @@ -backports.tarfile-1.2.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -backports.tarfile-1.2.0.dist-info/LICENSE,sha256=htoPAa6uRjSKPD1GUZXcHOzN55956HdppkuNoEsqR0E,1023 -backports.tarfile-1.2.0.dist-info/METADATA,sha256=ghXFTq132dxaEIolxr3HK1mZqm9iyUmaRANZQSr6WlE,2020 -backports.tarfile-1.2.0.dist-info/RECORD,, -backports.tarfile-1.2.0.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92 -backports.tarfile-1.2.0.dist-info/top_level.txt,sha256=cGjaLMOoBR1FK0ApojtzWVmViTtJ7JGIK_HwXiEsvtU,10 -backports/__init__.py,sha256=iOEMwnlORWezdO8-2vxBIPSR37D7JGjluZ8f55vzxls,81 -backports/__pycache__/__init__.cpython-310.pyc,, -backports/tarfile/__init__.py,sha256=Pwf2qUIfB0SolJPCKcx3vz3UEu_aids4g4sAfxy94qg,108491 -backports/tarfile/__main__.py,sha256=Yw2oGT1afrz2eBskzdPYL8ReB_3liApmhFkN2EbDmc4,59 -backports/tarfile/__pycache__/__init__.cpython-310.pyc,, -backports/tarfile/__pycache__/__main__.cpython-310.pyc,, -backports/tarfile/compat/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -backports/tarfile/compat/__pycache__/__init__.cpython-310.pyc,, -backports/tarfile/compat/__pycache__/py38.cpython-310.pyc,, -backports/tarfile/compat/py38.py,sha256=iYkyt_gvWjLzGUTJD9TuTfMMjOk-ersXZmRlvQYN2qE,568 diff --git a/env/lib/python3.10/site-packages/backports.tarfile-1.2.0.dist-info/WHEEL b/env/lib/python3.10/site-packages/backports.tarfile-1.2.0.dist-info/WHEEL deleted file mode 100644 index bab98d6..0000000 --- a/env/lib/python3.10/site-packages/backports.tarfile-1.2.0.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.43.0) -Root-Is-Purelib: true -Tag: py3-none-any - diff --git a/env/lib/python3.10/site-packages/backports.tarfile-1.2.0.dist-info/top_level.txt b/env/lib/python3.10/site-packages/backports.tarfile-1.2.0.dist-info/top_level.txt deleted file mode 100644 index 99d2be5..0000000 --- a/env/lib/python3.10/site-packages/backports.tarfile-1.2.0.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -backports diff --git a/env/lib/python3.10/site-packages/backports/__init__.py b/env/lib/python3.10/site-packages/backports/__init__.py deleted file mode 100644 index 0d1f7ed..0000000 --- a/env/lib/python3.10/site-packages/backports/__init__.py +++ /dev/null @@ -1 +0,0 @@ -__path__ = __import__('pkgutil').extend_path(__path__, __name__) # type: ignore diff --git a/env/lib/python3.10/site-packages/backports/__pycache__/__init__.cpython-310.pyc b/env/lib/python3.10/site-packages/backports/__pycache__/__init__.cpython-310.pyc deleted file mode 100644 index f7c0e4b..0000000 Binary files a/env/lib/python3.10/site-packages/backports/__pycache__/__init__.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/backports/tarfile/__init__.py b/env/lib/python3.10/site-packages/backports/tarfile/__init__.py deleted file mode 100644 index 8c16881..0000000 --- a/env/lib/python3.10/site-packages/backports/tarfile/__init__.py +++ /dev/null @@ -1,2937 +0,0 @@ -#------------------------------------------------------------------- -# tarfile.py 
-#------------------------------------------------------------------- -# Copyright (C) 2002 Lars Gustaebel -# All rights reserved. -# -# Permission is hereby granted, free of charge, to any person -# obtaining a copy of this software and associated documentation -# files (the "Software"), to deal in the Software without -# restriction, including without limitation the rights to use, -# copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the -# Software is furnished to do so, subject to the following -# conditions: -# -# The above copyright notice and this permission notice shall be -# included in all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES -# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT -# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, -# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR -# OTHER DEALINGS IN THE SOFTWARE. -# -"""Read from and write to tar format archives. -""" - -version = "0.9.0" -__author__ = "Lars Gust\u00e4bel (lars@gustaebel.de)" -__credits__ = "Gustavo Niemeyer, Niels Gust\u00e4bel, Richard Townsend." - -#--------- -# Imports -#--------- -from builtins import open as bltn_open -import sys -import os -import io -import shutil -import stat -import time -import struct -import copy -import re - -from .compat.py38 import removesuffix - -try: - import pwd -except ImportError: - pwd = None -try: - import grp -except ImportError: - grp = None - -# os.symlink on Windows prior to 6.0 raises NotImplementedError -# OSError (winerror=1314) will be raised if the caller does not hold the -# SeCreateSymbolicLinkPrivilege privilege -symlink_exception = (AttributeError, NotImplementedError, OSError) - -# from tarfile import * -__all__ = ["TarFile", "TarInfo", "is_tarfile", "TarError", "ReadError", - "CompressionError", "StreamError", "ExtractError", "HeaderError", - "ENCODING", "USTAR_FORMAT", "GNU_FORMAT", "PAX_FORMAT", - "DEFAULT_FORMAT", "open","fully_trusted_filter", "data_filter", - "tar_filter", "FilterError", "AbsoluteLinkError", - "OutsideDestinationError", "SpecialFileError", "AbsolutePathError", - "LinkOutsideDestinationError"] - - -#--------------------------------------------------------- -# tar constants -#--------------------------------------------------------- -NUL = b"\0" # the null character -BLOCKSIZE = 512 # length of processing blocks -RECORDSIZE = BLOCKSIZE * 20 # length of records -GNU_MAGIC = b"ustar \0" # magic gnu tar string -POSIX_MAGIC = b"ustar\x0000" # magic posix tar string - -LENGTH_NAME = 100 # maximum length of a filename -LENGTH_LINK = 100 # maximum length of a linkname -LENGTH_PREFIX = 155 # maximum length of the prefix field - -REGTYPE = b"0" # regular file -AREGTYPE = b"\0" # regular file -LNKTYPE = b"1" # link (inside tarfile) -SYMTYPE = b"2" # symbolic link -CHRTYPE = b"3" # character special device -BLKTYPE = b"4" # block special device -DIRTYPE = b"5" # directory -FIFOTYPE = b"6" # fifo special device -CONTTYPE = b"7" # contiguous file - -GNUTYPE_LONGNAME = b"L" # GNU tar longname -GNUTYPE_LONGLINK = b"K" # GNU tar longlink -GNUTYPE_SPARSE = b"S" # GNU tar sparse file - -XHDTYPE = b"x" # POSIX.1-2001 extended header -XGLTYPE = b"g" # POSIX.1-2001 global header 
-SOLARIS_XHDTYPE = b"X" # Solaris extended header - -USTAR_FORMAT = 0 # POSIX.1-1988 (ustar) format -GNU_FORMAT = 1 # GNU tar format -PAX_FORMAT = 2 # POSIX.1-2001 (pax) format -DEFAULT_FORMAT = PAX_FORMAT - -#--------------------------------------------------------- -# tarfile constants -#--------------------------------------------------------- -# File types that tarfile supports: -SUPPORTED_TYPES = (REGTYPE, AREGTYPE, LNKTYPE, - SYMTYPE, DIRTYPE, FIFOTYPE, - CONTTYPE, CHRTYPE, BLKTYPE, - GNUTYPE_LONGNAME, GNUTYPE_LONGLINK, - GNUTYPE_SPARSE) - -# File types that will be treated as a regular file. -REGULAR_TYPES = (REGTYPE, AREGTYPE, - CONTTYPE, GNUTYPE_SPARSE) - -# File types that are part of the GNU tar format. -GNU_TYPES = (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK, - GNUTYPE_SPARSE) - -# Fields from a pax header that override a TarInfo attribute. -PAX_FIELDS = ("path", "linkpath", "size", "mtime", - "uid", "gid", "uname", "gname") - -# Fields from a pax header that are affected by hdrcharset. -PAX_NAME_FIELDS = {"path", "linkpath", "uname", "gname"} - -# Fields in a pax header that are numbers, all other fields -# are treated as strings. -PAX_NUMBER_FIELDS = { - "atime": float, - "ctime": float, - "mtime": float, - "uid": int, - "gid": int, - "size": int -} - -#--------------------------------------------------------- -# initialization -#--------------------------------------------------------- -if os.name == "nt": - ENCODING = "utf-8" -else: - ENCODING = sys.getfilesystemencoding() - -#--------------------------------------------------------- -# Some useful functions -#--------------------------------------------------------- - -def stn(s, length, encoding, errors): - """Convert a string to a null-terminated bytes object. - """ - if s is None: - raise ValueError("metadata cannot contain None") - s = s.encode(encoding, errors) - return s[:length] + (length - len(s)) * NUL - -def nts(s, encoding, errors): - """Convert a null-terminated bytes object to a string. - """ - p = s.find(b"\0") - if p != -1: - s = s[:p] - return s.decode(encoding, errors) - -def nti(s): - """Convert a number field to a python number. - """ - # There are two possible encodings for a number field, see - # itn() below. - if s[0] in (0o200, 0o377): - n = 0 - for i in range(len(s) - 1): - n <<= 8 - n += s[i + 1] - if s[0] == 0o377: - n = -(256 ** (len(s) - 1) - n) - else: - try: - s = nts(s, "ascii", "strict") - n = int(s.strip() or "0", 8) - except ValueError: - raise InvalidHeaderError("invalid header") - return n - -def itn(n, digits=8, format=DEFAULT_FORMAT): - """Convert a python number to a number field. - """ - # POSIX 1003.1-1988 requires numbers to be encoded as a string of - # octal digits followed by a null-byte, this allows values up to - # (8**(digits-1))-1. GNU tar allows storing numbers greater than - # that if necessary. A leading 0o200 or 0o377 byte indicate this - # particular encoding, the following digits-1 bytes are a big-endian - # base-256 representation. This allows values up to (256**(digits-1))-1. - # A 0o200 byte indicates a positive number, a 0o377 byte a negative - # number. 
- original_n = n - n = int(n) - if 0 <= n < 8 ** (digits - 1): - s = bytes("%0*o" % (digits - 1, n), "ascii") + NUL - elif format == GNU_FORMAT and -256 ** (digits - 1) <= n < 256 ** (digits - 1): - if n >= 0: - s = bytearray([0o200]) - else: - s = bytearray([0o377]) - n = 256 ** digits + n - - for i in range(digits - 1): - s.insert(1, n & 0o377) - n >>= 8 - else: - raise ValueError("overflow in number field") - - return s - -def calc_chksums(buf): - """Calculate the checksum for a member's header by summing up all - characters except for the chksum field which is treated as if - it was filled with spaces. According to the GNU tar sources, - some tars (Sun and NeXT) calculate chksum with signed char, - which will be different if there are chars in the buffer with - the high bit set. So we calculate two checksums, unsigned and - signed. - """ - unsigned_chksum = 256 + sum(struct.unpack_from("148B8x356B", buf)) - signed_chksum = 256 + sum(struct.unpack_from("148b8x356b", buf)) - return unsigned_chksum, signed_chksum - -def copyfileobj(src, dst, length=None, exception=OSError, bufsize=None): - """Copy length bytes from fileobj src to fileobj dst. - If length is None, copy the entire content. - """ - bufsize = bufsize or 16 * 1024 - if length == 0: - return - if length is None: - shutil.copyfileobj(src, dst, bufsize) - return - - blocks, remainder = divmod(length, bufsize) - for b in range(blocks): - buf = src.read(bufsize) - if len(buf) < bufsize: - raise exception("unexpected end of data") - dst.write(buf) - - if remainder != 0: - buf = src.read(remainder) - if len(buf) < remainder: - raise exception("unexpected end of data") - dst.write(buf) - return - -def _safe_print(s): - encoding = getattr(sys.stdout, 'encoding', None) - if encoding is not None: - s = s.encode(encoding, 'backslashreplace').decode(encoding) - print(s, end=' ') - - -class TarError(Exception): - """Base exception.""" - pass -class ExtractError(TarError): - """General exception for extract errors.""" - pass -class ReadError(TarError): - """Exception for unreadable tar archives.""" - pass -class CompressionError(TarError): - """Exception for unavailable compression methods.""" - pass -class StreamError(TarError): - """Exception for unsupported operations on stream-like TarFiles.""" - pass -class HeaderError(TarError): - """Base exception for header errors.""" - pass -class EmptyHeaderError(HeaderError): - """Exception for empty headers.""" - pass -class TruncatedHeaderError(HeaderError): - """Exception for truncated headers.""" - pass -class EOFHeaderError(HeaderError): - """Exception for end of file headers.""" - pass -class InvalidHeaderError(HeaderError): - """Exception for invalid headers.""" - pass -class SubsequentHeaderError(HeaderError): - """Exception for missing and invalid extended headers.""" - pass - -#--------------------------- -# internal stream interface -#--------------------------- -class _LowLevelFile: - """Low-level file object. Supports reading and writing. - It is used instead of a regular file object for streaming - access. - """ - - def __init__(self, name, mode): - mode = { - "r": os.O_RDONLY, - "w": os.O_WRONLY | os.O_CREAT | os.O_TRUNC, - }[mode] - if hasattr(os, "O_BINARY"): - mode |= os.O_BINARY - self.fd = os.open(name, mode, 0o666) - - def close(self): - os.close(self.fd) - - def read(self, size): - return os.read(self.fd, size) - - def write(self, s): - os.write(self.fd, s) - -class _Stream: - """Class that serves as an adapter between TarFile and - a stream-like object. 
The stream-like object only - needs to have a read() or write() method that works with bytes, - and the method is accessed blockwise. - Use of gzip or bzip2 compression is possible. - A stream-like object could be for example: sys.stdin.buffer, - sys.stdout.buffer, a socket, a tape device etc. - - _Stream is intended to be used only internally. - """ - - def __init__(self, name, mode, comptype, fileobj, bufsize, - compresslevel): - """Construct a _Stream object. - """ - self._extfileobj = True - if fileobj is None: - fileobj = _LowLevelFile(name, mode) - self._extfileobj = False - - if comptype == '*': - # Enable transparent compression detection for the - # stream interface - fileobj = _StreamProxy(fileobj) - comptype = fileobj.getcomptype() - - self.name = name or "" - self.mode = mode - self.comptype = comptype - self.fileobj = fileobj - self.bufsize = bufsize - self.buf = b"" - self.pos = 0 - self.closed = False - - try: - if comptype == "gz": - try: - import zlib - except ImportError: - raise CompressionError("zlib module is not available") from None - self.zlib = zlib - self.crc = zlib.crc32(b"") - if mode == "r": - self.exception = zlib.error - self._init_read_gz() - else: - self._init_write_gz(compresslevel) - - elif comptype == "bz2": - try: - import bz2 - except ImportError: - raise CompressionError("bz2 module is not available") from None - if mode == "r": - self.dbuf = b"" - self.cmp = bz2.BZ2Decompressor() - self.exception = OSError - else: - self.cmp = bz2.BZ2Compressor(compresslevel) - - elif comptype == "xz": - try: - import lzma - except ImportError: - raise CompressionError("lzma module is not available") from None - if mode == "r": - self.dbuf = b"" - self.cmp = lzma.LZMADecompressor() - self.exception = lzma.LZMAError - else: - self.cmp = lzma.LZMACompressor() - - elif comptype != "tar": - raise CompressionError("unknown compression type %r" % comptype) - - except: - if not self._extfileobj: - self.fileobj.close() - self.closed = True - raise - - def __del__(self): - if hasattr(self, "closed") and not self.closed: - self.close() - - def _init_write_gz(self, compresslevel): - """Initialize for writing with gzip compression. - """ - self.cmp = self.zlib.compressobj(compresslevel, - self.zlib.DEFLATED, - -self.zlib.MAX_WBITS, - self.zlib.DEF_MEM_LEVEL, - 0) - timestamp = struct.pack(" self.bufsize: - self.fileobj.write(self.buf[:self.bufsize]) - self.buf = self.buf[self.bufsize:] - - def close(self): - """Close the _Stream object. No operation should be - done on it afterwards. - """ - if self.closed: - return - - self.closed = True - try: - if self.mode == "w" and self.comptype != "tar": - self.buf += self.cmp.flush() - - if self.mode == "w" and self.buf: - self.fileobj.write(self.buf) - self.buf = b"" - if self.comptype == "gz": - self.fileobj.write(struct.pack("= 0: - blocks, remainder = divmod(pos - self.pos, self.bufsize) - for i in range(blocks): - self.read(self.bufsize) - self.read(remainder) - else: - raise StreamError("seeking backwards is not allowed") - return self.pos - - def read(self, size): - """Return the next size number of bytes from the stream.""" - assert size is not None - buf = self._read(size) - self.pos += len(buf) - return buf - - def _read(self, size): - """Return size bytes from the stream. - """ - if self.comptype == "tar": - return self.__read(size) - - c = len(self.dbuf) - t = [self.dbuf] - while c < size: - # Skip underlying buffer to avoid unaligned double buffering. 
- if self.buf: - buf = self.buf - self.buf = b"" - else: - buf = self.fileobj.read(self.bufsize) - if not buf: - break - try: - buf = self.cmp.decompress(buf) - except self.exception as e: - raise ReadError("invalid compressed data") from e - t.append(buf) - c += len(buf) - t = b"".join(t) - self.dbuf = t[size:] - return t[:size] - - def __read(self, size): - """Return size bytes from stream. If internal buffer is empty, - read another block from the stream. - """ - c = len(self.buf) - t = [self.buf] - while c < size: - buf = self.fileobj.read(self.bufsize) - if not buf: - break - t.append(buf) - c += len(buf) - t = b"".join(t) - self.buf = t[size:] - return t[:size] -# class _Stream - -class _StreamProxy(object): - """Small proxy class that enables transparent compression - detection for the Stream interface (mode 'r|*'). - """ - - def __init__(self, fileobj): - self.fileobj = fileobj - self.buf = self.fileobj.read(BLOCKSIZE) - - def read(self, size): - self.read = self.fileobj.read - return self.buf - - def getcomptype(self): - if self.buf.startswith(b"\x1f\x8b\x08"): - return "gz" - elif self.buf[0:3] == b"BZh" and self.buf[4:10] == b"1AY&SY": - return "bz2" - elif self.buf.startswith((b"\x5d\x00\x00\x80", b"\xfd7zXZ")): - return "xz" - else: - return "tar" - - def close(self): - self.fileobj.close() -# class StreamProxy - -#------------------------ -# Extraction file object -#------------------------ -class _FileInFile(object): - """A thin wrapper around an existing file object that - provides a part of its data as an individual file - object. - """ - - def __init__(self, fileobj, offset, size, name, blockinfo=None): - self.fileobj = fileobj - self.offset = offset - self.size = size - self.position = 0 - self.name = name - self.closed = False - - if blockinfo is None: - blockinfo = [(0, size)] - - # Construct a map with data and zero blocks. - self.map_index = 0 - self.map = [] - lastpos = 0 - realpos = self.offset - for offset, size in blockinfo: - if offset > lastpos: - self.map.append((False, lastpos, offset, None)) - self.map.append((True, offset, offset + size, realpos)) - realpos += size - lastpos = offset + size - if lastpos < self.size: - self.map.append((False, lastpos, self.size, None)) - - def flush(self): - pass - - @property - def mode(self): - return 'rb' - - def readable(self): - return True - - def writable(self): - return False - - def seekable(self): - return self.fileobj.seekable() - - def tell(self): - """Return the current file position. - """ - return self.position - - def seek(self, position, whence=io.SEEK_SET): - """Seek to a position in the file. - """ - if whence == io.SEEK_SET: - self.position = min(max(position, 0), self.size) - elif whence == io.SEEK_CUR: - if position < 0: - self.position = max(self.position + position, 0) - else: - self.position = min(self.position + position, self.size) - elif whence == io.SEEK_END: - self.position = max(min(self.size + position, self.size), 0) - else: - raise ValueError("Invalid argument") - return self.position - - def read(self, size=None): - """Read data from the file. 
- """ - if size is None: - size = self.size - self.position - else: - size = min(size, self.size - self.position) - - buf = b"" - while size > 0: - while True: - data, start, stop, offset = self.map[self.map_index] - if start <= self.position < stop: - break - else: - self.map_index += 1 - if self.map_index == len(self.map): - self.map_index = 0 - length = min(size, stop - self.position) - if data: - self.fileobj.seek(offset + (self.position - start)) - b = self.fileobj.read(length) - if len(b) != length: - raise ReadError("unexpected end of data") - buf += b - else: - buf += NUL * length - size -= length - self.position += length - return buf - - def readinto(self, b): - buf = self.read(len(b)) - b[:len(buf)] = buf - return len(buf) - - def close(self): - self.closed = True -#class _FileInFile - -class ExFileObject(io.BufferedReader): - - def __init__(self, tarfile, tarinfo): - fileobj = _FileInFile(tarfile.fileobj, tarinfo.offset_data, - tarinfo.size, tarinfo.name, tarinfo.sparse) - super().__init__(fileobj) -#class ExFileObject - - -#----------------------------- -# extraction filters (PEP 706) -#----------------------------- - -class FilterError(TarError): - pass - -class AbsolutePathError(FilterError): - def __init__(self, tarinfo): - self.tarinfo = tarinfo - super().__init__(f'member {tarinfo.name!r} has an absolute path') - -class OutsideDestinationError(FilterError): - def __init__(self, tarinfo, path): - self.tarinfo = tarinfo - self._path = path - super().__init__(f'{tarinfo.name!r} would be extracted to {path!r}, ' - + 'which is outside the destination') - -class SpecialFileError(FilterError): - def __init__(self, tarinfo): - self.tarinfo = tarinfo - super().__init__(f'{tarinfo.name!r} is a special file') - -class AbsoluteLinkError(FilterError): - def __init__(self, tarinfo): - self.tarinfo = tarinfo - super().__init__(f'{tarinfo.name!r} is a link to an absolute path') - -class LinkOutsideDestinationError(FilterError): - def __init__(self, tarinfo, path): - self.tarinfo = tarinfo - self._path = path - super().__init__(f'{tarinfo.name!r} would link to {path!r}, ' - + 'which is outside the destination') - -def _get_filtered_attrs(member, dest_path, for_data=True): - new_attrs = {} - name = member.name - dest_path = os.path.realpath(dest_path) - # Strip leading / (tar's directory separator) from filenames. - # Include os.sep (target OS directory separator) as well. - if name.startswith(('/', os.sep)): - name = new_attrs['name'] = member.path.lstrip('/' + os.sep) - if os.path.isabs(name): - # Path is absolute even after stripping. - # For example, 'C:/foo' on Windows. 
- raise AbsolutePathError(member) - # Ensure we stay in the destination - target_path = os.path.realpath(os.path.join(dest_path, name)) - if os.path.commonpath([target_path, dest_path]) != dest_path: - raise OutsideDestinationError(member, target_path) - # Limit permissions (no high bits, and go-w) - mode = member.mode - if mode is not None: - # Strip high bits & group/other write bits - mode = mode & 0o755 - if for_data: - # For data, handle permissions & file types - if member.isreg() or member.islnk(): - if not mode & 0o100: - # Clear executable bits if not executable by user - mode &= ~0o111 - # Ensure owner can read & write - mode |= 0o600 - elif member.isdir() or member.issym(): - # Ignore mode for directories & symlinks - mode = None - else: - # Reject special files - raise SpecialFileError(member) - if mode != member.mode: - new_attrs['mode'] = mode - if for_data: - # Ignore ownership for 'data' - if member.uid is not None: - new_attrs['uid'] = None - if member.gid is not None: - new_attrs['gid'] = None - if member.uname is not None: - new_attrs['uname'] = None - if member.gname is not None: - new_attrs['gname'] = None - # Check link destination for 'data' - if member.islnk() or member.issym(): - if os.path.isabs(member.linkname): - raise AbsoluteLinkError(member) - if member.issym(): - target_path = os.path.join(dest_path, - os.path.dirname(name), - member.linkname) - else: - target_path = os.path.join(dest_path, - member.linkname) - target_path = os.path.realpath(target_path) - if os.path.commonpath([target_path, dest_path]) != dest_path: - raise LinkOutsideDestinationError(member, target_path) - return new_attrs - -def fully_trusted_filter(member, dest_path): - return member - -def tar_filter(member, dest_path): - new_attrs = _get_filtered_attrs(member, dest_path, False) - if new_attrs: - return member.replace(**new_attrs, deep=False) - return member - -def data_filter(member, dest_path): - new_attrs = _get_filtered_attrs(member, dest_path, True) - if new_attrs: - return member.replace(**new_attrs, deep=False) - return member - -_NAMED_FILTERS = { - "fully_trusted": fully_trusted_filter, - "tar": tar_filter, - "data": data_filter, -} - -#------------------ -# Exported Classes -#------------------ - -# Sentinel for replace() defaults, meaning "don't change the attribute" -_KEEP = object() - -class TarInfo(object): - """Informational class which holds the details about an - archive member given by a tar header block. - TarInfo objects are returned by TarFile.getmember(), - TarFile.getmembers() and TarFile.gettarinfo() and are - usually created internally. - """ - - __slots__ = dict( - name = 'Name of the archive member.', - mode = 'Permission bits.', - uid = 'User ID of the user who originally stored this member.', - gid = 'Group ID of the user who originally stored this member.', - size = 'Size in bytes.', - mtime = 'Time of last modification.', - chksum = 'Header checksum.', - type = ('File type. 
type is usually one of these constants: ' - 'REGTYPE, AREGTYPE, LNKTYPE, SYMTYPE, DIRTYPE, FIFOTYPE, ' - 'CONTTYPE, CHRTYPE, BLKTYPE, GNUTYPE_SPARSE.'), - linkname = ('Name of the target file name, which is only present ' - 'in TarInfo objects of type LNKTYPE and SYMTYPE.'), - uname = 'User name.', - gname = 'Group name.', - devmajor = 'Device major number.', - devminor = 'Device minor number.', - offset = 'The tar header starts here.', - offset_data = "The file's data starts here.", - pax_headers = ('A dictionary containing key-value pairs of an ' - 'associated pax extended header.'), - sparse = 'Sparse member information.', - _tarfile = None, - _sparse_structs = None, - _link_target = None, - ) - - def __init__(self, name=""): - """Construct a TarInfo object. name is the optional name - of the member. - """ - self.name = name # member name - self.mode = 0o644 # file permissions - self.uid = 0 # user id - self.gid = 0 # group id - self.size = 0 # file size - self.mtime = 0 # modification time - self.chksum = 0 # header checksum - self.type = REGTYPE # member type - self.linkname = "" # link name - self.uname = "" # user name - self.gname = "" # group name - self.devmajor = 0 # device major number - self.devminor = 0 # device minor number - - self.offset = 0 # the tar header starts here - self.offset_data = 0 # the file's data starts here - - self.sparse = None # sparse member information - self.pax_headers = {} # pax header information - - @property - def tarfile(self): - import warnings - warnings.warn( - 'The undocumented "tarfile" attribute of TarInfo objects ' - + 'is deprecated and will be removed in Python 3.16', - DeprecationWarning, stacklevel=2) - return self._tarfile - - @tarfile.setter - def tarfile(self, tarfile): - import warnings - warnings.warn( - 'The undocumented "tarfile" attribute of TarInfo objects ' - + 'is deprecated and will be removed in Python 3.16', - DeprecationWarning, stacklevel=2) - self._tarfile = tarfile - - @property - def path(self): - 'In pax headers, "name" is called "path".' - return self.name - - @path.setter - def path(self, name): - self.name = name - - @property - def linkpath(self): - 'In pax headers, "linkname" is called "linkpath".' - return self.linkname - - @linkpath.setter - def linkpath(self, linkname): - self.linkname = linkname - - def __repr__(self): - return "<%s %r at %#x>" % (self.__class__.__name__,self.name,id(self)) - - def replace(self, *, - name=_KEEP, mtime=_KEEP, mode=_KEEP, linkname=_KEEP, - uid=_KEEP, gid=_KEEP, uname=_KEEP, gname=_KEEP, - deep=True, _KEEP=_KEEP): - """Return a deep copy of self with the given attributes replaced. - """ - if deep: - result = copy.deepcopy(self) - else: - result = copy.copy(self) - if name is not _KEEP: - result.name = name - if mtime is not _KEEP: - result.mtime = mtime - if mode is not _KEEP: - result.mode = mode - if linkname is not _KEEP: - result.linkname = linkname - if uid is not _KEEP: - result.uid = uid - if gid is not _KEEP: - result.gid = gid - if uname is not _KEEP: - result.uname = uname - if gname is not _KEEP: - result.gname = gname - return result - - def get_info(self): - """Return the TarInfo's attributes as a dictionary. 
- """ - if self.mode is None: - mode = None - else: - mode = self.mode & 0o7777 - info = { - "name": self.name, - "mode": mode, - "uid": self.uid, - "gid": self.gid, - "size": self.size, - "mtime": self.mtime, - "chksum": self.chksum, - "type": self.type, - "linkname": self.linkname, - "uname": self.uname, - "gname": self.gname, - "devmajor": self.devmajor, - "devminor": self.devminor - } - - if info["type"] == DIRTYPE and not info["name"].endswith("/"): - info["name"] += "/" - - return info - - def tobuf(self, format=DEFAULT_FORMAT, encoding=ENCODING, errors="surrogateescape"): - """Return a tar header as a string of 512 byte blocks. - """ - info = self.get_info() - for name, value in info.items(): - if value is None: - raise ValueError("%s may not be None" % name) - - if format == USTAR_FORMAT: - return self.create_ustar_header(info, encoding, errors) - elif format == GNU_FORMAT: - return self.create_gnu_header(info, encoding, errors) - elif format == PAX_FORMAT: - return self.create_pax_header(info, encoding) - else: - raise ValueError("invalid format") - - def create_ustar_header(self, info, encoding, errors): - """Return the object as a ustar header block. - """ - info["magic"] = POSIX_MAGIC - - if len(info["linkname"].encode(encoding, errors)) > LENGTH_LINK: - raise ValueError("linkname is too long") - - if len(info["name"].encode(encoding, errors)) > LENGTH_NAME: - info["prefix"], info["name"] = self._posix_split_name(info["name"], encoding, errors) - - return self._create_header(info, USTAR_FORMAT, encoding, errors) - - def create_gnu_header(self, info, encoding, errors): - """Return the object as a GNU header block sequence. - """ - info["magic"] = GNU_MAGIC - - buf = b"" - if len(info["linkname"].encode(encoding, errors)) > LENGTH_LINK: - buf += self._create_gnu_long_header(info["linkname"], GNUTYPE_LONGLINK, encoding, errors) - - if len(info["name"].encode(encoding, errors)) > LENGTH_NAME: - buf += self._create_gnu_long_header(info["name"], GNUTYPE_LONGNAME, encoding, errors) - - return buf + self._create_header(info, GNU_FORMAT, encoding, errors) - - def create_pax_header(self, info, encoding): - """Return the object as a ustar header block. If it cannot be - represented this way, prepend a pax extended header sequence - with supplement information. - """ - info["magic"] = POSIX_MAGIC - pax_headers = self.pax_headers.copy() - - # Test string fields for values that exceed the field length or cannot - # be represented in ASCII encoding. - for name, hname, length in ( - ("name", "path", LENGTH_NAME), ("linkname", "linkpath", LENGTH_LINK), - ("uname", "uname", 32), ("gname", "gname", 32)): - - if hname in pax_headers: - # The pax header has priority. - continue - - # Try to encode the string as ASCII. - try: - info[name].encode("ascii", "strict") - except UnicodeEncodeError: - pax_headers[hname] = info[name] - continue - - if len(info[name]) > length: - pax_headers[hname] = info[name] - - # Test number fields for values that exceed the field limit or values - # that like to be stored as float. - for name, digits in (("uid", 8), ("gid", 8), ("size", 12), ("mtime", 12)): - needs_pax = False - - val = info[name] - val_is_float = isinstance(val, float) - val_int = round(val) if val_is_float else val - if not 0 <= val_int < 8 ** (digits - 1): - # Avoid overflow. - info[name] = 0 - needs_pax = True - elif val_is_float: - # Put rounded value in ustar header, and full - # precision value in pax header. - info[name] = val_int - needs_pax = True - - # The existing pax header has priority. 
- if needs_pax and name not in pax_headers: - pax_headers[name] = str(val) - - # Create a pax extended header if necessary. - if pax_headers: - buf = self._create_pax_generic_header(pax_headers, XHDTYPE, encoding) - else: - buf = b"" - - return buf + self._create_header(info, USTAR_FORMAT, "ascii", "replace") - - @classmethod - def create_pax_global_header(cls, pax_headers): - """Return the object as a pax global header block sequence. - """ - return cls._create_pax_generic_header(pax_headers, XGLTYPE, "utf-8") - - def _posix_split_name(self, name, encoding, errors): - """Split a name longer than 100 chars into a prefix - and a name part. - """ - components = name.split("/") - for i in range(1, len(components)): - prefix = "/".join(components[:i]) - name = "/".join(components[i:]) - if len(prefix.encode(encoding, errors)) <= LENGTH_PREFIX and \ - len(name.encode(encoding, errors)) <= LENGTH_NAME: - break - else: - raise ValueError("name is too long") - - return prefix, name - - @staticmethod - def _create_header(info, format, encoding, errors): - """Return a header block. info is a dictionary with file - information, format must be one of the *_FORMAT constants. - """ - has_device_fields = info.get("type") in (CHRTYPE, BLKTYPE) - if has_device_fields: - devmajor = itn(info.get("devmajor", 0), 8, format) - devminor = itn(info.get("devminor", 0), 8, format) - else: - devmajor = stn("", 8, encoding, errors) - devminor = stn("", 8, encoding, errors) - - # None values in metadata should cause ValueError. - # itn()/stn() do this for all fields except type. - filetype = info.get("type", REGTYPE) - if filetype is None: - raise ValueError("TarInfo.type must not be None") - - parts = [ - stn(info.get("name", ""), 100, encoding, errors), - itn(info.get("mode", 0) & 0o7777, 8, format), - itn(info.get("uid", 0), 8, format), - itn(info.get("gid", 0), 8, format), - itn(info.get("size", 0), 12, format), - itn(info.get("mtime", 0), 12, format), - b" ", # checksum field - filetype, - stn(info.get("linkname", ""), 100, encoding, errors), - info.get("magic", POSIX_MAGIC), - stn(info.get("uname", ""), 32, encoding, errors), - stn(info.get("gname", ""), 32, encoding, errors), - devmajor, - devminor, - stn(info.get("prefix", ""), 155, encoding, errors) - ] - - buf = struct.pack("%ds" % BLOCKSIZE, b"".join(parts)) - chksum = calc_chksums(buf[-BLOCKSIZE:])[0] - buf = buf[:-364] + bytes("%06o\0" % chksum, "ascii") + buf[-357:] - return buf - - @staticmethod - def _create_payload(payload): - """Return the string payload filled with zero bytes - up to the next 512 byte border. - """ - blocks, remainder = divmod(len(payload), BLOCKSIZE) - if remainder > 0: - payload += (BLOCKSIZE - remainder) * NUL - return payload - - @classmethod - def _create_gnu_long_header(cls, name, type, encoding, errors): - """Return a GNUTYPE_LONGNAME or GNUTYPE_LONGLINK sequence - for name. - """ - name = name.encode(encoding, errors) + NUL - - info = {} - info["name"] = "././@LongLink" - info["type"] = type - info["size"] = len(name) - info["magic"] = GNU_MAGIC - - # create extended header + name blocks. - return cls._create_header(info, USTAR_FORMAT, encoding, errors) + \ - cls._create_payload(name) - - @classmethod - def _create_pax_generic_header(cls, pax_headers, type, encoding): - """Return a POSIX.1-2008 extended or global header sequence - that contains a list of keyword, value pairs. The values - must be strings. 
- """ - # Check if one of the fields contains surrogate characters and thereby - # forces hdrcharset=BINARY, see _proc_pax() for more information. - binary = False - for keyword, value in pax_headers.items(): - try: - value.encode("utf-8", "strict") - except UnicodeEncodeError: - binary = True - break - - records = b"" - if binary: - # Put the hdrcharset field at the beginning of the header. - records += b"21 hdrcharset=BINARY\n" - - for keyword, value in pax_headers.items(): - keyword = keyword.encode("utf-8") - if binary: - # Try to restore the original byte representation of 'value'. - # Needless to say, that the encoding must match the string. - value = value.encode(encoding, "surrogateescape") - else: - value = value.encode("utf-8") - - l = len(keyword) + len(value) + 3 # ' ' + '=' + '\n' - n = p = 0 - while True: - n = l + len(str(p)) - if n == p: - break - p = n - records += bytes(str(p), "ascii") + b" " + keyword + b"=" + value + b"\n" - - # We use a hardcoded "././@PaxHeader" name like star does - # instead of the one that POSIX recommends. - info = {} - info["name"] = "././@PaxHeader" - info["type"] = type - info["size"] = len(records) - info["magic"] = POSIX_MAGIC - - # Create pax header + record blocks. - return cls._create_header(info, USTAR_FORMAT, "ascii", "replace") + \ - cls._create_payload(records) - - @classmethod - def frombuf(cls, buf, encoding, errors): - """Construct a TarInfo object from a 512 byte bytes object. - """ - if len(buf) == 0: - raise EmptyHeaderError("empty header") - if len(buf) != BLOCKSIZE: - raise TruncatedHeaderError("truncated header") - if buf.count(NUL) == BLOCKSIZE: - raise EOFHeaderError("end of file header") - - chksum = nti(buf[148:156]) - if chksum not in calc_chksums(buf): - raise InvalidHeaderError("bad checksum") - - obj = cls() - obj.name = nts(buf[0:100], encoding, errors) - obj.mode = nti(buf[100:108]) - obj.uid = nti(buf[108:116]) - obj.gid = nti(buf[116:124]) - obj.size = nti(buf[124:136]) - obj.mtime = nti(buf[136:148]) - obj.chksum = chksum - obj.type = buf[156:157] - obj.linkname = nts(buf[157:257], encoding, errors) - obj.uname = nts(buf[265:297], encoding, errors) - obj.gname = nts(buf[297:329], encoding, errors) - obj.devmajor = nti(buf[329:337]) - obj.devminor = nti(buf[337:345]) - prefix = nts(buf[345:500], encoding, errors) - - # Old V7 tar format represents a directory as a regular - # file with a trailing slash. - if obj.type == AREGTYPE and obj.name.endswith("/"): - obj.type = DIRTYPE - - # The old GNU sparse format occupies some of the unused - # space in the buffer for up to 4 sparse structures. - # Save them for later processing in _proc_sparse(). - if obj.type == GNUTYPE_SPARSE: - pos = 386 - structs = [] - for i in range(4): - try: - offset = nti(buf[pos:pos + 12]) - numbytes = nti(buf[pos + 12:pos + 24]) - except ValueError: - break - structs.append((offset, numbytes)) - pos += 24 - isextended = bool(buf[482]) - origsize = nti(buf[483:495]) - obj._sparse_structs = (structs, isextended, origsize) - - # Remove redundant slashes from directories. - if obj.isdir(): - obj.name = obj.name.rstrip("/") - - # Reconstruct a ustar longname. - if prefix and obj.type not in GNU_TYPES: - obj.name = prefix + "/" + obj.name - return obj - - @classmethod - def fromtarfile(cls, tarfile): - """Return the next TarInfo object from TarFile object - tarfile. 
- """ - buf = tarfile.fileobj.read(BLOCKSIZE) - obj = cls.frombuf(buf, tarfile.encoding, tarfile.errors) - obj.offset = tarfile.fileobj.tell() - BLOCKSIZE - return obj._proc_member(tarfile) - - #-------------------------------------------------------------------------- - # The following are methods that are called depending on the type of a - # member. The entry point is _proc_member() which can be overridden in a - # subclass to add custom _proc_*() methods. A _proc_*() method MUST - # implement the following - # operations: - # 1. Set self.offset_data to the position where the data blocks begin, - # if there is data that follows. - # 2. Set tarfile.offset to the position where the next member's header will - # begin. - # 3. Return self or another valid TarInfo object. - def _proc_member(self, tarfile): - """Choose the right processing method depending on - the type and call it. - """ - if self.type in (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK): - return self._proc_gnulong(tarfile) - elif self.type == GNUTYPE_SPARSE: - return self._proc_sparse(tarfile) - elif self.type in (XHDTYPE, XGLTYPE, SOLARIS_XHDTYPE): - return self._proc_pax(tarfile) - else: - return self._proc_builtin(tarfile) - - def _proc_builtin(self, tarfile): - """Process a builtin type or an unknown type which - will be treated as a regular file. - """ - self.offset_data = tarfile.fileobj.tell() - offset = self.offset_data - if self.isreg() or self.type not in SUPPORTED_TYPES: - # Skip the following data blocks. - offset += self._block(self.size) - tarfile.offset = offset - - # Patch the TarInfo object with saved global - # header information. - self._apply_pax_info(tarfile.pax_headers, tarfile.encoding, tarfile.errors) - - # Remove redundant slashes from directories. This is to be consistent - # with frombuf(). - if self.isdir(): - self.name = self.name.rstrip("/") - - return self - - def _proc_gnulong(self, tarfile): - """Process the blocks that hold a GNU longname - or longlink member. - """ - buf = tarfile.fileobj.read(self._block(self.size)) - - # Fetch the next header and process it. - try: - next = self.fromtarfile(tarfile) - except HeaderError as e: - raise SubsequentHeaderError(str(e)) from None - - # Patch the TarInfo object from the next header with - # the longname information. - next.offset = self.offset - if self.type == GNUTYPE_LONGNAME: - next.name = nts(buf, tarfile.encoding, tarfile.errors) - elif self.type == GNUTYPE_LONGLINK: - next.linkname = nts(buf, tarfile.encoding, tarfile.errors) - - # Remove redundant slashes from directories. This is to be consistent - # with frombuf(). - if next.isdir(): - next.name = removesuffix(next.name, "/") - - return next - - def _proc_sparse(self, tarfile): - """Process a GNU sparse header plus extra headers. - """ - # We already collected some sparse structures in frombuf(). - structs, isextended, origsize = self._sparse_structs - del self._sparse_structs - - # Collect sparse structures from extended header blocks. 
- while isextended: - buf = tarfile.fileobj.read(BLOCKSIZE) - pos = 0 - for i in range(21): - try: - offset = nti(buf[pos:pos + 12]) - numbytes = nti(buf[pos + 12:pos + 24]) - except ValueError: - break - if offset and numbytes: - structs.append((offset, numbytes)) - pos += 24 - isextended = bool(buf[504]) - self.sparse = structs - - self.offset_data = tarfile.fileobj.tell() - tarfile.offset = self.offset_data + self._block(self.size) - self.size = origsize - return self - - def _proc_pax(self, tarfile): - """Process an extended or global header as described in - POSIX.1-2008. - """ - # Read the header information. - buf = tarfile.fileobj.read(self._block(self.size)) - - # A pax header stores supplemental information for either - # the following file (extended) or all following files - # (global). - if self.type == XGLTYPE: - pax_headers = tarfile.pax_headers - else: - pax_headers = tarfile.pax_headers.copy() - - # Check if the pax header contains a hdrcharset field. This tells us - # the encoding of the path, linkpath, uname and gname fields. Normally, - # these fields are UTF-8 encoded but since POSIX.1-2008 tar - # implementations are allowed to store them as raw binary strings if - # the translation to UTF-8 fails. - match = re.search(br"\d+ hdrcharset=([^\n]+)\n", buf) - if match is not None: - pax_headers["hdrcharset"] = match.group(1).decode("utf-8") - - # For the time being, we don't care about anything other than "BINARY". - # The only other value that is currently allowed by the standard is - # "ISO-IR 10646 2000 UTF-8" in other words UTF-8. - hdrcharset = pax_headers.get("hdrcharset") - if hdrcharset == "BINARY": - encoding = tarfile.encoding - else: - encoding = "utf-8" - - # Parse pax header information. A record looks like that: - # "%d %s=%s\n" % (length, keyword, value). length is the size - # of the complete record including the length field itself and - # the newline. keyword and value are both UTF-8 encoded strings. - regex = re.compile(br"(\d+) ([^=]+)=") - pos = 0 - while match := regex.match(buf, pos): - length, keyword = match.groups() - length = int(length) - if length == 0: - raise InvalidHeaderError("invalid header") - value = buf[match.end(2) + 1:match.start(1) + length - 1] - - # Normally, we could just use "utf-8" as the encoding and "strict" - # as the error handler, but we better not take the risk. For - # example, GNU tar <= 1.23 is known to store filenames it cannot - # translate to UTF-8 as raw strings (unfortunately without a - # hdrcharset=BINARY header). - # We first try the strict standard encoding, and if that fails we - # fall back on the user's encoding and error handler. - keyword = self._decode_pax_field(keyword, "utf-8", "utf-8", - tarfile.errors) - if keyword in PAX_NAME_FIELDS: - value = self._decode_pax_field(value, encoding, tarfile.encoding, - tarfile.errors) - else: - value = self._decode_pax_field(value, "utf-8", "utf-8", - tarfile.errors) - - pax_headers[keyword] = value - pos += length - - # Fetch the next header. - try: - next = self.fromtarfile(tarfile) - except HeaderError as e: - raise SubsequentHeaderError(str(e)) from None - - # Process GNU sparse information. - if "GNU.sparse.map" in pax_headers: - # GNU extended sparse format version 0.1. - self._proc_gnusparse_01(next, pax_headers) - - elif "GNU.sparse.size" in pax_headers: - # GNU extended sparse format version 0.0. 
- self._proc_gnusparse_00(next, pax_headers, buf) - - elif pax_headers.get("GNU.sparse.major") == "1" and pax_headers.get("GNU.sparse.minor") == "0": - # GNU extended sparse format version 1.0. - self._proc_gnusparse_10(next, pax_headers, tarfile) - - if self.type in (XHDTYPE, SOLARIS_XHDTYPE): - # Patch the TarInfo object with the extended header info. - next._apply_pax_info(pax_headers, tarfile.encoding, tarfile.errors) - next.offset = self.offset - - if "size" in pax_headers: - # If the extended header replaces the size field, - # we need to recalculate the offset where the next - # header starts. - offset = next.offset_data - if next.isreg() or next.type not in SUPPORTED_TYPES: - offset += next._block(next.size) - tarfile.offset = offset - - return next - - def _proc_gnusparse_00(self, next, pax_headers, buf): - """Process a GNU tar extended sparse header, version 0.0. - """ - offsets = [] - for match in re.finditer(br"\d+ GNU.sparse.offset=(\d+)\n", buf): - offsets.append(int(match.group(1))) - numbytes = [] - for match in re.finditer(br"\d+ GNU.sparse.numbytes=(\d+)\n", buf): - numbytes.append(int(match.group(1))) - next.sparse = list(zip(offsets, numbytes)) - - def _proc_gnusparse_01(self, next, pax_headers): - """Process a GNU tar extended sparse header, version 0.1. - """ - sparse = [int(x) for x in pax_headers["GNU.sparse.map"].split(",")] - next.sparse = list(zip(sparse[::2], sparse[1::2])) - - def _proc_gnusparse_10(self, next, pax_headers, tarfile): - """Process a GNU tar extended sparse header, version 1.0. - """ - fields = None - sparse = [] - buf = tarfile.fileobj.read(BLOCKSIZE) - fields, buf = buf.split(b"\n", 1) - fields = int(fields) - while len(sparse) < fields * 2: - if b"\n" not in buf: - buf += tarfile.fileobj.read(BLOCKSIZE) - number, buf = buf.split(b"\n", 1) - sparse.append(int(number)) - next.offset_data = tarfile.fileobj.tell() - next.sparse = list(zip(sparse[::2], sparse[1::2])) - - def _apply_pax_info(self, pax_headers, encoding, errors): - """Replace fields with supplemental information from a previous - pax extended or global header. - """ - for keyword, value in pax_headers.items(): - if keyword == "GNU.sparse.name": - setattr(self, "path", value) - elif keyword == "GNU.sparse.size": - setattr(self, "size", int(value)) - elif keyword == "GNU.sparse.realsize": - setattr(self, "size", int(value)) - elif keyword in PAX_FIELDS: - if keyword in PAX_NUMBER_FIELDS: - try: - value = PAX_NUMBER_FIELDS[keyword](value) - except ValueError: - value = 0 - if keyword == "path": - value = value.rstrip("/") - setattr(self, keyword, value) - - self.pax_headers = pax_headers.copy() - - def _decode_pax_field(self, value, encoding, fallback_encoding, fallback_errors): - """Decode a single field from a pax record. - """ - try: - return value.decode(encoding, "strict") - except UnicodeDecodeError: - return value.decode(fallback_encoding, fallback_errors) - - def _block(self, count): - """Round up a byte count by BLOCKSIZE and return it, - e.g. _block(834) => 1024. - """ - blocks, remainder = divmod(count, BLOCKSIZE) - if remainder: - blocks += 1 - return blocks * BLOCKSIZE - - def isreg(self): - 'Return True if the Tarinfo object is a regular file.' - return self.type in REGULAR_TYPES - - def isfile(self): - 'Return True if the Tarinfo object is a regular file.' - return self.isreg() - - def isdir(self): - 'Return True if it is a directory.' - return self.type == DIRTYPE - - def issym(self): - 'Return True if it is a symbolic link.' 
- return self.type == SYMTYPE - - def islnk(self): - 'Return True if it is a hard link.' - return self.type == LNKTYPE - - def ischr(self): - 'Return True if it is a character device.' - return self.type == CHRTYPE - - def isblk(self): - 'Return True if it is a block device.' - return self.type == BLKTYPE - - def isfifo(self): - 'Return True if it is a FIFO.' - return self.type == FIFOTYPE - - def issparse(self): - return self.sparse is not None - - def isdev(self): - 'Return True if it is one of character device, block device or FIFO.' - return self.type in (CHRTYPE, BLKTYPE, FIFOTYPE) -# class TarInfo - -class TarFile(object): - """The TarFile Class provides an interface to tar archives. - """ - - debug = 0 # May be set from 0 (no msgs) to 3 (all msgs) - - dereference = False # If true, add content of linked file to the - # tar file, else the link. - - ignore_zeros = False # If true, skips empty or invalid blocks and - # continues processing. - - errorlevel = 1 # If 0, fatal errors only appear in debug - # messages (if debug >= 0). If > 0, errors - # are passed to the caller as exceptions. - - format = DEFAULT_FORMAT # The format to use when creating an archive. - - encoding = ENCODING # Encoding for 8-bit character strings. - - errors = None # Error handler for unicode conversion. - - tarinfo = TarInfo # The default TarInfo class to use. - - fileobject = ExFileObject # The file-object for extractfile(). - - extraction_filter = None # The default filter for extraction. - - def __init__(self, name=None, mode="r", fileobj=None, format=None, - tarinfo=None, dereference=None, ignore_zeros=None, encoding=None, - errors="surrogateescape", pax_headers=None, debug=None, - errorlevel=None, copybufsize=None, stream=False): - """Open an (uncompressed) tar archive 'name'. 'mode' is either 'r' to - read from an existing archive, 'a' to append data to an existing - file or 'w' to create a new file overwriting an existing one. 'mode' - defaults to 'r'. - If 'fileobj' is given, it is used for reading or writing data. If it - can be determined, 'mode' is overridden by 'fileobj's mode. - 'fileobj' is not closed, when TarFile is closed. - """ - modes = {"r": "rb", "a": "r+b", "w": "wb", "x": "xb"} - if mode not in modes: - raise ValueError("mode must be 'r', 'a', 'w' or 'x'") - self.mode = mode - self._mode = modes[mode] - - if not fileobj: - if self.mode == "a" and not os.path.exists(name): - # Create nonexistent files in append mode. - self.mode = "w" - self._mode = "wb" - fileobj = bltn_open(name, self._mode) - self._extfileobj = False - else: - if (name is None and hasattr(fileobj, "name") and - isinstance(fileobj.name, (str, bytes))): - name = fileobj.name - if hasattr(fileobj, "mode"): - self._mode = fileobj.mode - self._extfileobj = True - self.name = os.path.abspath(name) if name else None - self.fileobj = fileobj - - self.stream = stream - - # Init attributes. - if format is not None: - self.format = format - if tarinfo is not None: - self.tarinfo = tarinfo - if dereference is not None: - self.dereference = dereference - if ignore_zeros is not None: - self.ignore_zeros = ignore_zeros - if encoding is not None: - self.encoding = encoding - self.errors = errors - - if pax_headers is not None and self.format == PAX_FORMAT: - self.pax_headers = pax_headers - else: - self.pax_headers = {} - - if debug is not None: - self.debug = debug - if errorlevel is not None: - self.errorlevel = errorlevel - - # Init datastructures. 
- self.copybufsize = copybufsize - self.closed = False - self.members = [] # list of members as TarInfo objects - self._loaded = False # flag if all members have been read - self.offset = self.fileobj.tell() - # current position in the archive file - self.inodes = {} # dictionary caching the inodes of - # archive members already added - - try: - if self.mode == "r": - self.firstmember = None - self.firstmember = self.next() - - if self.mode == "a": - # Move to the end of the archive, - # before the first empty block. - while True: - self.fileobj.seek(self.offset) - try: - tarinfo = self.tarinfo.fromtarfile(self) - self.members.append(tarinfo) - except EOFHeaderError: - self.fileobj.seek(self.offset) - break - except HeaderError as e: - raise ReadError(str(e)) from None - - if self.mode in ("a", "w", "x"): - self._loaded = True - - if self.pax_headers: - buf = self.tarinfo.create_pax_global_header(self.pax_headers.copy()) - self.fileobj.write(buf) - self.offset += len(buf) - except: - if not self._extfileobj: - self.fileobj.close() - self.closed = True - raise - - #-------------------------------------------------------------------------- - # Below are the classmethods which act as alternate constructors to the - # TarFile class. The open() method is the only one that is needed for - # public use; it is the "super"-constructor and is able to select an - # adequate "sub"-constructor for a particular compression using the mapping - # from OPEN_METH. - # - # This concept allows one to subclass TarFile without losing the comfort of - # the super-constructor. A sub-constructor is registered and made available - # by adding it to the mapping in OPEN_METH. - - @classmethod - def open(cls, name=None, mode="r", fileobj=None, bufsize=RECORDSIZE, **kwargs): - r"""Open a tar archive for reading, writing or appending. Return - an appropriate TarFile class. - - mode: - 'r' or 'r:\*' open for reading with transparent compression - 'r:' open for reading exclusively uncompressed - 'r:gz' open for reading with gzip compression - 'r:bz2' open for reading with bzip2 compression - 'r:xz' open for reading with lzma compression - 'a' or 'a:' open for appending, creating the file if necessary - 'w' or 'w:' open for writing without compression - 'w:gz' open for writing with gzip compression - 'w:bz2' open for writing with bzip2 compression - 'w:xz' open for writing with lzma compression - - 'x' or 'x:' create a tarfile exclusively without compression, raise - an exception if the file is already created - 'x:gz' create a gzip compressed tarfile, raise an exception - if the file is already created - 'x:bz2' create a bzip2 compressed tarfile, raise an exception - if the file is already created - 'x:xz' create an lzma compressed tarfile, raise an exception - if the file is already created - - 'r|\*' open a stream of tar blocks with transparent compression - 'r|' open an uncompressed stream of tar blocks for reading - 'r|gz' open a gzip compressed stream of tar blocks - 'r|bz2' open a bzip2 compressed stream of tar blocks - 'r|xz' open an lzma compressed stream of tar blocks - 'w|' open an uncompressed stream for writing - 'w|gz' open a gzip compressed stream for writing - 'w|bz2' open a bzip2 compressed stream for writing - 'w|xz' open an lzma compressed stream for writing - """ - - if not name and not fileobj: - raise ValueError("nothing to open") - - if mode in ("r", "r:*"): - # Find out which *open() is appropriate for opening the file. 
- def not_compressed(comptype): - return cls.OPEN_METH[comptype] == 'taropen' - error_msgs = [] - for comptype in sorted(cls.OPEN_METH, key=not_compressed): - func = getattr(cls, cls.OPEN_METH[comptype]) - if fileobj is not None: - saved_pos = fileobj.tell() - try: - return func(name, "r", fileobj, **kwargs) - except (ReadError, CompressionError) as e: - error_msgs.append(f'- method {comptype}: {e!r}') - if fileobj is not None: - fileobj.seek(saved_pos) - continue - error_msgs_summary = '\n'.join(error_msgs) - raise ReadError(f"file could not be opened successfully:\n{error_msgs_summary}") - - elif ":" in mode: - filemode, comptype = mode.split(":", 1) - filemode = filemode or "r" - comptype = comptype or "tar" - - # Select the *open() function according to - # given compression. - if comptype in cls.OPEN_METH: - func = getattr(cls, cls.OPEN_METH[comptype]) - else: - raise CompressionError("unknown compression type %r" % comptype) - return func(name, filemode, fileobj, **kwargs) - - elif "|" in mode: - filemode, comptype = mode.split("|", 1) - filemode = filemode or "r" - comptype = comptype or "tar" - - if filemode not in ("r", "w"): - raise ValueError("mode must be 'r' or 'w'") - - compresslevel = kwargs.pop("compresslevel", 9) - stream = _Stream(name, filemode, comptype, fileobj, bufsize, - compresslevel) - try: - t = cls(name, filemode, stream, **kwargs) - except: - stream.close() - raise - t._extfileobj = False - return t - - elif mode in ("a", "w", "x"): - return cls.taropen(name, mode, fileobj, **kwargs) - - raise ValueError("undiscernible mode") - - @classmethod - def taropen(cls, name, mode="r", fileobj=None, **kwargs): - """Open uncompressed tar archive name for reading or writing. - """ - if mode not in ("r", "a", "w", "x"): - raise ValueError("mode must be 'r', 'a', 'w' or 'x'") - return cls(name, mode, fileobj, **kwargs) - - @classmethod - def gzopen(cls, name, mode="r", fileobj=None, compresslevel=9, **kwargs): - """Open gzip compressed tar archive name for reading or writing. - Appending is not allowed. - """ - if mode not in ("r", "w", "x"): - raise ValueError("mode must be 'r', 'w' or 'x'") - - try: - from gzip import GzipFile - except ImportError: - raise CompressionError("gzip module is not available") from None - - try: - fileobj = GzipFile(name, mode + "b", compresslevel, fileobj) - except OSError as e: - if fileobj is not None and mode == 'r': - raise ReadError("not a gzip file") from e - raise - - try: - t = cls.taropen(name, mode, fileobj, **kwargs) - except OSError as e: - fileobj.close() - if mode == 'r': - raise ReadError("not a gzip file") from e - raise - except: - fileobj.close() - raise - t._extfileobj = False - return t - - @classmethod - def bz2open(cls, name, mode="r", fileobj=None, compresslevel=9, **kwargs): - """Open bzip2 compressed tar archive name for reading or writing. - Appending is not allowed. 
- """ - if mode not in ("r", "w", "x"): - raise ValueError("mode must be 'r', 'w' or 'x'") - - try: - from bz2 import BZ2File - except ImportError: - raise CompressionError("bz2 module is not available") from None - - fileobj = BZ2File(fileobj or name, mode, compresslevel=compresslevel) - - try: - t = cls.taropen(name, mode, fileobj, **kwargs) - except (OSError, EOFError) as e: - fileobj.close() - if mode == 'r': - raise ReadError("not a bzip2 file") from e - raise - except: - fileobj.close() - raise - t._extfileobj = False - return t - - @classmethod - def xzopen(cls, name, mode="r", fileobj=None, preset=None, **kwargs): - """Open lzma compressed tar archive name for reading or writing. - Appending is not allowed. - """ - if mode not in ("r", "w", "x"): - raise ValueError("mode must be 'r', 'w' or 'x'") - - try: - from lzma import LZMAFile, LZMAError - except ImportError: - raise CompressionError("lzma module is not available") from None - - fileobj = LZMAFile(fileobj or name, mode, preset=preset) - - try: - t = cls.taropen(name, mode, fileobj, **kwargs) - except (LZMAError, EOFError) as e: - fileobj.close() - if mode == 'r': - raise ReadError("not an lzma file") from e - raise - except: - fileobj.close() - raise - t._extfileobj = False - return t - - # All *open() methods are registered here. - OPEN_METH = { - "tar": "taropen", # uncompressed tar - "gz": "gzopen", # gzip compressed tar - "bz2": "bz2open", # bzip2 compressed tar - "xz": "xzopen" # lzma compressed tar - } - - #-------------------------------------------------------------------------- - # The public methods which TarFile provides: - - def close(self): - """Close the TarFile. In write-mode, two finishing zero blocks are - appended to the archive. - """ - if self.closed: - return - - self.closed = True - try: - if self.mode in ("a", "w", "x"): - self.fileobj.write(NUL * (BLOCKSIZE * 2)) - self.offset += (BLOCKSIZE * 2) - # fill up the end with zero-blocks - # (like option -b20 for tar does) - blocks, remainder = divmod(self.offset, RECORDSIZE) - if remainder > 0: - self.fileobj.write(NUL * (RECORDSIZE - remainder)) - finally: - if not self._extfileobj: - self.fileobj.close() - - def getmember(self, name): - """Return a TarInfo object for member 'name'. If 'name' can not be - found in the archive, KeyError is raised. If a member occurs more - than once in the archive, its last occurrence is assumed to be the - most up-to-date version. - """ - tarinfo = self._getmember(name.rstrip('/')) - if tarinfo is None: - raise KeyError("filename %r not found" % name) - return tarinfo - - def getmembers(self): - """Return the members of the archive as a list of TarInfo objects. The - list has the same order as the members in the archive. - """ - self._check() - if not self._loaded: # if we want to obtain a list of - self._load() # all members, we first have to - # scan the whole archive. - return self.members - - def getnames(self): - """Return the members of the archive as a list of their names. It has - the same order as the list returned by getmembers(). - """ - return [tarinfo.name for tarinfo in self.getmembers()] - - def gettarinfo(self, name=None, arcname=None, fileobj=None): - """Create a TarInfo object from the result of os.stat or equivalent - on an existing file. The file is either named by 'name', or - specified as a file object 'fileobj' with a file descriptor. 
If - given, 'arcname' specifies an alternative name for the file in the - archive, otherwise, the name is taken from the 'name' attribute of - 'fileobj', or the 'name' argument. The name should be a text - string. - """ - self._check("awx") - - # When fileobj is given, replace name by - # fileobj's real name. - if fileobj is not None: - name = fileobj.name - - # Building the name of the member in the archive. - # Backward slashes are converted to forward slashes, - # Absolute paths are turned to relative paths. - if arcname is None: - arcname = name - drv, arcname = os.path.splitdrive(arcname) - arcname = arcname.replace(os.sep, "/") - arcname = arcname.lstrip("/") - - # Now, fill the TarInfo object with - # information specific for the file. - tarinfo = self.tarinfo() - tarinfo._tarfile = self # To be removed in 3.16. - - # Use os.stat or os.lstat, depending on if symlinks shall be resolved. - if fileobj is None: - if not self.dereference: - statres = os.lstat(name) - else: - statres = os.stat(name) - else: - statres = os.fstat(fileobj.fileno()) - linkname = "" - - stmd = statres.st_mode - if stat.S_ISREG(stmd): - inode = (statres.st_ino, statres.st_dev) - if not self.dereference and statres.st_nlink > 1 and \ - inode in self.inodes and arcname != self.inodes[inode]: - # Is it a hardlink to an already - # archived file? - type = LNKTYPE - linkname = self.inodes[inode] - else: - # The inode is added only if its valid. - # For win32 it is always 0. - type = REGTYPE - if inode[0]: - self.inodes[inode] = arcname - elif stat.S_ISDIR(stmd): - type = DIRTYPE - elif stat.S_ISFIFO(stmd): - type = FIFOTYPE - elif stat.S_ISLNK(stmd): - type = SYMTYPE - linkname = os.readlink(name) - elif stat.S_ISCHR(stmd): - type = CHRTYPE - elif stat.S_ISBLK(stmd): - type = BLKTYPE - else: - return None - - # Fill the TarInfo object with all - # information we can get. - tarinfo.name = arcname - tarinfo.mode = stmd - tarinfo.uid = statres.st_uid - tarinfo.gid = statres.st_gid - if type == REGTYPE: - tarinfo.size = statres.st_size - else: - tarinfo.size = 0 - tarinfo.mtime = statres.st_mtime - tarinfo.type = type - tarinfo.linkname = linkname - if pwd: - try: - tarinfo.uname = pwd.getpwuid(tarinfo.uid)[0] - except KeyError: - pass - if grp: - try: - tarinfo.gname = grp.getgrgid(tarinfo.gid)[0] - except KeyError: - pass - - if type in (CHRTYPE, BLKTYPE): - if hasattr(os, "major") and hasattr(os, "minor"): - tarinfo.devmajor = os.major(statres.st_rdev) - tarinfo.devminor = os.minor(statres.st_rdev) - return tarinfo - - def list(self, verbose=True, *, members=None): - """Print a table of contents to sys.stdout. If 'verbose' is False, only - the names of the members are printed. If it is True, an 'ls -l'-like - output is produced. 'members' is optional and must be a subset of the - list returned by getmembers(). - """ - # Convert tarinfo type to stat type. 
- type2mode = {REGTYPE: stat.S_IFREG, SYMTYPE: stat.S_IFLNK, - FIFOTYPE: stat.S_IFIFO, CHRTYPE: stat.S_IFCHR, - DIRTYPE: stat.S_IFDIR, BLKTYPE: stat.S_IFBLK} - self._check() - - if members is None: - members = self - for tarinfo in members: - if verbose: - if tarinfo.mode is None: - _safe_print("??????????") - else: - modetype = type2mode.get(tarinfo.type, 0) - _safe_print(stat.filemode(modetype | tarinfo.mode)) - _safe_print("%s/%s" % (tarinfo.uname or tarinfo.uid, - tarinfo.gname or tarinfo.gid)) - if tarinfo.ischr() or tarinfo.isblk(): - _safe_print("%10s" % - ("%d,%d" % (tarinfo.devmajor, tarinfo.devminor))) - else: - _safe_print("%10d" % tarinfo.size) - if tarinfo.mtime is None: - _safe_print("????-??-?? ??:??:??") - else: - _safe_print("%d-%02d-%02d %02d:%02d:%02d" \ - % time.localtime(tarinfo.mtime)[:6]) - - _safe_print(tarinfo.name + ("/" if tarinfo.isdir() else "")) - - if verbose: - if tarinfo.issym(): - _safe_print("-> " + tarinfo.linkname) - if tarinfo.islnk(): - _safe_print("link to " + tarinfo.linkname) - print() - - def add(self, name, arcname=None, recursive=True, *, filter=None): - """Add the file 'name' to the archive. 'name' may be any type of file - (directory, fifo, symbolic link, etc.). If given, 'arcname' - specifies an alternative name for the file in the archive. - Directories are added recursively by default. This can be avoided by - setting 'recursive' to False. 'filter' is a function - that expects a TarInfo object argument and returns the changed - TarInfo object, if it returns None the TarInfo object will be - excluded from the archive. - """ - self._check("awx") - - if arcname is None: - arcname = name - - # Skip if somebody tries to archive the archive... - if self.name is not None and os.path.abspath(name) == self.name: - self._dbg(2, "tarfile: Skipped %r" % name) - return - - self._dbg(1, name) - - # Create a TarInfo object from the file. - tarinfo = self.gettarinfo(name, arcname) - - if tarinfo is None: - self._dbg(1, "tarfile: Unsupported type %r" % name) - return - - # Change or exclude the TarInfo object. - if filter is not None: - tarinfo = filter(tarinfo) - if tarinfo is None: - self._dbg(2, "tarfile: Excluded %r" % name) - return - - # Append the tar header and data to the archive. - if tarinfo.isreg(): - with bltn_open(name, "rb") as f: - self.addfile(tarinfo, f) - - elif tarinfo.isdir(): - self.addfile(tarinfo) - if recursive: - for f in sorted(os.listdir(name)): - self.add(os.path.join(name, f), os.path.join(arcname, f), - recursive, filter=filter) - - else: - self.addfile(tarinfo) - - def addfile(self, tarinfo, fileobj=None): - """Add the TarInfo object 'tarinfo' to the archive. If 'tarinfo' represents - a non zero-size regular file, the 'fileobj' argument should be a binary file, - and tarinfo.size bytes are read from it and added to the archive. - You can create TarInfo objects directly, or by using gettarinfo(). - """ - self._check("awx") - - if fileobj is None and tarinfo.isreg() and tarinfo.size != 0: - raise ValueError("fileobj not provided for non zero-size regular file") - - tarinfo = copy.copy(tarinfo) - - buf = tarinfo.tobuf(self.format, self.encoding, self.errors) - self.fileobj.write(buf) - self.offset += len(buf) - bufsize=self.copybufsize - # If there's data to follow, append it. 
- if fileobj is not None: - copyfileobj(fileobj, self.fileobj, tarinfo.size, bufsize=bufsize) - blocks, remainder = divmod(tarinfo.size, BLOCKSIZE) - if remainder > 0: - self.fileobj.write(NUL * (BLOCKSIZE - remainder)) - blocks += 1 - self.offset += blocks * BLOCKSIZE - - self.members.append(tarinfo) - - def _get_filter_function(self, filter): - if filter is None: - filter = self.extraction_filter - if filter is None: - import warnings - warnings.warn( - 'Python 3.14 will, by default, filter extracted tar ' - + 'archives and reject files or modify their metadata. ' - + 'Use the filter argument to control this behavior.', - DeprecationWarning, stacklevel=3) - return fully_trusted_filter - if isinstance(filter, str): - raise TypeError( - 'String names are not supported for ' - + 'TarFile.extraction_filter. Use a function such as ' - + 'tarfile.data_filter directly.') - return filter - if callable(filter): - return filter - try: - return _NAMED_FILTERS[filter] - except KeyError: - raise ValueError(f"filter {filter!r} not found") from None - - def extractall(self, path=".", members=None, *, numeric_owner=False, - filter=None): - """Extract all members from the archive to the current working - directory and set owner, modification time and permissions on - directories afterwards. 'path' specifies a different directory - to extract to. 'members' is optional and must be a subset of the - list returned by getmembers(). If 'numeric_owner' is True, only - the numbers for user/group names are used and not the names. - - The 'filter' function will be called on each member just - before extraction. - It can return a changed TarInfo or None to skip the member. - String names of common filters are accepted. - """ - directories = [] - - filter_function = self._get_filter_function(filter) - if members is None: - members = self - - for member in members: - tarinfo = self._get_extract_tarinfo(member, filter_function, path) - if tarinfo is None: - continue - if tarinfo.isdir(): - # For directories, delay setting attributes until later, - # since permissions can interfere with extraction and - # extracting contents can reset mtime. - directories.append(tarinfo) - self._extract_one(tarinfo, path, set_attrs=not tarinfo.isdir(), - numeric_owner=numeric_owner) - - # Reverse sort directories. - directories.sort(key=lambda a: a.name, reverse=True) - - # Set correct owner, mtime and filemode on directories. - for tarinfo in directories: - dirpath = os.path.join(path, tarinfo.name) - try: - self.chown(tarinfo, dirpath, numeric_owner=numeric_owner) - self.utime(tarinfo, dirpath) - self.chmod(tarinfo, dirpath) - except ExtractError as e: - self._handle_nonfatal_error(e) - - def extract(self, member, path="", set_attrs=True, *, numeric_owner=False, - filter=None): - """Extract a member from the archive to the current working directory, - using its full name. Its file information is extracted as accurately - as possible. 'member' may be a filename or a TarInfo object. You can - specify a different directory using 'path'. File attributes (owner, - mtime, mode) are set unless 'set_attrs' is False. If 'numeric_owner' - is True, only the numbers for user/group names are used and not - the names. - - The 'filter' function will be called before extraction. - It can return a changed TarInfo or None to skip the member. - String names of common filters are accepted. 
- """ - filter_function = self._get_filter_function(filter) - tarinfo = self._get_extract_tarinfo(member, filter_function, path) - if tarinfo is not None: - self._extract_one(tarinfo, path, set_attrs, numeric_owner) - - def _get_extract_tarinfo(self, member, filter_function, path): - """Get filtered TarInfo (or None) from member, which might be a str""" - if isinstance(member, str): - tarinfo = self.getmember(member) - else: - tarinfo = member - - unfiltered = tarinfo - try: - tarinfo = filter_function(tarinfo, path) - except (OSError, FilterError) as e: - self._handle_fatal_error(e) - except ExtractError as e: - self._handle_nonfatal_error(e) - if tarinfo is None: - self._dbg(2, "tarfile: Excluded %r" % unfiltered.name) - return None - # Prepare the link target for makelink(). - if tarinfo.islnk(): - tarinfo = copy.copy(tarinfo) - tarinfo._link_target = os.path.join(path, tarinfo.linkname) - return tarinfo - - def _extract_one(self, tarinfo, path, set_attrs, numeric_owner): - """Extract from filtered tarinfo to disk""" - self._check("r") - - try: - self._extract_member(tarinfo, os.path.join(path, tarinfo.name), - set_attrs=set_attrs, - numeric_owner=numeric_owner) - except OSError as e: - self._handle_fatal_error(e) - except ExtractError as e: - self._handle_nonfatal_error(e) - - def _handle_nonfatal_error(self, e): - """Handle non-fatal error (ExtractError) according to errorlevel""" - if self.errorlevel > 1: - raise - else: - self._dbg(1, "tarfile: %s" % e) - - def _handle_fatal_error(self, e): - """Handle "fatal" error according to self.errorlevel""" - if self.errorlevel > 0: - raise - elif isinstance(e, OSError): - if e.filename is None: - self._dbg(1, "tarfile: %s" % e.strerror) - else: - self._dbg(1, "tarfile: %s %r" % (e.strerror, e.filename)) - else: - self._dbg(1, "tarfile: %s %s" % (type(e).__name__, e)) - - def extractfile(self, member): - """Extract a member from the archive as a file object. 'member' may be - a filename or a TarInfo object. If 'member' is a regular file or - a link, an io.BufferedReader object is returned. For all other - existing members, None is returned. If 'member' does not appear - in the archive, KeyError is raised. - """ - self._check("r") - - if isinstance(member, str): - tarinfo = self.getmember(member) - else: - tarinfo = member - - if tarinfo.isreg() or tarinfo.type not in SUPPORTED_TYPES: - # Members with unknown types are treated as regular files. - return self.fileobject(self, tarinfo) - - elif tarinfo.islnk() or tarinfo.issym(): - if isinstance(self.fileobj, _Stream): - # A small but ugly workaround for the case that someone tries - # to extract a (sym)link as a file-object from a non-seekable - # stream of tar blocks. - raise StreamError("cannot extract (sym)link as file object") - else: - # A (sym)link's file object is its target's file object. - return self.extractfile(self._find_link_target(tarinfo)) - else: - # If there's no data associated with the member (directory, chrdev, - # blkdev, etc.), return None instead of a file object. - return None - - def _extract_member(self, tarinfo, targetpath, set_attrs=True, - numeric_owner=False): - """Extract the TarInfo object tarinfo to a physical - file called targetpath. - """ - # Fetch the TarInfo object for the given name - # and build the destination pathname, replacing - # forward slashes to platform specific separators. - targetpath = targetpath.rstrip("/") - targetpath = targetpath.replace("/", os.sep) - - # Create all upper directories. 
- upperdirs = os.path.dirname(targetpath) - if upperdirs and not os.path.exists(upperdirs): - # Create directories that are not part of the archive with - # default permissions. - os.makedirs(upperdirs, exist_ok=True) - - if tarinfo.islnk() or tarinfo.issym(): - self._dbg(1, "%s -> %s" % (tarinfo.name, tarinfo.linkname)) - else: - self._dbg(1, tarinfo.name) - - if tarinfo.isreg(): - self.makefile(tarinfo, targetpath) - elif tarinfo.isdir(): - self.makedir(tarinfo, targetpath) - elif tarinfo.isfifo(): - self.makefifo(tarinfo, targetpath) - elif tarinfo.ischr() or tarinfo.isblk(): - self.makedev(tarinfo, targetpath) - elif tarinfo.islnk() or tarinfo.issym(): - self.makelink(tarinfo, targetpath) - elif tarinfo.type not in SUPPORTED_TYPES: - self.makeunknown(tarinfo, targetpath) - else: - self.makefile(tarinfo, targetpath) - - if set_attrs: - self.chown(tarinfo, targetpath, numeric_owner) - if not tarinfo.issym(): - self.chmod(tarinfo, targetpath) - self.utime(tarinfo, targetpath) - - #-------------------------------------------------------------------------- - # Below are the different file methods. They are called via - # _extract_member() when extract() is called. They can be replaced in a - # subclass to implement other functionality. - - def makedir(self, tarinfo, targetpath): - """Make a directory called targetpath. - """ - try: - if tarinfo.mode is None: - # Use the system's default mode - os.mkdir(targetpath) - else: - # Use a safe mode for the directory, the real mode is set - # later in _extract_member(). - os.mkdir(targetpath, 0o700) - except FileExistsError: - if not os.path.isdir(targetpath): - raise - - def makefile(self, tarinfo, targetpath): - """Make a file called targetpath. - """ - source = self.fileobj - source.seek(tarinfo.offset_data) - bufsize = self.copybufsize - with bltn_open(targetpath, "wb") as target: - if tarinfo.sparse is not None: - for offset, size in tarinfo.sparse: - target.seek(offset) - copyfileobj(source, target, size, ReadError, bufsize) - target.seek(tarinfo.size) - target.truncate() - else: - copyfileobj(source, target, tarinfo.size, ReadError, bufsize) - - def makeunknown(self, tarinfo, targetpath): - """Make a file from a TarInfo object with an unknown type - at targetpath. - """ - self.makefile(tarinfo, targetpath) - self._dbg(1, "tarfile: Unknown file type %r, " \ - "extracted as regular file." % tarinfo.type) - - def makefifo(self, tarinfo, targetpath): - """Make a fifo called targetpath. - """ - if hasattr(os, "mkfifo"): - os.mkfifo(targetpath) - else: - raise ExtractError("fifo not supported by system") - - def makedev(self, tarinfo, targetpath): - """Make a character or block device called targetpath. - """ - if not hasattr(os, "mknod") or not hasattr(os, "makedev"): - raise ExtractError("special devices not supported by system") - - mode = tarinfo.mode - if mode is None: - # Use mknod's default - mode = 0o600 - if tarinfo.isblk(): - mode |= stat.S_IFBLK - else: - mode |= stat.S_IFCHR - - os.mknod(targetpath, mode, - os.makedev(tarinfo.devmajor, tarinfo.devminor)) - - def makelink(self, tarinfo, targetpath): - """Make a (symbolic) link called targetpath. If it cannot be created - (platform limitation), we try to make a copy of the referenced file - instead of a link. - """ - try: - # For systems that support symbolic and hard links. - if tarinfo.issym(): - if os.path.lexists(targetpath): - # Avoid FileExistsError on following os.symlink. 
- os.unlink(targetpath) - os.symlink(tarinfo.linkname, targetpath) - else: - if os.path.exists(tarinfo._link_target): - os.link(tarinfo._link_target, targetpath) - else: - self._extract_member(self._find_link_target(tarinfo), - targetpath) - except symlink_exception: - try: - self._extract_member(self._find_link_target(tarinfo), - targetpath) - except KeyError: - raise ExtractError("unable to resolve link inside archive") from None - - def chown(self, tarinfo, targetpath, numeric_owner): - """Set owner of targetpath according to tarinfo. If numeric_owner - is True, use .gid/.uid instead of .gname/.uname. If numeric_owner - is False, fall back to .gid/.uid when the search based on name - fails. - """ - if hasattr(os, "geteuid") and os.geteuid() == 0: - # We have to be root to do so. - g = tarinfo.gid - u = tarinfo.uid - if not numeric_owner: - try: - if grp and tarinfo.gname: - g = grp.getgrnam(tarinfo.gname)[2] - except KeyError: - pass - try: - if pwd and tarinfo.uname: - u = pwd.getpwnam(tarinfo.uname)[2] - except KeyError: - pass - if g is None: - g = -1 - if u is None: - u = -1 - try: - if tarinfo.issym() and hasattr(os, "lchown"): - os.lchown(targetpath, u, g) - else: - os.chown(targetpath, u, g) - except (OSError, OverflowError) as e: - # OverflowError can be raised if an ID doesn't fit in 'id_t' - raise ExtractError("could not change owner") from e - - def chmod(self, tarinfo, targetpath): - """Set file permissions of targetpath according to tarinfo. - """ - if tarinfo.mode is None: - return - try: - os.chmod(targetpath, tarinfo.mode) - except OSError as e: - raise ExtractError("could not change mode") from e - - def utime(self, tarinfo, targetpath): - """Set modification time of targetpath according to tarinfo. - """ - mtime = tarinfo.mtime - if mtime is None: - return - if not hasattr(os, 'utime'): - return - try: - os.utime(targetpath, (mtime, mtime)) - except OSError as e: - raise ExtractError("could not change modification time") from e - - #-------------------------------------------------------------------------- - def next(self): - """Return the next member of the archive as a TarInfo object, when - TarFile is opened for reading. Return None if there is no more - available. - """ - self._check("ra") - if self.firstmember is not None: - m = self.firstmember - self.firstmember = None - return m - - # Advance the file pointer. - if self.offset != self.fileobj.tell(): - if self.offset == 0: - return None - self.fileobj.seek(self.offset - 1) - if not self.fileobj.read(1): - raise ReadError("unexpected end of data") - - # Read the next block. 
- tarinfo = None - while True: - try: - tarinfo = self.tarinfo.fromtarfile(self) - except EOFHeaderError as e: - if self.ignore_zeros: - self._dbg(2, "0x%X: %s" % (self.offset, e)) - self.offset += BLOCKSIZE - continue - except InvalidHeaderError as e: - if self.ignore_zeros: - self._dbg(2, "0x%X: %s" % (self.offset, e)) - self.offset += BLOCKSIZE - continue - elif self.offset == 0: - raise ReadError(str(e)) from None - except EmptyHeaderError: - if self.offset == 0: - raise ReadError("empty file") from None - except TruncatedHeaderError as e: - if self.offset == 0: - raise ReadError(str(e)) from None - except SubsequentHeaderError as e: - raise ReadError(str(e)) from None - except Exception as e: - try: - import zlib - if isinstance(e, zlib.error): - raise ReadError(f'zlib error: {e}') from None - else: - raise e - except ImportError: - raise e - break - - if tarinfo is not None: - # if streaming the file we do not want to cache the tarinfo - if not self.stream: - self.members.append(tarinfo) - else: - self._loaded = True - - return tarinfo - - #-------------------------------------------------------------------------- - # Little helper methods: - - def _getmember(self, name, tarinfo=None, normalize=False): - """Find an archive member by name from bottom to top. - If tarinfo is given, it is used as the starting point. - """ - # Ensure that all members have been loaded. - members = self.getmembers() - - # Limit the member search list up to tarinfo. - skipping = False - if tarinfo is not None: - try: - index = members.index(tarinfo) - except ValueError: - # The given starting point might be a (modified) copy. - # We'll later skip members until we find an equivalent. - skipping = True - else: - # Happy fast path - members = members[:index] - - if normalize: - name = os.path.normpath(name) - - for member in reversed(members): - if skipping: - if tarinfo.offset == member.offset: - skipping = False - continue - if normalize: - member_name = os.path.normpath(member.name) - else: - member_name = member.name - - if name == member_name: - return member - - if skipping: - # Starting point was not found - raise ValueError(tarinfo) - - def _load(self): - """Read through the entire archive file and look for readable - members. This should not run if the file is set to stream. - """ - if not self.stream: - while self.next() is not None: - pass - self._loaded = True - - def _check(self, mode=None): - """Check if TarFile is still open, and if the operation's mode - corresponds to TarFile's mode. - """ - if self.closed: - raise OSError("%s is closed" % self.__class__.__name__) - if mode is not None and self.mode not in mode: - raise OSError("bad operation for mode %r" % self.mode) - - def _find_link_target(self, tarinfo): - """Find the target member of a symlink or hardlink member in the - archive. - """ - if tarinfo.issym(): - # Always search the entire archive. - linkname = "/".join(filter(None, (os.path.dirname(tarinfo.name), tarinfo.linkname))) - limit = None - else: - # Search the archive before the link, because a hard link is - # just a reference to an already archived file. - linkname = tarinfo.linkname - limit = tarinfo - - member = self._getmember(linkname, tarinfo=limit, normalize=True) - if member is None: - raise KeyError("linkname %r not found" % linkname) - return member - - def __iter__(self): - """Provide an iterator object. - """ - if self._loaded: - yield from self.members - return - - # Yield items using TarFile's next() method. 
- # When all members have been read, set TarFile as _loaded. - index = 0 - # Fix for SF #1100429: Under rare circumstances it can - # happen that getmembers() is called during iteration, - # which will have already exhausted the next() method. - if self.firstmember is not None: - tarinfo = self.next() - index += 1 - yield tarinfo - - while True: - if index < len(self.members): - tarinfo = self.members[index] - elif not self._loaded: - tarinfo = self.next() - if not tarinfo: - self._loaded = True - return - else: - return - index += 1 - yield tarinfo - - def _dbg(self, level, msg): - """Write debugging output to sys.stderr. - """ - if level <= self.debug: - print(msg, file=sys.stderr) - - def __enter__(self): - self._check() - return self - - def __exit__(self, type, value, traceback): - if type is None: - self.close() - else: - # An exception occurred. We must not call close() because - # it would try to write end-of-archive blocks and padding. - if not self._extfileobj: - self.fileobj.close() - self.closed = True - -#-------------------- -# exported functions -#-------------------- - -def is_tarfile(name): - """Return True if name points to a tar archive that we - are able to handle, else return False. - - 'name' should be a string, file, or file-like object. - """ - try: - if hasattr(name, "read"): - pos = name.tell() - t = open(fileobj=name) - name.seek(pos) - else: - t = open(name) - t.close() - return True - except TarError: - return False - -open = TarFile.open - - -def main(): - import argparse - - description = 'A simple command-line interface for tarfile module.' - parser = argparse.ArgumentParser(description=description) - parser.add_argument('-v', '--verbose', action='store_true', default=False, - help='Verbose output') - parser.add_argument('--filter', metavar='', - choices=_NAMED_FILTERS, - help='Filter for extraction') - - group = parser.add_mutually_exclusive_group(required=True) - group.add_argument('-l', '--list', metavar='', - help='Show listing of a tarfile') - group.add_argument('-e', '--extract', nargs='+', - metavar=('', ''), - help='Extract tarfile into target dir') - group.add_argument('-c', '--create', nargs='+', - metavar=('', ''), - help='Create tarfile from sources') - group.add_argument('-t', '--test', metavar='', - help='Test if a tarfile is valid') - - args = parser.parse_args() - - if args.filter and args.extract is None: - parser.exit(1, '--filter is only valid for extraction\n') - - if args.test is not None: - src = args.test - if is_tarfile(src): - with open(src, 'r') as tar: - tar.getmembers() - print(tar.getmembers(), file=sys.stderr) - if args.verbose: - print('{!r} is a tar archive.'.format(src)) - else: - parser.exit(1, '{!r} is not a tar archive.\n'.format(src)) - - elif args.list is not None: - src = args.list - if is_tarfile(src): - with TarFile.open(src, 'r:*') as tf: - tf.list(verbose=args.verbose) - else: - parser.exit(1, '{!r} is not a tar archive.\n'.format(src)) - - elif args.extract is not None: - if len(args.extract) == 1: - src = args.extract[0] - curdir = os.curdir - elif len(args.extract) == 2: - src, curdir = args.extract - else: - parser.exit(1, parser.format_help()) - - if is_tarfile(src): - with TarFile.open(src, 'r:*') as tf: - tf.extractall(path=curdir, filter=args.filter) - if args.verbose: - if curdir == '.': - msg = '{!r} file is extracted.'.format(src) - else: - msg = ('{!r} file is extracted ' - 'into {!r} directory.').format(src, curdir) - print(msg) - else: - parser.exit(1, '{!r} is not a tar archive.\n'.format(src)) - - 
elif args.create is not None: - tar_name = args.create.pop(0) - _, ext = os.path.splitext(tar_name) - compressions = { - # gz - '.gz': 'gz', - '.tgz': 'gz', - # xz - '.xz': 'xz', - '.txz': 'xz', - # bz2 - '.bz2': 'bz2', - '.tbz': 'bz2', - '.tbz2': 'bz2', - '.tb2': 'bz2', - } - tar_mode = 'w:' + compressions[ext] if ext in compressions else 'w' - tar_files = args.create - - with TarFile.open(tar_name, tar_mode) as tf: - for file_name in tar_files: - tf.add(file_name) - - if args.verbose: - print('{!r} file created.'.format(tar_name)) - -if __name__ == '__main__': - main() diff --git a/env/lib/python3.10/site-packages/backports/tarfile/__main__.py b/env/lib/python3.10/site-packages/backports/tarfile/__main__.py deleted file mode 100644 index daf5509..0000000 --- a/env/lib/python3.10/site-packages/backports/tarfile/__main__.py +++ /dev/null @@ -1,5 +0,0 @@ -from . import main - - -if __name__ == '__main__': - main() diff --git a/env/lib/python3.10/site-packages/backports/tarfile/__pycache__/__init__.cpython-310.pyc b/env/lib/python3.10/site-packages/backports/tarfile/__pycache__/__init__.cpython-310.pyc deleted file mode 100644 index 486a7cf..0000000 Binary files a/env/lib/python3.10/site-packages/backports/tarfile/__pycache__/__init__.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/backports/tarfile/__pycache__/__main__.cpython-310.pyc b/env/lib/python3.10/site-packages/backports/tarfile/__pycache__/__main__.cpython-310.pyc deleted file mode 100644 index 514e99d..0000000 Binary files a/env/lib/python3.10/site-packages/backports/tarfile/__pycache__/__main__.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/backports/tarfile/compat/__init__.py b/env/lib/python3.10/site-packages/backports/tarfile/compat/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/env/lib/python3.10/site-packages/backports/tarfile/compat/__pycache__/__init__.cpython-310.pyc b/env/lib/python3.10/site-packages/backports/tarfile/compat/__pycache__/__init__.cpython-310.pyc deleted file mode 100644 index 9d35795..0000000 Binary files a/env/lib/python3.10/site-packages/backports/tarfile/compat/__pycache__/__init__.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/backports/tarfile/compat/__pycache__/py38.cpython-310.pyc b/env/lib/python3.10/site-packages/backports/tarfile/compat/__pycache__/py38.cpython-310.pyc deleted file mode 100644 index 7694906..0000000 Binary files a/env/lib/python3.10/site-packages/backports/tarfile/compat/__pycache__/py38.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/backports/tarfile/compat/py38.py b/env/lib/python3.10/site-packages/backports/tarfile/compat/py38.py deleted file mode 100644 index 20fbbfc..0000000 --- a/env/lib/python3.10/site-packages/backports/tarfile/compat/py38.py +++ /dev/null @@ -1,24 +0,0 @@ -import sys - - -if sys.version_info < (3, 9): - - def removesuffix(self, suffix): - # suffix='' should not call self[:-0]. 
- if suffix and self.endswith(suffix): - return self[: -len(suffix)] - else: - return self[:] - - def removeprefix(self, prefix): - if self.startswith(prefix): - return self[len(prefix) :] - else: - return self[:] -else: - - def removesuffix(self, suffix): - return self.removesuffix(suffix) - - def removeprefix(self, prefix): - return self.removeprefix(prefix) diff --git a/env/lib/python3.10/site-packages/bidict-0.23.1.dist-info/INSTALLER b/env/lib/python3.10/site-packages/bidict-0.23.1.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/env/lib/python3.10/site-packages/bidict-0.23.1.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python3.10/site-packages/bidict-0.23.1.dist-info/LICENSE b/env/lib/python3.10/site-packages/bidict-0.23.1.dist-info/LICENSE deleted file mode 100644 index d1cc6f8..0000000 --- a/env/lib/python3.10/site-packages/bidict-0.23.1.dist-info/LICENSE +++ /dev/null @@ -1,376 +0,0 @@ -Mozilla Public License Version 2.0 -================================== - -Copyright 2009-2024 Joshua Bronson. All rights reserved. - - -1. Definitions --------------- - -1.1. "Contributor" - means each individual or legal entity that creates, contributes to - the creation of, or owns Covered Software. - -1.2. "Contributor Version" - means the combination of the Contributions of others (if any) used - by a Contributor and that particular Contributor's Contribution. - -1.3. "Contribution" - means Covered Software of a particular Contributor. - -1.4. "Covered Software" - means Source Code Form to which the initial Contributor has attached - the notice in Exhibit A, the Executable Form of such Source Code - Form, and Modifications of such Source Code Form, in each case - including portions thereof. - -1.5. "Incompatible With Secondary Licenses" - means - - (a) that the initial Contributor has attached the notice described - in Exhibit B to the Covered Software; or - - (b) that the Covered Software was made available under the terms of - version 1.1 or earlier of the License, but not also under the - terms of a Secondary License. - -1.6. "Executable Form" - means any form of the work other than Source Code Form. - -1.7. "Larger Work" - means a work that combines Covered Software with other material, in - a separate file or files, that is not Covered Software. - -1.8. "License" - means this document. - -1.9. "Licensable" - means having the right to grant, to the maximum extent possible, - whether at the time of the initial grant or subsequently, any and - all of the rights conveyed by this License. - -1.10. "Modifications" - means any of the following: - - (a) any file in Source Code Form that results from an addition to, - deletion from, or modification of the contents of Covered - Software; or - - (b) any new file in Source Code Form that contains any Covered - Software. - -1.11. "Patent Claims" of a Contributor - means any patent claim(s), including without limitation, method, - process, and apparatus claims, in any patent Licensable by such - Contributor that would be infringed, but for the grant of the - License, by the making, using, selling, offering for sale, having - made, import, or transfer of either its Contributions or its - Contributor Version. - -1.12. "Secondary License" - means either the GNU General Public License, Version 2.0, the GNU - Lesser General Public License, Version 2.1, the GNU Affero General - Public License, Version 3.0, or any later versions of those - licenses. - -1.13. 
"Source Code Form" - means the form of the work preferred for making modifications. - -1.14. "You" (or "Your") - means an individual or a legal entity exercising rights under this - License. For legal entities, "You" includes any entity that - controls, is controlled by, or is under common control with You. For - purposes of this definition, "control" means (a) the power, direct - or indirect, to cause the direction or management of such entity, - whether by contract or otherwise, or (b) ownership of more than - fifty percent (50%) of the outstanding shares or beneficial - ownership of such entity. - -2. License Grants and Conditions --------------------------------- - -2.1. Grants - -Each Contributor hereby grants You a world-wide, royalty-free, -non-exclusive license: - -(a) under intellectual property rights (other than patent or trademark) - Licensable by such Contributor to use, reproduce, make available, - modify, display, perform, distribute, and otherwise exploit its - Contributions, either on an unmodified basis, with Modifications, or - as part of a Larger Work; and - -(b) under Patent Claims of such Contributor to make, use, sell, offer - for sale, have made, import, and otherwise transfer either its - Contributions or its Contributor Version. - -2.2. Effective Date - -The licenses granted in Section 2.1 with respect to any Contribution -become effective for each Contribution on the date the Contributor first -distributes such Contribution. - -2.3. Limitations on Grant Scope - -The licenses granted in this Section 2 are the only rights granted under -this License. No additional rights or licenses will be implied from the -distribution or licensing of Covered Software under this License. -Notwithstanding Section 2.1(b) above, no patent license is granted by a -Contributor: - -(a) for any code that a Contributor has removed from Covered Software; - or - -(b) for infringements caused by: (i) Your and any other third party's - modifications of Covered Software, or (ii) the combination of its - Contributions with other software (except as part of its Contributor - Version); or - -(c) under Patent Claims infringed by Covered Software in the absence of - its Contributions. - -This License does not grant any rights in the trademarks, service marks, -or logos of any Contributor (except as may be necessary to comply with -the notice requirements in Section 3.4). - -2.4. Subsequent Licenses - -No Contributor makes additional grants as a result of Your choice to -distribute the Covered Software under a subsequent version of this -License (see Section 10.2) or under the terms of a Secondary License (if -permitted under the terms of Section 3.3). - -2.5. Representation - -Each Contributor represents that the Contributor believes its -Contributions are its original creation(s) or it has sufficient rights -to grant the rights to its Contributions conveyed by this License. - -2.6. Fair Use - -This License is not intended to limit any rights You have under -applicable copyright doctrines of fair use, fair dealing, or other -equivalents. - -2.7. Conditions - -Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted -in Section 2.1. - -3. Responsibilities -------------------- - -3.1. Distribution of Source Form - -All distribution of Covered Software in Source Code Form, including any -Modifications that You create or to which You contribute, must be under -the terms of this License. 
You must inform recipients that the Source -Code Form of the Covered Software is governed by the terms of this -License, and how they can obtain a copy of this License. You may not -attempt to alter or restrict the recipients' rights in the Source Code -Form. - -3.2. Distribution of Executable Form - -If You distribute Covered Software in Executable Form then: - -(a) such Covered Software must also be made available in Source Code - Form, as described in Section 3.1, and You must inform recipients of - the Executable Form how they can obtain a copy of such Source Code - Form by reasonable means in a timely manner, at a charge no more - than the cost of distribution to the recipient; and - -(b) You may distribute such Executable Form under the terms of this - License, or sublicense it under different terms, provided that the - license for the Executable Form does not attempt to limit or alter - the recipients' rights in the Source Code Form under this License. - -3.3. Distribution of a Larger Work - -You may create and distribute a Larger Work under terms of Your choice, -provided that You also comply with the requirements of this License for -the Covered Software. If the Larger Work is a combination of Covered -Software with a work governed by one or more Secondary Licenses, and the -Covered Software is not Incompatible With Secondary Licenses, this -License permits You to additionally distribute such Covered Software -under the terms of such Secondary License(s), so that the recipient of -the Larger Work may, at their option, further distribute the Covered -Software under the terms of either this License or such Secondary -License(s). - -3.4. Notices - -You may not remove or alter the substance of any license notices -(including copyright notices, patent notices, disclaimers of warranty, -or limitations of liability) contained within the Source Code Form of -the Covered Software, except that You may alter any license notices to -the extent required to remedy known factual inaccuracies. - -3.5. Application of Additional Terms - -You may choose to offer, and to charge a fee for, warranty, support, -indemnity or liability obligations to one or more recipients of Covered -Software. However, You may do so only on Your own behalf, and not on -behalf of any Contributor. You must make it absolutely clear that any -such warranty, support, indemnity, or liability obligation is offered by -You alone, and You hereby agree to indemnify every Contributor for any -liability incurred by such Contributor as a result of warranty, support, -indemnity or liability terms You offer. You may include additional -disclaimers of warranty and limitations of liability specific to any -jurisdiction. - -4. Inability to Comply Due to Statute or Regulation ---------------------------------------------------- - -If it is impossible for You to comply with any of the terms of this -License with respect to some or all of the Covered Software due to -statute, judicial order, or regulation then You must: (a) comply with -the terms of this License to the maximum extent possible; and (b) -describe the limitations and the code they affect. Such description must -be placed in a text file included with all distributions of the Covered -Software under this License. Except to the extent prohibited by statute -or regulation, such description must be sufficiently detailed for a -recipient of ordinary skill to be able to understand it. - -5. Termination --------------- - -5.1. 
The rights granted under this License will terminate automatically -if You fail to comply with any of its terms. However, if You become -compliant, then the rights granted under this License from a particular -Contributor are reinstated (a) provisionally, unless and until such -Contributor explicitly and finally terminates Your grants, and (b) on an -ongoing basis, if such Contributor fails to notify You of the -non-compliance by some reasonable means prior to 60 days after You have -come back into compliance. Moreover, Your grants from a particular -Contributor are reinstated on an ongoing basis if such Contributor -notifies You of the non-compliance by some reasonable means, this is the -first time You have received notice of non-compliance with this License -from such Contributor, and You become compliant prior to 30 days after -Your receipt of the notice. - -5.2. If You initiate litigation against any entity by asserting a patent -infringement claim (excluding declaratory judgment actions, -counter-claims, and cross-claims) alleging that a Contributor Version -directly or indirectly infringes any patent, then the rights granted to -You by any and all Contributors for the Covered Software under Section -2.1 of this License shall terminate. - -5.3. In the event of termination under Sections 5.1 or 5.2 above, all -end user license agreements (excluding distributors and resellers) which -have been validly granted by You or Your distributors under this License -prior to termination shall survive termination. - -************************************************************************ -* * -* 6. Disclaimer of Warranty * -* ------------------------- * -* * -* Covered Software is provided under this License on an "as is" * -* basis, without warranty of any kind, either expressed, implied, or * -* statutory, including, without limitation, warranties that the * -* Covered Software is free of defects, merchantable, fit for a * -* particular purpose or non-infringing. The entire risk as to the * -* quality and performance of the Covered Software is with You. * -* Should any Covered Software prove defective in any respect, You * -* (not any Contributor) assume the cost of any necessary servicing, * -* repair, or correction. This disclaimer of warranty constitutes an * -* essential part of this License. No use of any Covered Software is * -* authorized under this License except under this disclaimer. * -* * -************************************************************************ - -************************************************************************ -* * -* 7. Limitation of Liability * -* -------------------------- * -* * -* Under no circumstances and under no legal theory, whether tort * -* (including negligence), contract, or otherwise, shall any * -* Contributor, or anyone who distributes Covered Software as * -* permitted above, be liable to You for any direct, indirect, * -* special, incidental, or consequential damages of any character * -* including, without limitation, damages for lost profits, loss of * -* goodwill, work stoppage, computer failure or malfunction, or any * -* and all other commercial damages or losses, even if such party * -* shall have been informed of the possibility of such damages. This * -* limitation of liability shall not apply to liability for death or * -* personal injury resulting from such party's negligence to the * -* extent applicable law prohibits such limitation. 
Some * -* jurisdictions do not allow the exclusion or limitation of * -* incidental or consequential damages, so this exclusion and * -* limitation may not apply to You. * -* * -************************************************************************ - -8. Litigation -------------- - -Any litigation relating to this License may be brought only in the -courts of a jurisdiction where the defendant maintains its principal -place of business and such litigation shall be governed by laws of that -jurisdiction, without reference to its conflict-of-law provisions. -Nothing in this Section shall prevent a party's ability to bring -cross-claims or counter-claims. - -9. Miscellaneous ----------------- - -This License represents the complete agreement concerning the subject -matter hereof. If any provision of this License is held to be -unenforceable, such provision shall be reformed only to the extent -necessary to make it enforceable. Any law or regulation which provides -that the language of a contract shall be construed against the drafter -shall not be used to construe this License against a Contributor. - -10. Versions of the License ---------------------------- - -10.1. New Versions - -Mozilla Foundation is the license steward. Except as provided in Section -10.3, no one other than the license steward has the right to modify or -publish new versions of this License. Each version will be given a -distinguishing version number. - -10.2. Effect of New Versions - -You may distribute the Covered Software under the terms of the version -of the License under which You originally received the Covered Software, -or under the terms of any subsequent version published by the license -steward. - -10.3. Modified Versions - -If you create software not governed by this License, and you want to -create a new license for such software, you may create and use a -modified version of this License if you rename the license and remove -any references to the name of the license steward (except to note that -such modified license differs from this License). - -10.4. Distributing Source Code Form that is Incompatible With Secondary -Licenses - -If You choose to distribute Source Code Form that is Incompatible With -Secondary Licenses under the terms of this version of the License, the -notice described in Exhibit B of this License must be attached. - -Exhibit A - Source Code Form License Notice -------------------------------------------- - - This Source Code Form is subject to the terms of the Mozilla Public - License, v. 2.0. If a copy of the MPL was not distributed with this - file, You can obtain one at http://mozilla.org/MPL/2.0/. - -If it is not possible or desirable to put the notice in a particular -file, then You may include the notice in a location (such as a LICENSE -file in a relevant directory) where a recipient would be likely to look -for such a notice. - -You may add additional accurate notices of copyright ownership. - -Exhibit B - "Incompatible With Secondary Licenses" Notice ---------------------------------------------------------- - - This Source Code Form is "Incompatible With Secondary Licenses", as - defined by the Mozilla Public License, v. 2.0. 
diff --git a/env/lib/python3.10/site-packages/bidict-0.23.1.dist-info/METADATA b/env/lib/python3.10/site-packages/bidict-0.23.1.dist-info/METADATA deleted file mode 100644 index 5356d23..0000000 --- a/env/lib/python3.10/site-packages/bidict-0.23.1.dist-info/METADATA +++ /dev/null @@ -1,260 +0,0 @@ -Metadata-Version: 2.1 -Name: bidict -Version: 0.23.1 -Summary: The bidirectional mapping library for Python. -Author-email: Joshua Bronson -License: MPL 2.0 -Project-URL: Changelog, https://bidict.readthedocs.io/changelog.html -Project-URL: Documentation, https://bidict.readthedocs.io -Project-URL: Funding, https://bidict.readthedocs.io/#sponsoring -Project-URL: Repository, https://github.com/jab/bidict -Keywords: bidict,bimap,bidirectional,dict,dictionary,mapping,collections -Classifier: License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0) -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Python :: Implementation :: PyPy -Classifier: Typing :: Typed -Requires-Python: >=3.8 -Description-Content-Type: text/x-rst -License-File: LICENSE - -.. role:: doc -.. (Forward declaration for the "doc" role that Sphinx defines for interop with renderers that - are often used to show this doc and that are unaware of Sphinx (GitHub.com, PyPI.org, etc.). - Use :doc: rather than :ref: here for better interop as well.) - - -bidict -====== - -*The bidirectional mapping library for Python.* - - -Status ------- - -.. image:: https://img.shields.io/pypi/v/bidict.svg - :target: https://pypi.org/project/bidict - :alt: Latest release - -.. image:: https://img.shields.io/readthedocs/bidict/main.svg - :target: https://bidict.readthedocs.io/en/main/ - :alt: Documentation - -.. image:: https://github.com/jab/bidict/actions/workflows/test.yml/badge.svg - :target: https://github.com/jab/bidict/actions/workflows/test.yml?query=branch%3Amain - :alt: GitHub Actions CI status - -.. image:: https://img.shields.io/pypi/l/bidict.svg - :target: https://raw.githubusercontent.com/jab/bidict/main/LICENSE - :alt: License - -.. image:: https://static.pepy.tech/badge/bidict - :target: https://pepy.tech/project/bidict - :alt: PyPI Downloads - -.. image:: https://img.shields.io/badge/GitHub-sponsor-ff69b4 - :target: https://github.com/sponsors/jab - :alt: Sponsor - - -Features --------- - -- Mature: Depended on by - Google, Venmo, CERN, Baidu, Tencent, - and teams across the world since 2009 - -- Familiar, Pythonic APIs - that are carefully designed for - safety, simplicity, flexibility, and ergonomics - -- Lightweight, with no runtime dependencies - outside Python's standard library - -- Implemented in - concise, well-factored, fully type-hinted Python code - that is optimized for running efficiently - as well as for long-term maintenance and stability - (as well as `joy <#learning-from-bidict>`__) - -- Extensively `documented `__ - -- 100% test coverage - running continuously across all supported Python versions - (including property-based tests and benchmarks) - - -Installation ------------- - -``pip install bidict`` - - -Quick Start ------------ - -.. code:: python - - >>> from bidict import bidict - >>> element_by_symbol = bidict({'H': 'hydrogen'}) - >>> element_by_symbol['H'] - 'hydrogen' - >>> element_by_symbol.inverse['hydrogen'] - 'H' - - -For more usage documentation, -head to the :doc:`intro` [#fn-intro]_ -and proceed from there. 
- - -Enterprise Support ------------------- - -Enterprise-level support for bidict can be obtained via the -`Tidelift subscription `__ -or by `contacting me directly `__. - -I have a US-based LLC set up for invoicing, -and I have 15+ years of professional experience -delivering software and support to companies successfully. - -You can also sponsor my work through several platforms, including GitHub Sponsors. -See the `Sponsoring <#sponsoring>`__ section below for details, -including rationale and examples of companies -supporting the open source projects they depend on. - - -Voluntary Community Support ---------------------------- - -Please search through already-asked questions and answers -in `GitHub Discussions `__ -and the `issue tracker `__ -in case your question has already been addressed. - -Otherwise, please feel free to -`start a new discussion `__ -or `create a new issue `__ on GitHub -for voluntary community support. - - -Notice of Usage ---------------- - -If you use bidict, -and especially if your usage or your organization is significant in some way, -please let me know in any of the following ways: - -- `star bidict on GitHub `__ -- post in `GitHub Discussions `__ -- `email me `__ - - -Changelog ---------- - -For bidict release notes, see the :doc:`changelog`. [#fn-changelog]_ - - -Release Notifications ---------------------- - -.. duplicated in CHANGELOG.rst: - (Would use `.. include::` but GitHub's renderer doesn't support it.) - -Watch `bidict releases on GitHub `__ -to be notified when new versions of bidict are published. -Click the "Watch" dropdown, choose "Custom", and then choose "Releases". - - -Learning from bidict --------------------- - -One of the best things about bidict -is that it touches a surprising number of -interesting Python corners, -especially given its small size and scope. - -Check out :doc:`learning-from-bidict` [#fn-learning]_ -if you're interested in learning more. - - -Contributing ------------- - -I have been bidict's sole maintainer -and `active contributor `__ -since I started the project ~15 years ago. - -Your help would be most welcome! -See the :doc:`contributors-guide` [#fn-contributing]_ -for more information. - - -Sponsoring ----------- - -.. duplicated in CONTRIBUTING.rst - (Would use `.. include::` but GitHub's renderer doesn't support it.) - -.. image:: https://img.shields.io/badge/GitHub-sponsor-ff69b4 - :target: https://github.com/sponsors/jab - :alt: Sponsor through GitHub - -Bidict is the product of thousands of hours of my unpaid work -over the 15+ years that I've been the sole maintainer. - -If bidict has helped you or your company accomplish your work, -please sponsor my work through one of the following, -and/or ask your company to do the same: - -- `GitHub `__ -- `PayPal `__ -- `Tidelift `__ -- `thanks.dev `__ -- `Gumroad `__ -- `a support engagement with my LLC <#enterprise-support>`__ - -If you're not sure which to use, GitHub is an easy option, -especially if you already have a GitHub account. -Just choose a monthly or one-time amount, and GitHub handles everything else. -Your bidict sponsorship on GitHub will automatically go -on the same regular bill as any other GitHub charges you pay for. -PayPal is another easy option for one-time contributions. - -See the following for rationale and examples of companies -supporting the open source projects they depend on -in this manner: - -- ``__ -- ``__ -- ``__ - -.. - ``__ -.. - ``__ -.. 
- ``__ - - -Finding Documentation ---------------------- - -If you're viewing this on ``__, -note that multiple versions of the documentation are available, -and you can choose a different version using the popup menu at the bottom-right. -Please make sure you're viewing the version of the documentation -that corresponds to the version of bidict you'd like to use. - -If you're viewing this on GitHub, PyPI, or some other place -that can't render and link this documentation properly -and are seeing broken links, -try these alternate links instead: - -.. [#fn-intro] ``__ | ``__ - -.. [#fn-changelog] ``__ | ``__ - -.. [#fn-learning] ``__ | ``__ - -.. [#fn-contributing] ``__ | ``__ diff --git a/env/lib/python3.10/site-packages/bidict-0.23.1.dist-info/RECORD b/env/lib/python3.10/site-packages/bidict-0.23.1.dist-info/RECORD deleted file mode 100644 index d826b27..0000000 --- a/env/lib/python3.10/site-packages/bidict-0.23.1.dist-info/RECORD +++ /dev/null @@ -1,31 +0,0 @@ -bidict-0.23.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -bidict-0.23.1.dist-info/LICENSE,sha256=8_U63OyqSNc6ZuI4-lupBstBh2eDtF0ooTRrMULuvZo,16784 -bidict-0.23.1.dist-info/METADATA,sha256=2ovIRm6Df8gdwAMekGqkeBSF5TWj2mv1jpmh4W4ks7o,8704 -bidict-0.23.1.dist-info/RECORD,, -bidict-0.23.1.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92 -bidict-0.23.1.dist-info/top_level.txt,sha256=WuQO02jp0ODioS7sJoaHg3JJ5_3h6Sxo9RITvNGPYmc,7 -bidict/__init__.py,sha256=pL87KsrDpBsl3AG09LQk1t1TSFt0hIJVYa2POMdErN8,4398 -bidict/__pycache__/__init__.cpython-310.pyc,, -bidict/__pycache__/_abc.cpython-310.pyc,, -bidict/__pycache__/_base.cpython-310.pyc,, -bidict/__pycache__/_bidict.cpython-310.pyc,, -bidict/__pycache__/_dup.cpython-310.pyc,, -bidict/__pycache__/_exc.cpython-310.pyc,, -bidict/__pycache__/_frozen.cpython-310.pyc,, -bidict/__pycache__/_iter.cpython-310.pyc,, -bidict/__pycache__/_orderedbase.cpython-310.pyc,, -bidict/__pycache__/_orderedbidict.cpython-310.pyc,, -bidict/__pycache__/_typing.cpython-310.pyc,, -bidict/__pycache__/metadata.cpython-310.pyc,, -bidict/_abc.py,sha256=SMCNdCsmqSWg0OGnMZtnnXY8edjXcyZup5tva4HBm_c,3172 -bidict/_base.py,sha256=YiauA0aj52fNB6cfZ4gBt6OV-CRQoZm7WVhuw1nT-Cg,24439 -bidict/_bidict.py,sha256=Sr-RoEzWOaxpnDRbDJ7ngaGRIsyGnqZgzvR-NyT4jl4,6923 -bidict/_dup.py,sha256=YAn5gWA6lwMBA5A6ebVF19UTZyambGS8WxmbK4TN1Ww,2079 -bidict/_exc.py,sha256=HnD_WgteI5PrXa3zBx9RUiGlgnZTO6CF4nIU9p3-njk,1066 -bidict/_frozen.py,sha256=p4TaRHKeyTs0KmlpwSnZiTlN_CR4J97kAgBpNdZHQMs,1771 -bidict/_iter.py,sha256=zVUx-hJ1M4YuJROoFWRjPKlcaFnyo1AAuRpOaKAFhOQ,1530 -bidict/_orderedbase.py,sha256=M7v5rHa7vrym9Z3DxQBFQDxjnrr39Z8p26V0c1PggoE,8942 -bidict/_orderedbidict.py,sha256=pPnmC19mIISrj8_yjnb-4r_ti1B74tD5eTd08DETNuI,7080 -bidict/_typing.py,sha256=AylMZpBhEFTQegfziPSxfKkKLk7oUsH6o3awDIg2z_k,1289 -bidict/metadata.py,sha256=BMIKu6fBY_OKeV_q48EpumE7MdmFw8rFcdaUz8kcIYk,573 -bidict/py.typed,sha256=RJao5SVFYIp8IfbxhL_SpZkBQYe3XXzPlobSRdh4B_c,16 diff --git a/env/lib/python3.10/site-packages/bidict-0.23.1.dist-info/WHEEL b/env/lib/python3.10/site-packages/bidict-0.23.1.dist-info/WHEEL deleted file mode 100644 index 98c0d20..0000000 --- a/env/lib/python3.10/site-packages/bidict-0.23.1.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.42.0) -Root-Is-Purelib: true -Tag: py3-none-any - diff --git a/env/lib/python3.10/site-packages/bidict-0.23.1.dist-info/top_level.txt b/env/lib/python3.10/site-packages/bidict-0.23.1.dist-info/top_level.txt deleted file mode 100644 index 
6ff5b04..0000000 --- a/env/lib/python3.10/site-packages/bidict-0.23.1.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -bidict diff --git a/env/lib/python3.10/site-packages/bidict/__init__.py b/env/lib/python3.10/site-packages/bidict/__init__.py deleted file mode 100644 index 07e5ba5..0000000 --- a/env/lib/python3.10/site-packages/bidict/__init__.py +++ /dev/null @@ -1,103 +0,0 @@ -# Copyright 2009-2024 Joshua Bronson. All rights reserved. -# -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. - - -# ============================================================================ -# * Welcome to the bidict source code * -# ============================================================================ - -# Reading through the code? You'll find a "Code review nav" comment like the one -# below at the top and bottom of the key source files. Follow these cues to take -# a path through the code that's optimized for familiarizing yourself with it. -# -# If you're not reading this on https://github.com/jab/bidict already, go there -# to ensure you have the latest version of the code. While there, you can also -# star the project, watch it for updates, fork the code, and submit an issue or -# pull request with any proposed changes. More information can be found linked -# from README.rst, which is also shown on https://github.com/jab/bidict. - -# * Code review nav * -# ============================================================================ -# Current: __init__.py Next: _abc.py → -# ============================================================================ - - -"""The bidirectional mapping library for Python. - ----- - -bidict by example: - -.. code-block:: python - - >>> from bidict import bidict - >>> element_by_symbol = bidict({'H': 'hydrogen'}) - >>> element_by_symbol['H'] - 'hydrogen' - >>> element_by_symbol.inverse['hydrogen'] - 'H' - - -Please see https://github.com/jab/bidict for the most up-to-date code and -https://bidict.readthedocs.io for the most up-to-date documentation -if you are reading this elsewhere. - ----- - -.. :copyright: (c) 2009-2024 Joshua Bronson. -.. :license: MPLv2. See LICENSE for details. -""" - -# Use private aliases to not re-export these publicly (for Sphinx automodule with imported-members). 
-from __future__ import annotations as _annotations - -from contextlib import suppress as _suppress - -from ._abc import BidirectionalMapping as BidirectionalMapping -from ._abc import MutableBidirectionalMapping as MutableBidirectionalMapping -from ._base import BidictBase as BidictBase -from ._base import BidictKeysView as BidictKeysView -from ._base import GeneratedBidictInverse as GeneratedBidictInverse -from ._bidict import MutableBidict as MutableBidict -from ._bidict import bidict as bidict -from ._dup import DROP_NEW as DROP_NEW -from ._dup import DROP_OLD as DROP_OLD -from ._dup import ON_DUP_DEFAULT as ON_DUP_DEFAULT -from ._dup import ON_DUP_DROP_OLD as ON_DUP_DROP_OLD -from ._dup import ON_DUP_RAISE as ON_DUP_RAISE -from ._dup import RAISE as RAISE -from ._dup import OnDup as OnDup -from ._dup import OnDupAction as OnDupAction -from ._exc import BidictException as BidictException -from ._exc import DuplicationError as DuplicationError -from ._exc import KeyAndValueDuplicationError as KeyAndValueDuplicationError -from ._exc import KeyDuplicationError as KeyDuplicationError -from ._exc import ValueDuplicationError as ValueDuplicationError -from ._frozen import frozenbidict as frozenbidict -from ._iter import inverted as inverted -from ._orderedbase import OrderedBidictBase as OrderedBidictBase -from ._orderedbidict import OrderedBidict as OrderedBidict -from .metadata import __author__ as __author__ -from .metadata import __copyright__ as __copyright__ -from .metadata import __description__ as __description__ -from .metadata import __license__ as __license__ -from .metadata import __url__ as __url__ -from .metadata import __version__ as __version__ - - -# Set __module__ of re-exported classes to the 'bidict' top-level module, so that e.g. -# 'bidict.bidict' shows up as 'bidict.bidict` rather than 'bidict._bidict.bidict'. -for _obj in tuple(locals().values()): # pragma: no cover - if not getattr(_obj, '__module__', '').startswith('bidict.'): - continue - with _suppress(AttributeError): - _obj.__module__ = 'bidict' - - -# * Code review nav * -# ============================================================================ -# Current: __init__.py Next: _abc.py → -# ============================================================================ diff --git a/env/lib/python3.10/site-packages/bidict/_abc.py b/env/lib/python3.10/site-packages/bidict/_abc.py deleted file mode 100644 index d4a30aa..0000000 --- a/env/lib/python3.10/site-packages/bidict/_abc.py +++ /dev/null @@ -1,79 +0,0 @@ -# Copyright 2009-2024 Joshua Bronson. All rights reserved. -# -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. - - -# * Code review nav * -# (see comments in __init__.py) -# ============================================================================ -# ← Prev: __init__.py Current: _abc.py Next: _base.py → -# ============================================================================ - - -"""Provide the :class:`BidirectionalMapping` abstract base class.""" - -from __future__ import annotations - -import typing as t -from abc import abstractmethod - -from ._typing import KT -from ._typing import VT - - -class BidirectionalMapping(t.Mapping[KT, VT]): - """Abstract base class for bidirectional mapping types. 
- - Extends :class:`collections.abc.Mapping` primarily by adding the - (abstract) :attr:`inverse` property, - which implementers of :class:`BidirectionalMapping` - should override to return a reference to the inverse - :class:`BidirectionalMapping` instance. - """ - - __slots__ = () - - @property - @abstractmethod - def inverse(self) -> BidirectionalMapping[VT, KT]: - """The inverse of this bidirectional mapping instance. - - *See also* :attr:`bidict.BidictBase.inverse`, :attr:`bidict.BidictBase.inv` - - :raises NotImplementedError: Meant to be overridden in subclasses. - """ - # The @abstractmethod decorator prevents subclasses from being instantiated unless they - # override this method. But an overriding implementation may merely return super().inverse, - # in which case this implementation is used. Raise NotImplementedError to indicate that - # subclasses must actually provide their own implementation. - raise NotImplementedError - - def __inverted__(self) -> t.Iterator[tuple[VT, KT]]: - """Get an iterator over the items in :attr:`inverse`. - - This is functionally equivalent to iterating over the items in the - forward mapping and inverting each one on the fly, but this provides a - more efficient implementation: Assuming the already-inverted items - are stored in :attr:`inverse`, just return an iterator over them directly. - - Providing this default implementation enables external functions, - particularly :func:`~bidict.inverted`, to use this optimized - implementation when available, instead of having to invert on the fly. - - *See also* :func:`bidict.inverted` - """ - return iter(self.inverse.items()) - - -class MutableBidirectionalMapping(BidirectionalMapping[KT, VT], t.MutableMapping[KT, VT]): - """Abstract base class for mutable bidirectional mapping types.""" - - __slots__ = () - - -# * Code review nav * -# ============================================================================ -# ← Prev: __init__.py Current: _abc.py Next: _base.py → -# ============================================================================ diff --git a/env/lib/python3.10/site-packages/bidict/_base.py b/env/lib/python3.10/site-packages/bidict/_base.py deleted file mode 100644 index 848a376..0000000 --- a/env/lib/python3.10/site-packages/bidict/_base.py +++ /dev/null @@ -1,556 +0,0 @@ -# Copyright 2009-2024 Joshua Bronson. All rights reserved. -# -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
- - -# * Code review nav * -# (see comments in __init__.py) -# ============================================================================ -# ← Prev: _abc.py Current: _base.py Next: _frozen.py → -# ============================================================================ - - -"""Provide :class:`BidictBase`.""" - -from __future__ import annotations - -import typing as t -import weakref -from itertools import starmap -from operator import eq -from types import MappingProxyType - -from ._abc import BidirectionalMapping -from ._dup import DROP_NEW -from ._dup import DROP_OLD -from ._dup import ON_DUP_DEFAULT -from ._dup import RAISE -from ._dup import OnDup -from ._exc import DuplicationError -from ._exc import KeyAndValueDuplicationError -from ._exc import KeyDuplicationError -from ._exc import ValueDuplicationError -from ._iter import inverted -from ._iter import iteritems -from ._typing import KT -from ._typing import MISSING -from ._typing import OKT -from ._typing import OVT -from ._typing import VT -from ._typing import Maplike -from ._typing import MapOrItems - - -OldKV = t.Tuple[OKT[KT], OVT[VT]] -DedupResult = t.Optional[OldKV[KT, VT]] -Unwrites = t.List[t.Tuple[t.Any, ...]] -BT = t.TypeVar('BT', bound='BidictBase[t.Any, t.Any]') - - -class BidictKeysView(t.KeysView[KT], t.ValuesView[KT]): - """Since the keys of a bidict are the values of its inverse (and vice versa), - the :class:`~collections.abc.ValuesView` result of calling *bi.values()* - is also a :class:`~collections.abc.KeysView` of *bi.inverse*. - """ - - -class BidictBase(BidirectionalMapping[KT, VT]): - """Base class implementing :class:`BidirectionalMapping`.""" - - #: The default :class:`~bidict.OnDup` - #: that governs behavior when a provided item - #: duplicates the key or value of other item(s). - #: - #: *See also* - #: :ref:`basic-usage:Values Must Be Unique` (https://bidict.rtfd.io/basic-usage.html#values-must-be-unique), - #: :doc:`extending` (https://bidict.rtfd.io/extending.html) - on_dup = ON_DUP_DEFAULT - - _fwdm: t.MutableMapping[KT, VT] #: the backing forward mapping (*key* → *val*) - _invm: t.MutableMapping[VT, KT] #: the backing inverse mapping (*val* → *key*) - - # Use Any rather than KT/VT in the following to avoid "ClassVar cannot contain type variables" errors: - _fwdm_cls: t.ClassVar[type[t.MutableMapping[t.Any, t.Any]]] = dict #: class of the backing forward mapping - _invm_cls: t.ClassVar[type[t.MutableMapping[t.Any, t.Any]]] = dict #: class of the backing inverse mapping - - #: The class of the inverse bidict instance. - _inv_cls: t.ClassVar[type[BidictBase[t.Any, t.Any]]] - - def __init_subclass__(cls) -> None: - super().__init_subclass__() - cls._init_class() - - @classmethod - def _init_class(cls) -> None: - cls._ensure_inv_cls() - cls._set_reversed() - - __reversed__: t.ClassVar[t.Any] - - @classmethod - def _set_reversed(cls) -> None: - """Set __reversed__ for subclasses that do not set it explicitly - according to whether backing mappings are reversible. - """ - if cls is not BidictBase: - resolved = cls.__reversed__ - overridden = resolved is not BidictBase.__reversed__ - if overridden: # E.g. OrderedBidictBase, OrderedBidict - return - backing_reversible = all(issubclass(i, t.Reversible) for i in (cls._fwdm_cls, cls._invm_cls)) - cls.__reversed__ = _fwdm_reversed if backing_reversible else None - - @classmethod - def _ensure_inv_cls(cls) -> None: - """Ensure :attr:`_inv_cls` is set, computing it dynamically if necessary. 
- - All subclasses provided in :mod:`bidict` are their own inverse classes, - i.e., their backing forward and inverse mappings are both the same type, - but users may define subclasses where this is not the case. - This method ensures that the inverse class is computed correctly regardless. - - See: :ref:`extending:Dynamic Inverse Class Generation` - (https://bidict.rtfd.io/extending.html#dynamic-inverse-class-generation) - """ - # This _ensure_inv_cls() method is (indirectly) corecursive with _make_inv_cls() below - # in the case that we need to dynamically generate the inverse class: - # 1. _ensure_inv_cls() calls cls._make_inv_cls() - # 2. cls._make_inv_cls() calls type(..., (cls, ...), ...) to dynamically generate inv_cls - # 3. Our __init_subclass__ hook (see above) is automatically called on inv_cls - # 4. inv_cls.__init_subclass__() calls inv_cls._ensure_inv_cls() - # 5. inv_cls._ensure_inv_cls() resolves to this implementation - # (inv_cls deliberately does not override this), so we're back where we started. - # But since the _make_inv_cls() call will have set inv_cls.__dict__._inv_cls, - # just check if it's already set before calling _make_inv_cls() to prevent infinite recursion. - if getattr(cls, '__dict__', {}).get('_inv_cls'): # Don't assume cls.__dict__ (e.g. mypyc native class) - return - cls._inv_cls = cls._make_inv_cls() - - @classmethod - def _make_inv_cls(cls: type[BT]) -> type[BT]: - diff = cls._inv_cls_dict_diff() - cls_is_own_inv = all(getattr(cls, k, MISSING) == v for (k, v) in diff.items()) - if cls_is_own_inv: - return cls - # Suppress auto-calculation of _inv_cls's _inv_cls since we know it already. - # Works with the guard in BidictBase._ensure_inv_cls() to prevent infinite recursion. - diff['_inv_cls'] = cls - inv_cls = type(f'{cls.__name__}Inv', (cls, GeneratedBidictInverse), diff) - inv_cls.__module__ = cls.__module__ - return t.cast(t.Type[BT], inv_cls) - - @classmethod - def _inv_cls_dict_diff(cls) -> dict[str, t.Any]: - return { - '_fwdm_cls': cls._invm_cls, - '_invm_cls': cls._fwdm_cls, - } - - def __init__(self, arg: MapOrItems[KT, VT] = (), /, **kw: VT) -> None: - """Make a new bidirectional mapping. - The signature behaves like that of :class:`dict`. - ktems passed via positional arg are processed first, - followed by any items passed via keyword argument. - Any duplication encountered along the way - is handled as per :attr:`on_dup`. - """ - self._fwdm = self._fwdm_cls() - self._invm = self._invm_cls() - self._update(arg, kw, rollback=False) - - # If Python ever adds support for higher-kinded types, `inverse` could use them, e.g. - # def inverse(self: BT[KT, VT]) -> BT[VT, KT]: - # Ref: https://github.com/python/typing/issues/548#issuecomment-621571821 - @property - def inverse(self) -> BidictBase[VT, KT]: - """The inverse of this bidirectional mapping instance.""" - # When `bi.inverse` is called for the first time, this method - # computes the inverse instance, stores it for subsequent use, and then - # returns it. It also stores a reference on `bi.inverse` back to `bi`, - # but uses a weakref to avoid creating a reference cycle. Strong references - # to inverse instances are stored in ._inv, and weak references are stored - # in ._invweak. - - # First check if a strong reference is already stored. - inv: BidictBase[VT, KT] | None = getattr(self, '_inv', None) - if inv is not None: - return inv - # Next check if a weak reference is already stored. 
- invweak = getattr(self, '_invweak', None) - if invweak is not None: - inv = invweak() # Try to resolve a strong reference and return it. - if inv is not None: - return inv - # No luck. Compute the inverse reference and store it for subsequent use. - inv = self._make_inverse() - self._inv: BidictBase[VT, KT] | None = inv - self._invweak: weakref.ReferenceType[BidictBase[VT, KT]] | None = None - # Also store a weak reference back to `instance` on its inverse instance, so that - # the second `.inverse` access in `bi.inverse.inverse` hits the cached weakref. - inv._inv = None - inv._invweak = weakref.ref(self) - # In e.g. `bidict().inverse.inverse`, this design ensures that a strong reference - # back to the original instance is retained before its refcount drops to zero, - # avoiding an unintended potential deallocation. - return inv - - def _make_inverse(self) -> BidictBase[VT, KT]: - inv: BidictBase[VT, KT] = self._inv_cls() - inv._fwdm = self._invm - inv._invm = self._fwdm - return inv - - @property - def inv(self) -> BidictBase[VT, KT]: - """Alias for :attr:`inverse`.""" - return self.inverse - - def __repr__(self) -> str: - """See :func:`repr`.""" - clsname = self.__class__.__name__ - items = dict(self.items()) if self else '' - return f'{clsname}({items})' - - def values(self) -> BidictKeysView[VT]: - """A set-like object providing a view on the contained values. - - Since the values of a bidict are equivalent to the keys of its inverse, - this method returns a set-like object for this bidict's values - rather than just a collections.abc.ValuesView. - This object supports set operations like union and difference, - and constant- rather than linear-time containment checks, - and is no more expensive to provide than the less capable - collections.abc.ValuesView would be. - - See :meth:`keys` for more information. - """ - return t.cast(BidictKeysView[VT], self.inverse.keys()) - - def keys(self) -> t.KeysView[KT]: - """A set-like object providing a view on the contained keys. - - When *b._fwdm* is a :class:`dict`, *b.keys()* returns a - *dict_keys* object that behaves exactly the same as - *collections.abc.KeysView(b)*, except for - - - offering better performance - - - being reversible on Python 3.8+ - - - having a .mapping attribute in Python 3.10+ - that exposes a mappingproxy to *b._fwdm*. - """ - fwdm, fwdm_cls = self._fwdm, self._fwdm_cls - return fwdm.keys() if fwdm_cls is dict else BidictKeysView(self) - - def items(self) -> t.ItemsView[KT, VT]: - """A set-like object providing a view on the contained items. - - When *b._fwdm* is a :class:`dict`, *b.items()* returns a - *dict_items* object that behaves exactly the same as - *collections.abc.ItemsView(b)*, except for: - - - offering better performance - - - being reversible on Python 3.8+ - - - having a .mapping attribute in Python 3.10+ - that exposes a mappingproxy to *b._fwdm*. - """ - return self._fwdm.items() if self._fwdm_cls is dict else super().items() - - # The inherited collections.abc.Mapping.__contains__() method is implemented by doing a `try` - # `except KeyError` around `self[key]`. The following implementation is much faster, - # especially in the missing case. - def __contains__(self, key: t.Any) -> bool: - """True if the mapping contains the specified key, else False.""" - return key in self._fwdm - - # The inherited collections.abc.Mapping.__eq__() method is implemented in terms of an inefficient - # `dict(self.items()) == dict(other.items())` comparison, so override it with a - # more efficient implementation. 
- def __eq__(self, other: object) -> bool: - """*x.__eq__(other) ⟺ x == other* - - Equivalent to *dict(x.items()) == dict(other.items())* - but more efficient. - - Note that :meth:`bidict's __eq__() ` implementation - is inherited by subclasses, - in particular by the ordered bidict subclasses, - so even with ordered bidicts, - :ref:`== comparison is order-insensitive ` - (https://bidict.rtfd.io/other-bidict-types.html#eq-is-order-insensitive). - - *See also* :meth:`equals_order_sensitive` - """ - if isinstance(other, t.Mapping): - return self._fwdm.items() == other.items() - # Ref: https://docs.python.org/3/library/constants.html#NotImplemented - return NotImplemented - - def equals_order_sensitive(self, other: object) -> bool: - """Order-sensitive equality check. - - *See also* :ref:`eq-order-insensitive` - (https://bidict.rtfd.io/other-bidict-types.html#eq-is-order-insensitive) - """ - if not isinstance(other, t.Mapping) or len(self) != len(other): - return False - return all(starmap(eq, zip(self.items(), other.items()))) - - def _dedup(self, key: KT, val: VT, on_dup: OnDup) -> DedupResult[KT, VT]: - """Check *key* and *val* for any duplication in self. - - Handle any duplication as per the passed in *on_dup*. - - If (key, val) is already present, return None - since writing (key, val) would be a no-op. - - If duplication is found and the corresponding :class:`~bidict.OnDupAction` is - :attr:`~bidict.DROP_NEW`, return None. - - If duplication is found and the corresponding :class:`~bidict.OnDupAction` is - :attr:`~bidict.RAISE`, raise the appropriate exception. - - If duplication is found and the corresponding :class:`~bidict.OnDupAction` is - :attr:`~bidict.DROP_OLD`, or if no duplication is found, - return *(oldkey, oldval)*. - """ - fwdm, invm = self._fwdm, self._invm - oldval: OVT[VT] = fwdm.get(key, MISSING) - oldkey: OKT[KT] = invm.get(val, MISSING) - isdupkey, isdupval = oldval is not MISSING, oldkey is not MISSING - if isdupkey and isdupval: - if key == oldkey: - assert val == oldval - # (key, val) duplicates an existing item -> no-op. - return None - # key and val each duplicate a different existing item. - if on_dup.val is RAISE: - raise KeyAndValueDuplicationError(key, val) - if on_dup.val is DROP_NEW: - return None - assert on_dup.val is DROP_OLD - # Fall through to the return statement on the last line. - elif isdupkey: - if on_dup.key is RAISE: - raise KeyDuplicationError(key) - if on_dup.key is DROP_NEW: - return None - assert on_dup.key is DROP_OLD - # Fall through to the return statement on the last line. - elif isdupval: - if on_dup.val is RAISE: - raise ValueDuplicationError(val) - if on_dup.val is DROP_NEW: - return None - assert on_dup.val is DROP_OLD - # Fall through to the return statement on the last line. - # else neither isdupkey nor isdupval. - return oldkey, oldval - - def _write(self, newkey: KT, newval: VT, oldkey: OKT[KT], oldval: OVT[VT], unwrites: Unwrites | None) -> None: - """Insert (newkey, newval), extending *unwrites* with associated inverse operations if provided. - - *oldkey* and *oldval* are as returned by :meth:`_dedup`. - - If *unwrites* is not None, it is extended with the inverse operations necessary to undo the write. - This design allows :meth:`_update` to roll back a partially applied update that fails part-way through - when necessary. - - This design also allows subclasses that require additional operations to easily extend this implementation. 
- For example, :class:`bidict.OrderedBidictBase` calls this inherited implementation, and then extends *unwrites* - with additional operations needed to keep its internal linked list nodes consistent with its items' order - as changes are made. - """ - fwdm, invm = self._fwdm, self._invm - fwdm_set, invm_set = fwdm.__setitem__, invm.__setitem__ - fwdm_del, invm_del = fwdm.__delitem__, invm.__delitem__ - # Always perform the following writes regardless of duplication. - fwdm_set(newkey, newval) - invm_set(newval, newkey) - if oldval is MISSING and oldkey is MISSING: # no key or value duplication - # {0: 1, 2: 3} | {4: 5} => {0: 1, 2: 3, 4: 5} - if unwrites is not None: - unwrites.extend(( - (fwdm_del, newkey), - (invm_del, newval), - )) - elif oldval is not MISSING and oldkey is not MISSING: # key and value duplication across two different items - # {0: 1, 2: 3} | {0: 3} => {0: 3} - fwdm_del(oldkey) - invm_del(oldval) - if unwrites is not None: - unwrites.extend(( - (fwdm_set, newkey, oldval), - (invm_set, oldval, newkey), - (fwdm_set, oldkey, newval), - (invm_set, newval, oldkey), - )) - elif oldval is not MISSING: # just key duplication - # {0: 1, 2: 3} | {2: 4} => {0: 1, 2: 4} - invm_del(oldval) - if unwrites is not None: - unwrites.extend(( - (fwdm_set, newkey, oldval), - (invm_set, oldval, newkey), - (invm_del, newval), - )) - else: - assert oldkey is not MISSING # just value duplication - # {0: 1, 2: 3} | {4: 3} => {0: 1, 4: 3} - fwdm_del(oldkey) - if unwrites is not None: - unwrites.extend(( - (fwdm_set, oldkey, newval), - (invm_set, newval, oldkey), - (fwdm_del, newkey), - )) - - def _update( - self, - arg: MapOrItems[KT, VT], - kw: t.Mapping[str, VT] = MappingProxyType({}), - *, - rollback: bool | None = None, - on_dup: OnDup | None = None, - ) -> None: - """Update with the items from *arg* and *kw*, maybe failing and rolling back as per *on_dup* and *rollback*.""" - # Note: We must process input in a single pass, since arg may be a generator. - if not isinstance(arg, (t.Iterable, Maplike)): - raise TypeError(f"'{arg.__class__.__name__}' object is not iterable") - if not arg and not kw: - return - if on_dup is None: - on_dup = self.on_dup - if rollback is None: - rollback = RAISE in on_dup - - # Fast path when we're empty and updating only from another bidict (i.e. no dup vals in new items). - if not self and not kw and isinstance(arg, BidictBase): - self._init_from(arg) - return - - # Fast path when we're adding more items than we contain already and rollback is enabled: - # Update a copy of self with rollback disabled. Fail if that fails, otherwise become the copy. - if rollback and isinstance(arg, t.Sized) and len(arg) + len(kw) > len(self): - tmp = self.copy() - tmp._update(arg, kw, rollback=False, on_dup=on_dup) - self._init_from(tmp) - return - - # In all other cases, benchmarking has indicated that the update is best implemented as follows: - # For each new item, perform a dup check (raising if necessary), and apply the associated writes we need to - # perform on our backing _fwdm and _invm mappings. If rollback is enabled, also compute the associated unwrites - # as we go. If the update results in a DuplicationError and rollback is enabled, apply the accumulated unwrites - # before raising, to ensure that we fail clean. 
- write = self._write - unwrites: Unwrites | None = [] if rollback else None - for key, val in iteritems(arg, **kw): - try: - dedup_result = self._dedup(key, val, on_dup) - except DuplicationError: - if unwrites is not None: - for fn, *args in reversed(unwrites): - fn(*args) - raise - if dedup_result is not None: - write(key, val, *dedup_result, unwrites=unwrites) - - def __copy__(self: BT) -> BT: - """Used for the copy protocol. See the :mod:`copy` module.""" - return self.copy() - - def copy(self: BT) -> BT: - """Make a (shallow) copy of this bidict.""" - # Could just `return self.__class__(self)` here, but the below is faster. The former - # would copy this bidict's items into a new instance one at a time (checking for duplication - # for each item), whereas the below copies from the backing mappings all at once, and foregoes - # item-by-item duplication checking since the backing mappings have been checked already. - return self._from_other(self.__class__, self) - - @staticmethod - def _from_other(bt: type[BT], other: MapOrItems[KT, VT], inv: bool = False) -> BT: - """Fast, private constructor based on :meth:`_init_from`. - - If *inv* is true, return the inverse of the instance instead of the instance itself. - (Useful for pickling with dynamically-generated inverse classes -- see :meth:`__reduce__`.) - """ - inst = bt() - inst._init_from(other) - return t.cast(BT, inst.inverse) if inv else inst - - def _init_from(self, other: MapOrItems[KT, VT]) -> None: - """Fast init from *other*, bypassing item-by-item duplication checking.""" - self._fwdm.clear() - self._invm.clear() - self._fwdm.update(other) - # If other is a bidict, use its existing backing inverse mapping, otherwise - # other could be a generator that's now exhausted, so invert self._fwdm on the fly. - inv = other.inverse if isinstance(other, BidictBase) else inverted(self._fwdm) - self._invm.update(inv) - - # other's type is Mapping rather than Maplike since bidict() | SupportsKeysAndGetItem({}) - # raises a TypeError, just like dict() | SupportsKeysAndGetItem({}) does. - def __or__(self: BT, other: t.Mapping[KT, VT]) -> BT: - """Return self|other.""" - if not isinstance(other, t.Mapping): - return NotImplemented - new = self.copy() - new._update(other, rollback=False) - return new - - def __ror__(self: BT, other: t.Mapping[KT, VT]) -> BT: - """Return other|self.""" - if not isinstance(other, t.Mapping): - return NotImplemented - new = self.__class__(other) - new._update(self, rollback=False) - return new - - def __len__(self) -> int: - """The number of contained items.""" - return len(self._fwdm) - - def __iter__(self) -> t.Iterator[KT]: - """Iterator over the contained keys.""" - return iter(self._fwdm) - - def __getitem__(self, key: KT) -> VT: - """*x.__getitem__(key) ⟺ x[key]*""" - return self._fwdm[key] - - def __reduce__(self) -> tuple[t.Any, ...]: - """Return state information for pickling.""" - cls = self.__class__ - inst: t.Mapping[t.Any, t.Any] = self - # If this bidict's class is dynamically generated, pickle the inverse instead, whose (presumably not - # dynamically generated) class the caller is more likely to have a reference to somewhere in sys.modules - # that pickle can discover. - if should_invert := isinstance(self, GeneratedBidictInverse): - cls = self._inv_cls - inst = self.inverse - return self._from_other, (cls, dict(inst), should_invert) - - -# See BidictBase._set_reversed() above. 
-def _fwdm_reversed(self: BidictBase[KT, t.Any]) -> t.Iterator[KT]: - """Iterator over the contained keys in reverse order.""" - assert isinstance(self._fwdm, t.Reversible) - return reversed(self._fwdm) - - -BidictBase._init_class() - - -class GeneratedBidictInverse: - """Base class for dynamically-generated inverse bidict classes.""" - - -# * Code review nav * -# ============================================================================ -# ← Prev: _abc.py Current: _base.py Next: _frozen.py → -# ============================================================================ diff --git a/env/lib/python3.10/site-packages/bidict/_bidict.py b/env/lib/python3.10/site-packages/bidict/_bidict.py deleted file mode 100644 index 94dd3db..0000000 --- a/env/lib/python3.10/site-packages/bidict/_bidict.py +++ /dev/null @@ -1,194 +0,0 @@ -# Copyright 2009-2024 Joshua Bronson. All rights reserved. -# -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. - - -# * Code review nav * -# (see comments in __init__.py) -# ============================================================================ -# ← Prev: _frozen.py Current: _bidict.py Next: _orderedbase.py → -# ============================================================================ - - -"""Provide :class:`MutableBidict` and :class:`bidict`.""" - -from __future__ import annotations - -import typing as t - -from ._abc import MutableBidirectionalMapping -from ._base import BidictBase -from ._dup import ON_DUP_DROP_OLD -from ._dup import ON_DUP_RAISE -from ._dup import OnDup -from ._typing import DT -from ._typing import KT -from ._typing import MISSING -from ._typing import ODT -from ._typing import VT -from ._typing import MapOrItems - - -class MutableBidict(BidictBase[KT, VT], MutableBidirectionalMapping[KT, VT]): - """Base class for mutable bidirectional mappings.""" - - if t.TYPE_CHECKING: - - @property - def inverse(self) -> MutableBidict[VT, KT]: ... - - @property - def inv(self) -> MutableBidict[VT, KT]: ... - - def _pop(self, key: KT) -> VT: - val = self._fwdm.pop(key) - del self._invm[val] - return val - - def __delitem__(self, key: KT) -> None: - """*x.__delitem__(y) ⟺ del x[y]*""" - self._pop(key) - - def __setitem__(self, key: KT, val: VT) -> None: - """Set the value for *key* to *val*. - - If *key* is already associated with *val*, this is a no-op. - - If *key* is already associated with a different value, - the old value will be replaced with *val*, - as with dict's :meth:`__setitem__`. - - If *val* is already associated with a different key, - an exception is raised - to protect against accidental removal of the key - that's currently associated with *val*. - - Use :meth:`put` instead if you want to specify different behavior in - the case that the provided key or value duplicates an existing one. - Or use :meth:`forceput` to unconditionally associate *key* with *val*, - replacing any existing items as necessary to preserve uniqueness. - - :raises bidict.ValueDuplicationError: if *val* duplicates that of an - existing item. - - :raises bidict.KeyAndValueDuplicationError: if *key* duplicates the key of an - existing item and *val* duplicates the value of a different - existing item. - """ - self.put(key, val, on_dup=self.on_dup) - - def put(self, key: KT, val: VT, on_dup: OnDup = ON_DUP_RAISE) -> None: - """Associate *key* with *val*, honoring the :class:`OnDup` given in *on_dup*. 
- - For example, if *on_dup* is :attr:`~bidict.ON_DUP_RAISE`, - then *key* will be associated with *val* if and only if - *key* is not already associated with an existing value and - *val* is not already associated with an existing key, - otherwise an exception will be raised. - - If *key* is already associated with *val*, this is a no-op. - - :raises bidict.KeyDuplicationError: if attempting to insert an item - whose key only duplicates an existing item's, and *on_dup.key* is - :attr:`~bidict.RAISE`. - - :raises bidict.ValueDuplicationError: if attempting to insert an item - whose value only duplicates an existing item's, and *on_dup.val* is - :attr:`~bidict.RAISE`. - - :raises bidict.KeyAndValueDuplicationError: if attempting to insert an - item whose key duplicates one existing item's, and whose value - duplicates another existing item's, and *on_dup.val* is - :attr:`~bidict.RAISE`. - """ - self._update(((key, val),), on_dup=on_dup) - - def forceput(self, key: KT, val: VT) -> None: - """Associate *key* with *val* unconditionally. - - Replace any existing mappings containing key *key* or value *val* - as necessary to preserve uniqueness. - """ - self.put(key, val, on_dup=ON_DUP_DROP_OLD) - - def clear(self) -> None: - """Remove all items.""" - self._fwdm.clear() - self._invm.clear() - - @t.overload - def pop(self, key: KT, /) -> VT: ... - @t.overload - def pop(self, key: KT, default: DT = ..., /) -> VT | DT: ... - - def pop(self, key: KT, default: ODT[DT] = MISSING, /) -> VT | DT: - """*x.pop(k[, d]) → v* - - Remove specified key and return the corresponding value. - - :raises KeyError: if *key* is not found and no *default* is provided. - """ - try: - return self._pop(key) - except KeyError: - if default is MISSING: - raise - return default - - def popitem(self) -> tuple[KT, VT]: - """*x.popitem() → (k, v)* - - Remove and return some item as a (key, value) pair. - - :raises KeyError: if *x* is empty. - """ - key, val = self._fwdm.popitem() - del self._invm[val] - return key, val - - def update(self, arg: MapOrItems[KT, VT] = (), /, **kw: VT) -> None: - """Like calling :meth:`putall` with *self.on_dup* passed for *on_dup*.""" - self._update(arg, kw=kw) - - def forceupdate(self, arg: MapOrItems[KT, VT] = (), /, **kw: VT) -> None: - """Like a bulk :meth:`forceput`.""" - self._update(arg, kw=kw, on_dup=ON_DUP_DROP_OLD) - - def putall(self, items: MapOrItems[KT, VT], on_dup: OnDup = ON_DUP_RAISE) -> None: - """Like a bulk :meth:`put`. - - If one of the given items causes an exception to be raised, - none of the items is inserted. - """ - self._update(items, on_dup=on_dup) - - # other's type is Mapping rather than Maplike since bidict() |= SupportsKeysAndGetItem({}) - # raises a TypeError, just like dict() |= SupportsKeysAndGetItem({}) does. - def __ior__(self, other: t.Mapping[KT, VT]) -> MutableBidict[KT, VT]: - """Return self|=other.""" - self.update(other) - return self - - -class bidict(MutableBidict[KT, VT]): - """The main bidirectional mapping type. - - See :ref:`intro:Introduction` and :ref:`basic-usage:Basic Usage` - to get started (also available at https://bidict.rtfd.io). - """ - - if t.TYPE_CHECKING: - - @property - def inverse(self) -> bidict[VT, KT]: ... - - @property - def inv(self) -> bidict[VT, KT]: ... 
- - -# * Code review nav * -# ============================================================================ -# ← Prev: _frozen.py Current: _bidict.py Next: _orderedbase.py → -# ============================================================================ diff --git a/env/lib/python3.10/site-packages/bidict/_dup.py b/env/lib/python3.10/site-packages/bidict/_dup.py deleted file mode 100644 index fd25b61..0000000 --- a/env/lib/python3.10/site-packages/bidict/_dup.py +++ /dev/null @@ -1,61 +0,0 @@ -# Copyright 2009-2024 Joshua Bronson. All rights reserved. -# -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. - - -"""Provide :class:`OnDup` and related functionality.""" - -from __future__ import annotations - -import typing as t -from enum import Enum - - -class OnDupAction(Enum): - """An action to take to prevent duplication from occurring.""" - - #: Raise a :class:`~bidict.DuplicationError`. - RAISE = 'RAISE' - #: Overwrite existing items with new items. - DROP_OLD = 'DROP_OLD' - #: Keep existing items and drop new items. - DROP_NEW = 'DROP_NEW' - - def __repr__(self) -> str: - return f'{self.__class__.__name__}.{self.name}' - - -RAISE: t.Final[OnDupAction] = OnDupAction.RAISE -DROP_OLD: t.Final[OnDupAction] = OnDupAction.DROP_OLD -DROP_NEW: t.Final[OnDupAction] = OnDupAction.DROP_NEW - - -class OnDup(t.NamedTuple): - r"""A combination of :class:`~bidict.OnDupAction`\s specifying how to handle various types of duplication. - - The :attr:`~OnDup.key` field specifies what action to take when a duplicate key is encountered. - - The :attr:`~OnDup.val` field specifies what action to take when a duplicate value is encountered. - - In the case of both key and value duplication across two different items, - only :attr:`~OnDup.val` is used. - - *See also* :ref:`basic-usage:Values Must Be Unique` - (https://bidict.rtfd.io/basic-usage.html#values-must-be-unique) - """ - - key: OnDupAction = DROP_OLD - val: OnDupAction = RAISE - - -#: Default :class:`OnDup` used for the -#: :meth:`~bidict.bidict.__init__`, -#: :meth:`~bidict.bidict.__setitem__`, and -#: :meth:`~bidict.bidict.update` methods. -ON_DUP_DEFAULT: t.Final[OnDup] = OnDup(key=DROP_OLD, val=RAISE) -#: An :class:`OnDup` whose members are all :obj:`RAISE`. -ON_DUP_RAISE: t.Final[OnDup] = OnDup(key=RAISE, val=RAISE) -#: An :class:`OnDup` whose members are all :obj:`DROP_OLD`. -ON_DUP_DROP_OLD: t.Final[OnDup] = OnDup(key=DROP_OLD, val=DROP_OLD) diff --git a/env/lib/python3.10/site-packages/bidict/_exc.py b/env/lib/python3.10/site-packages/bidict/_exc.py deleted file mode 100644 index e2a96f3..0000000 --- a/env/lib/python3.10/site-packages/bidict/_exc.py +++ /dev/null @@ -1,36 +0,0 @@ -# Copyright 2009-2024 Joshua Bronson. All rights reserved. -# -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. - - -"""Provide all bidict exceptions.""" - -from __future__ import annotations - - -class BidictException(Exception): - """Base class for bidict exceptions.""" - - -class DuplicationError(BidictException): - """Base class for exceptions raised when uniqueness is violated - as per the :attr:`~bidict.RAISE` :class:`~bidict.OnDupAction`. 
- """ - - -class KeyDuplicationError(DuplicationError): - """Raised when a given key is not unique.""" - - -class ValueDuplicationError(DuplicationError): - """Raised when a given value is not unique.""" - - -class KeyAndValueDuplicationError(KeyDuplicationError, ValueDuplicationError): - """Raised when a given item's key and value are not unique. - - That is, its key duplicates that of another item, - and its value duplicates that of a different other item. - """ diff --git a/env/lib/python3.10/site-packages/bidict/_frozen.py b/env/lib/python3.10/site-packages/bidict/_frozen.py deleted file mode 100644 index e2f789d..0000000 --- a/env/lib/python3.10/site-packages/bidict/_frozen.py +++ /dev/null @@ -1,50 +0,0 @@ -# Copyright 2009-2024 Joshua Bronson. All rights reserved. -# -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. - - -# * Code review nav * -# (see comments in __init__.py) -# ============================================================================ -# ← Prev: _base.py Current: _frozen.py Next: _bidict.py → -# ============================================================================ - -"""Provide :class:`frozenbidict`, an immutable, hashable bidirectional mapping type.""" - -from __future__ import annotations - -import typing as t - -from ._base import BidictBase -from ._typing import KT -from ._typing import VT - - -class frozenbidict(BidictBase[KT, VT]): - """Immutable, hashable bidict type.""" - - _hash: int - - if t.TYPE_CHECKING: - - @property - def inverse(self) -> frozenbidict[VT, KT]: ... - - @property - def inv(self) -> frozenbidict[VT, KT]: ... - - def __hash__(self) -> int: - """The hash of this bidict as determined by its items.""" - if getattr(self, '_hash', None) is None: - # The following is like hash(frozenset(self.items())) - # but more memory efficient. See also: https://bugs.python.org/issue46684 - self._hash = t.ItemsView(self)._hash() - return self._hash - - -# * Code review nav * -# ============================================================================ -# ← Prev: _base.py Current: _frozen.py Next: _bidict.py → -# ============================================================================ diff --git a/env/lib/python3.10/site-packages/bidict/_iter.py b/env/lib/python3.10/site-packages/bidict/_iter.py deleted file mode 100644 index 53ad25d..0000000 --- a/env/lib/python3.10/site-packages/bidict/_iter.py +++ /dev/null @@ -1,51 +0,0 @@ -# Copyright 2009-2024 Joshua Bronson. All rights reserved. -# -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
- - -"""Functions for iterating over items in a mapping.""" - -from __future__ import annotations - -import typing as t -from operator import itemgetter - -from ._typing import KT -from ._typing import VT -from ._typing import ItemsIter -from ._typing import Maplike -from ._typing import MapOrItems - - -def iteritems(arg: MapOrItems[KT, VT] = (), /, **kw: VT) -> ItemsIter[KT, VT]: - """Yield the items from *arg* and *kw* in the order given.""" - if isinstance(arg, t.Mapping): - yield from arg.items() - elif isinstance(arg, Maplike): - yield from ((k, arg[k]) for k in arg.keys()) - else: - yield from arg - yield from t.cast(ItemsIter[KT, VT], kw.items()) - - -swap: t.Final = itemgetter(1, 0) - - -def inverted(arg: MapOrItems[KT, VT]) -> ItemsIter[VT, KT]: - """Yield the inverse items of the provided object. - - If *arg* has a :func:`callable` ``__inverted__`` attribute, - return the result of calling it. - - Otherwise, return an iterator over the items in `arg`, - inverting each item on the fly. - - *See also* :attr:`bidict.BidirectionalMapping.__inverted__` - """ - invattr = getattr(arg, '__inverted__', None) - if callable(invattr): - inv: ItemsIter[VT, KT] = invattr() - return inv - return map(swap, iteritems(arg)) diff --git a/env/lib/python3.10/site-packages/bidict/_orderedbase.py b/env/lib/python3.10/site-packages/bidict/_orderedbase.py deleted file mode 100644 index 92f2633..0000000 --- a/env/lib/python3.10/site-packages/bidict/_orderedbase.py +++ /dev/null @@ -1,238 +0,0 @@ -# Copyright 2009-2024 Joshua Bronson. All rights reserved. -# -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. - - -# * Code review nav * -# (see comments in __init__.py) -# ============================================================================ -# ← Prev: _bidict.py Current: _orderedbase.py Next: _orderedbidict.py → -# ============================================================================ - - -"""Provide :class:`OrderedBidictBase`.""" - -from __future__ import annotations - -import typing as t -from weakref import ref as weakref - -from ._base import BidictBase -from ._base import Unwrites -from ._bidict import bidict -from ._iter import iteritems -from ._typing import KT -from ._typing import MISSING -from ._typing import OKT -from ._typing import OVT -from ._typing import VT -from ._typing import MapOrItems - - -AT = t.TypeVar('AT') # attr type - - -class WeakAttr(t.Generic[AT]): - """Descriptor to automatically manage (de)referencing the given slot as a weakref. - - See https://docs.python.org/3/howto/descriptor.html#managed-attributes - for an intro to using descriptors like this for managed attributes. - """ - - def __init__(self, *, slot: str) -> None: - self.slot = slot - - def __set__(self, instance: t.Any, value: AT) -> None: - setattr(instance, self.slot, weakref(value)) - - def __get__(self, instance: t.Any, __owner: t.Any = None) -> AT: - return t.cast(AT, getattr(instance, self.slot)()) - - -class Node: - """A node in a circular doubly-linked list - used to encode the order of items in an ordered bidict. - - A weak reference to the previous node is stored - to avoid creating strong reference cycles. - Referencing/dereferencing the weakref is handled automatically by :class:`WeakAttr`. 
- """ - - prv: WeakAttr[Node] = WeakAttr(slot='_prv_weak') - __slots__ = ('__weakref__', '_prv_weak', 'nxt') - - nxt: Node | WeakAttr[Node] # Allow subclasses to use a WeakAttr for nxt too (see SentinelNode) - - def __init__(self, prv: Node, nxt: Node) -> None: - self.prv = prv - self.nxt = nxt - - def unlink(self) -> None: - """Remove self from in between prv and nxt. - Self's references to prv and nxt are retained so it can be relinked (see below). - """ - self.prv.nxt = self.nxt - self.nxt.prv = self.prv - - def relink(self) -> None: - """Restore self between prv and nxt after unlinking (see above).""" - self.prv.nxt = self.nxt.prv = self - - -class SentinelNode(Node): - """Special node in a circular doubly-linked list - that links the first node with the last node. - When its next and previous references point back to itself - it represents an empty list. - """ - - nxt: WeakAttr[Node] = WeakAttr(slot='_nxt_weak') - __slots__ = ('_nxt_weak',) - - def __init__(self) -> None: - super().__init__(self, self) - - def iternodes(self, *, reverse: bool = False) -> t.Iterator[Node]: - """Iterator yielding nodes in the requested order.""" - attr = 'prv' if reverse else 'nxt' - node = getattr(self, attr) - while node is not self: - yield node - node = getattr(node, attr) - - def new_last_node(self) -> Node: - """Create and return a new terminal node.""" - old_last = self.prv - new_last = Node(old_last, self) - old_last.nxt = self.prv = new_last - return new_last - - -class OrderedBidictBase(BidictBase[KT, VT]): - """Base class implementing an ordered :class:`BidirectionalMapping`.""" - - _node_by_korv: bidict[t.Any, Node] - _bykey: bool - - def __init__(self, arg: MapOrItems[KT, VT] = (), /, **kw: VT) -> None: - """Make a new ordered bidirectional mapping. - The signature behaves like that of :class:`dict`. - Items passed in are added in the order they are passed, - respecting the :attr:`~bidict.BidictBase.on_dup` - class attribute in the process. - - The order in which items are inserted is remembered, - similar to :class:`collections.OrderedDict`. - """ - self._sntl = SentinelNode() - self._node_by_korv = bidict() - self._bykey = True - super().__init__(arg, **kw) - - if t.TYPE_CHECKING: - - @property - def inverse(self) -> OrderedBidictBase[VT, KT]: ... - - @property - def inv(self) -> OrderedBidictBase[VT, KT]: ... 
- - def _make_inverse(self) -> OrderedBidictBase[VT, KT]: - inv = t.cast(OrderedBidictBase[VT, KT], super()._make_inverse()) - inv._sntl = self._sntl - inv._node_by_korv = self._node_by_korv - inv._bykey = not self._bykey - return inv - - def _assoc_node(self, node: Node, key: KT, val: VT) -> None: - korv = key if self._bykey else val - self._node_by_korv.forceput(korv, node) - - def _dissoc_node(self, node: Node) -> None: - del self._node_by_korv.inverse[node] - node.unlink() - - def _init_from(self, other: MapOrItems[KT, VT]) -> None: - """See :meth:`BidictBase._init_from`.""" - super()._init_from(other) - bykey = self._bykey - korv_by_node = self._node_by_korv.inverse - korv_by_node.clear() - korv_by_node_set = korv_by_node.__setitem__ - self._sntl.nxt = self._sntl.prv = self._sntl - new_node = self._sntl.new_last_node - for k, v in iteritems(other): - korv_by_node_set(new_node(), k if bykey else v) - - def _write(self, newkey: KT, newval: VT, oldkey: OKT[KT], oldval: OVT[VT], unwrites: Unwrites | None) -> None: - """See :meth:`bidict.BidictBase._spec_write`.""" - super()._write(newkey, newval, oldkey, oldval, unwrites) - assoc, dissoc = self._assoc_node, self._dissoc_node - node_by_korv, bykey = self._node_by_korv, self._bykey - if oldval is MISSING and oldkey is MISSING: # no key or value duplication - # {0: 1, 2: 3} | {4: 5} => {0: 1, 2: 3, 4: 5} - newnode = self._sntl.new_last_node() - assoc(newnode, newkey, newval) - if unwrites is not None: - unwrites.append((dissoc, newnode)) - elif oldval is not MISSING and oldkey is not MISSING: # key and value duplication across two different items - # {0: 1, 2: 3} | {0: 3} => {0: 3} - # n1, n2 => n1 (collapse n1 and n2 into n1) - # oldkey: 2, oldval: 1, oldnode: n2, newkey: 0, newval: 3, newnode: n1 - if bykey: - oldnode = node_by_korv[oldkey] - newnode = node_by_korv[newkey] - else: - oldnode = node_by_korv[newval] - newnode = node_by_korv[oldval] - dissoc(oldnode) - assoc(newnode, newkey, newval) - if unwrites is not None: - unwrites.extend(( - (assoc, newnode, newkey, oldval), - (assoc, oldnode, oldkey, newval), - (oldnode.relink,), - )) - elif oldval is not MISSING: # just key duplication - # {0: 1, 2: 3} | {2: 4} => {0: 1, 2: 4} - # oldkey: MISSING, oldval: 3, newkey: 2, newval: 4 - node = node_by_korv[newkey if bykey else oldval] - assoc(node, newkey, newval) - if unwrites is not None: - unwrites.append((assoc, node, newkey, oldval)) - else: - assert oldkey is not MISSING # just value duplication - # {0: 1, 2: 3} | {4: 3} => {0: 1, 4: 3} - # oldkey: 2, oldval: MISSING, newkey: 4, newval: 3 - node = node_by_korv[oldkey if bykey else newval] - assoc(node, newkey, newval) - if unwrites is not None: - unwrites.append((assoc, node, oldkey, newval)) - - def __iter__(self) -> t.Iterator[KT]: - """Iterator over the contained keys in insertion order.""" - return self._iter(reverse=False) - - def __reversed__(self) -> t.Iterator[KT]: - """Iterator over the contained keys in reverse insertion order.""" - return self._iter(reverse=True) - - def _iter(self, *, reverse: bool = False) -> t.Iterator[KT]: - nodes = self._sntl.iternodes(reverse=reverse) - korv_by_node = self._node_by_korv.inverse - if self._bykey: - for node in nodes: - yield korv_by_node[node] - else: - key_by_val = self._invm - for node in nodes: - val = korv_by_node[node] - yield key_by_val[val] - - -# * Code review nav * -# ============================================================================ -# ← Prev: _bidict.py Current: _orderedbase.py Next: _orderedbidict.py → -# 
============================================================================ diff --git a/env/lib/python3.10/site-packages/bidict/_orderedbidict.py b/env/lib/python3.10/site-packages/bidict/_orderedbidict.py deleted file mode 100644 index 2fb1757..0000000 --- a/env/lib/python3.10/site-packages/bidict/_orderedbidict.py +++ /dev/null @@ -1,172 +0,0 @@ -# Copyright 2009-2024 Joshua Bronson. All rights reserved. -# -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. - - -# * Code review nav * -# (see comments in __init__.py) -# ============================================================================ -# ← Prev: _orderedbase.py Current: _orderedbidict.py -# ============================================================================ - - -"""Provide :class:`OrderedBidict`.""" - -from __future__ import annotations - -import typing as t -from collections.abc import Set - -from ._base import BidictKeysView -from ._bidict import MutableBidict -from ._orderedbase import OrderedBidictBase -from ._typing import KT -from ._typing import VT - - -class OrderedBidict(OrderedBidictBase[KT, VT], MutableBidict[KT, VT]): - """Mutable bidict type that maintains items in insertion order.""" - - if t.TYPE_CHECKING: - - @property - def inverse(self) -> OrderedBidict[VT, KT]: ... - - @property - def inv(self) -> OrderedBidict[VT, KT]: ... - - def clear(self) -> None: - """Remove all items.""" - super().clear() - self._node_by_korv.clear() - self._sntl.nxt = self._sntl.prv = self._sntl - - def _pop(self, key: KT) -> VT: - val = super()._pop(key) - node = self._node_by_korv[key if self._bykey else val] - self._dissoc_node(node) - return val - - def popitem(self, last: bool = True) -> tuple[KT, VT]: - """*b.popitem() → (k, v)* - - If *last* is true, - remove and return the most recently added item as a (key, value) pair. - Otherwise, remove and return the least recently added item. - - :raises KeyError: if *b* is empty. - """ - if not self: - raise KeyError('OrderedBidict is empty') - node = getattr(self._sntl, 'prv' if last else 'nxt') - korv = self._node_by_korv.inverse[node] - if self._bykey: - return korv, self._pop(korv) - return self.inverse._pop(korv), korv - - def move_to_end(self, key: KT, last: bool = True) -> None: - """Move the item with the given key to the end if *last* is true, else to the beginning. - - :raises KeyError: if *key* is missing - """ - korv = key if self._bykey else self._fwdm[key] - node = self._node_by_korv[korv] - node.prv.nxt = node.nxt - node.nxt.prv = node.prv - sntl = self._sntl - if last: - lastnode = sntl.prv - node.prv = lastnode - node.nxt = sntl - sntl.prv = lastnode.nxt = node - else: - firstnode = sntl.nxt - node.prv = sntl - node.nxt = firstnode - sntl.nxt = firstnode.prv = node - - # Override the keys() and items() implementations inherited from BidictBase, - # which may delegate to the backing _fwdm dict, since this is a mutable ordered bidict, - # and therefore the ordering of items can get out of sync with the backing mappings - # after mutation. (Need not override values() because it delegates to .inverse.keys().) 
- def keys(self) -> t.KeysView[KT]: - """A set-like object providing a view on the contained keys.""" - return _OrderedBidictKeysView(self) - - def items(self) -> t.ItemsView[KT, VT]: - """A set-like object providing a view on the contained items.""" - return _OrderedBidictItemsView(self) - - -# The following MappingView implementations use the __iter__ implementations -# inherited from their superclass counterparts in collections.abc, so they -# continue to yield items in the correct order even after an OrderedBidict -# is mutated. They also provide a __reversed__ implementation, which is not -# provided by the collections.abc superclasses. -class _OrderedBidictKeysView(BidictKeysView[KT]): - _mapping: OrderedBidict[KT, t.Any] - - def __reversed__(self) -> t.Iterator[KT]: - return reversed(self._mapping) - - -class _OrderedBidictItemsView(t.ItemsView[KT, VT]): - _mapping: OrderedBidict[KT, VT] - - def __reversed__(self) -> t.Iterator[tuple[KT, VT]]: - ob = self._mapping - for key in reversed(ob): - yield key, ob[key] - - -# For better performance, make _OrderedBidictKeysView and _OrderedBidictItemsView delegate -# to backing dicts for the methods they inherit from collections.abc.Set. (Cannot delegate -# for __iter__ and __reversed__ since they are order-sensitive.) See also: https://bugs.python.org/issue46713 -_OView = t.Union[t.Type[_OrderedBidictKeysView[KT]], t.Type[_OrderedBidictItemsView[KT, t.Any]]] -_setmethodnames: t.Iterable[str] = ( - '__lt__ __le__ __gt__ __ge__ __eq__ __ne__ __sub__ __rsub__ ' - '__or__ __ror__ __xor__ __rxor__ __and__ __rand__ isdisjoint' -).split() - - -def _override_set_methods_to_use_backing_dict(cls: _OView[KT], viewname: str) -> None: - def make_proxy_method(methodname: str) -> t.Any: - def method(self: _OrderedBidictKeysView[KT] | _OrderedBidictItemsView[KT, t.Any], *args: t.Any) -> t.Any: - fwdm = self._mapping._fwdm - if not isinstance(fwdm, dict): # dict view speedup not available, fall back to Set's implementation. - return getattr(Set, methodname)(self, *args) - fwdm_dict_view = getattr(fwdm, viewname)() - fwdm_dict_view_method = getattr(fwdm_dict_view, methodname) - if ( - len(args) != 1 - or not isinstance((arg := args[0]), self.__class__) - or not isinstance(arg._mapping._fwdm, dict) - ): - return fwdm_dict_view_method(*args) - # self and arg are both _OrderedBidictKeysViews or _OrderedBidictItemsViews whose bidicts are backed by - # a dict. Use arg's backing dict's corresponding view instead of arg. Otherwise, e.g. `ob1.keys() - # < ob2.keys()` would give "TypeError: '<' not supported between instances of '_OrderedBidictKeysView' and - # '_OrderedBidictKeysView'", because both `dict_keys(ob1).__lt__(ob2.keys()) is NotImplemented` and - # `dict_keys(ob2).__gt__(ob1.keys()) is NotImplemented`. 
- arg_dict = arg._mapping._fwdm - arg_dict_view = getattr(arg_dict, viewname)() - return fwdm_dict_view_method(arg_dict_view) - - method.__name__ = methodname - method.__qualname__ = f'{cls.__qualname__}.{methodname}' - return method - - for name in _setmethodnames: - setattr(cls, name, make_proxy_method(name)) - - -_override_set_methods_to_use_backing_dict(_OrderedBidictKeysView, 'keys') -_override_set_methods_to_use_backing_dict(_OrderedBidictItemsView, 'items') - - -# * Code review nav * -# ============================================================================ -# ← Prev: _orderedbase.py Current: _orderedbidict.py -# ============================================================================ diff --git a/env/lib/python3.10/site-packages/bidict/_typing.py b/env/lib/python3.10/site-packages/bidict/_typing.py deleted file mode 100644 index ce95053..0000000 --- a/env/lib/python3.10/site-packages/bidict/_typing.py +++ /dev/null @@ -1,49 +0,0 @@ -# Copyright 2009-2024 Joshua Bronson. All rights reserved. -# -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. - - -"""Provide typing-related objects.""" - -from __future__ import annotations - -import typing as t -from enum import Enum - - -KT = t.TypeVar('KT') -VT = t.TypeVar('VT') -VT_co = t.TypeVar('VT_co', covariant=True) - - -Items = t.Iterable[t.Tuple[KT, VT]] - - -@t.runtime_checkable -class Maplike(t.Protocol[KT, VT_co]): - """Like typeshed's SupportsKeysAndGetItem, but usable at runtime.""" - - def keys(self) -> t.Iterable[KT]: ... - - def __getitem__(self, __key: KT) -> VT_co: ... - - -MapOrItems = t.Union[Maplike[KT, VT], Items[KT, VT]] -MappOrItems = t.Union[t.Mapping[KT, VT], Items[KT, VT]] -ItemsIter = t.Iterator[t.Tuple[KT, VT]] - - -class MissingT(Enum): - """Sentinel used to represent none/missing when None itself can't be used.""" - - MISSING = 'MISSING' - - -MISSING: t.Final[t.Literal[MissingT.MISSING]] = MissingT.MISSING -OKT = t.Union[KT, MissingT] #: optional key type -OVT = t.Union[VT, MissingT] #: optional value type - -DT = t.TypeVar('DT') #: for default arguments -ODT = t.Union[DT, MissingT] #: optional default arg type diff --git a/env/lib/python3.10/site-packages/bidict/metadata.py b/env/lib/python3.10/site-packages/bidict/metadata.py deleted file mode 100644 index 30ad836..0000000 --- a/env/lib/python3.10/site-packages/bidict/metadata.py +++ /dev/null @@ -1,14 +0,0 @@ -# Copyright 2009-2024 Joshua Bronson. All rights reserved. -# -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. - -"""Define bidict package metadata.""" - -__version__ = '0.23.1' -__author__ = {'name': 'Joshua Bronson', 'email': 'jabronson@gmail.com'} -__copyright__ = '© 2009-2024 Joshua Bronson' -__description__ = 'The bidirectional mapping library for Python.' -__license__ = 'MPL 2.0' -__url__ = 'https://bidict.readthedocs.io' diff --git a/env/lib/python3.10/site-packages/bidict/py.typed b/env/lib/python3.10/site-packages/bidict/py.typed deleted file mode 100644 index 342ea76..0000000 --- a/env/lib/python3.10/site-packages/bidict/py.typed +++ /dev/null @@ -1 +0,0 @@ -PEP-561 marker. 
diff --git a/env/lib/python3.10/site-packages/certifi-2024.8.30.dist-info/INSTALLER b/env/lib/python3.10/site-packages/certifi-2024.8.30.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/env/lib/python3.10/site-packages/certifi-2024.8.30.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python3.10/site-packages/certifi-2024.8.30.dist-info/LICENSE b/env/lib/python3.10/site-packages/certifi-2024.8.30.dist-info/LICENSE deleted file mode 100644 index 62b076c..0000000 --- a/env/lib/python3.10/site-packages/certifi-2024.8.30.dist-info/LICENSE +++ /dev/null @@ -1,20 +0,0 @@ -This package contains a modified version of ca-bundle.crt: - -ca-bundle.crt -- Bundle of CA Root Certificates - -This is a bundle of X.509 certificates of public Certificate Authorities -(CA). These were automatically extracted from Mozilla's root certificates -file (certdata.txt). This file can be found in the mozilla source tree: -https://hg.mozilla.org/mozilla-central/file/tip/security/nss/lib/ckfw/builtins/certdata.txt -It contains the certificates in PEM format and therefore -can be directly used with curl / libcurl / php_curl, or with -an Apache+mod_ssl webserver for SSL client authentication. -Just configure this file as the SSLCACertificateFile.# - -***** BEGIN LICENSE BLOCK ***** -This Source Code Form is subject to the terms of the Mozilla Public License, -v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain -one at http://mozilla.org/MPL/2.0/. - -***** END LICENSE BLOCK ***** -@(#) $RCSfile: certdata.txt,v $ $Revision: 1.80 $ $Date: 2011/11/03 15:11:58 $ diff --git a/env/lib/python3.10/site-packages/certifi-2024.8.30.dist-info/METADATA b/env/lib/python3.10/site-packages/certifi-2024.8.30.dist-info/METADATA deleted file mode 100644 index 0a3a772..0000000 --- a/env/lib/python3.10/site-packages/certifi-2024.8.30.dist-info/METADATA +++ /dev/null @@ -1,67 +0,0 @@ -Metadata-Version: 2.1 -Name: certifi -Version: 2024.8.30 -Summary: Python package for providing Mozilla's CA Bundle. -Home-page: https://github.com/certifi/python-certifi -Author: Kenneth Reitz -Author-email: me@kennethreitz.com -License: MPL-2.0 -Project-URL: Source, https://github.com/certifi/python-certifi -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0) -Classifier: Natural Language :: English -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3 :: Only -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Classifier: Programming Language :: Python :: 3.12 -Requires-Python: >=3.6 -License-File: LICENSE - -Certifi: Python SSL Certificates -================================ - -Certifi provides Mozilla's carefully curated collection of Root Certificates for -validating the trustworthiness of SSL certificates while verifying the identity -of TLS hosts. It has been extracted from the `Requests`_ project. - -Installation ------------- - -``certifi`` is available on PyPI. 
Simply install it with ``pip``:: - - $ pip install certifi - -Usage ------ - -To reference the installed certificate authority (CA) bundle, you can use the -built-in function:: - - >>> import certifi - - >>> certifi.where() - '/usr/local/lib/python3.7/site-packages/certifi/cacert.pem' - -Or from the command line:: - - $ python -m certifi - /usr/local/lib/python3.7/site-packages/certifi/cacert.pem - -Enjoy! - -.. _`Requests`: https://requests.readthedocs.io/en/master/ - -Addition/Removal of Certificates --------------------------------- - -Certifi does not support any addition/removal or other modification of the -CA trust store content. This project is intended to provide a reliable and -highly portable root of trust to python deployments. Look to upstream projects -for methods to use alternate trust. diff --git a/env/lib/python3.10/site-packages/certifi-2024.8.30.dist-info/RECORD b/env/lib/python3.10/site-packages/certifi-2024.8.30.dist-info/RECORD deleted file mode 100644 index 5b9da16..0000000 --- a/env/lib/python3.10/site-packages/certifi-2024.8.30.dist-info/RECORD +++ /dev/null @@ -1,14 +0,0 @@ -certifi-2024.8.30.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -certifi-2024.8.30.dist-info/LICENSE,sha256=6TcW2mucDVpKHfYP5pWzcPBpVgPSH2-D8FPkLPwQyvc,989 -certifi-2024.8.30.dist-info/METADATA,sha256=GhBHRVUN6a4ZdUgE_N5wmukJfyuoE-QyIl8Y3ifNQBM,2222 -certifi-2024.8.30.dist-info/RECORD,, -certifi-2024.8.30.dist-info/WHEEL,sha256=UvcQYKBHoFqaQd6LKyqHw9fxEolWLQnlzP0h_LgJAfI,91 -certifi-2024.8.30.dist-info/top_level.txt,sha256=KMu4vUCfsjLrkPbSNdgdekS-pVJzBAJFO__nI8NF6-U,8 -certifi/__init__.py,sha256=p_GYZrjUwPBUhpLlCZoGb0miKBKSqDAyZC5DvIuqbHQ,94 -certifi/__main__.py,sha256=xBBoj905TUWBLRGANOcf7oi6e-3dMP4cEoG9OyMs11g,243 -certifi/__pycache__/__init__.cpython-310.pyc,, -certifi/__pycache__/__main__.cpython-310.pyc,, -certifi/__pycache__/core.cpython-310.pyc,, -certifi/cacert.pem,sha256=lO3rZukXdPyuk6BWUJFOKQliWaXH6HGh9l1GGrUgG0c,299427 -certifi/core.py,sha256=qRDDFyXVJwTB_EmoGppaXU_R9qCZvhl-EzxPMuV3nTA,4426 -certifi/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/env/lib/python3.10/site-packages/certifi-2024.8.30.dist-info/WHEEL b/env/lib/python3.10/site-packages/certifi-2024.8.30.dist-info/WHEEL deleted file mode 100644 index 57e56b7..0000000 --- a/env/lib/python3.10/site-packages/certifi-2024.8.30.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: setuptools (74.0.0) -Root-Is-Purelib: true -Tag: py3-none-any - diff --git a/env/lib/python3.10/site-packages/certifi-2024.8.30.dist-info/top_level.txt b/env/lib/python3.10/site-packages/certifi-2024.8.30.dist-info/top_level.txt deleted file mode 100644 index 963eac5..0000000 --- a/env/lib/python3.10/site-packages/certifi-2024.8.30.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -certifi diff --git a/env/lib/python3.10/site-packages/certifi/__init__.py b/env/lib/python3.10/site-packages/certifi/__init__.py deleted file mode 100644 index f61d77f..0000000 --- a/env/lib/python3.10/site-packages/certifi/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -from .core import contents, where - -__all__ = ["contents", "where"] -__version__ = "2024.08.30" diff --git a/env/lib/python3.10/site-packages/certifi/__main__.py b/env/lib/python3.10/site-packages/certifi/__main__.py deleted file mode 100644 index 8945b5d..0000000 --- a/env/lib/python3.10/site-packages/certifi/__main__.py +++ /dev/null @@ -1,12 +0,0 @@ -import argparse - -from certifi import contents, where - -parser = 
argparse.ArgumentParser() -parser.add_argument("-c", "--contents", action="store_true") -args = parser.parse_args() - -if args.contents: - print(contents()) -else: - print(where()) diff --git a/env/lib/python3.10/site-packages/certifi/__pycache__/__init__.cpython-310.pyc b/env/lib/python3.10/site-packages/certifi/__pycache__/__init__.cpython-310.pyc deleted file mode 100644 index 00a5402..0000000 Binary files a/env/lib/python3.10/site-packages/certifi/__pycache__/__init__.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/certifi/__pycache__/__main__.cpython-310.pyc b/env/lib/python3.10/site-packages/certifi/__pycache__/__main__.cpython-310.pyc deleted file mode 100644 index 48263bb..0000000 Binary files a/env/lib/python3.10/site-packages/certifi/__pycache__/__main__.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/certifi/__pycache__/core.cpython-310.pyc b/env/lib/python3.10/site-packages/certifi/__pycache__/core.cpython-310.pyc deleted file mode 100644 index 2282a5d..0000000 Binary files a/env/lib/python3.10/site-packages/certifi/__pycache__/core.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/certifi/cacert.pem b/env/lib/python3.10/site-packages/certifi/cacert.pem deleted file mode 100644 index 3c165a1..0000000 --- a/env/lib/python3.10/site-packages/certifi/cacert.pem +++ /dev/null @@ -1,4929 +0,0 @@ - -# Issuer: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA -# Subject: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA -# Label: "GlobalSign Root CA" -# Serial: 4835703278459707669005204 -# MD5 Fingerprint: 3e:45:52:15:09:51:92:e1:b7:5d:37:9f:b1:87:29:8a -# SHA1 Fingerprint: b1:bc:96:8b:d4:f4:9d:62:2a:a8:9a:81:f2:15:01:52:a4:1d:82:9c -# SHA256 Fingerprint: eb:d4:10:40:e4:bb:3e:c7:42:c9:e3:81:d3:1e:f2:a4:1a:48:b6:68:5c:96:e7:ce:f3:c1:df:6c:d4:33:1c:99 ------BEGIN CERTIFICATE----- -MIIDdTCCAl2gAwIBAgILBAAAAAABFUtaw5QwDQYJKoZIhvcNAQEFBQAwVzELMAkG -A1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExEDAOBgNVBAsTB1Jv -b3QgQ0ExGzAZBgNVBAMTEkdsb2JhbFNpZ24gUm9vdCBDQTAeFw05ODA5MDExMjAw -MDBaFw0yODAxMjgxMjAwMDBaMFcxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9i -YWxTaWduIG52LXNhMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJHbG9iYWxT -aWduIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDaDuaZ -jc6j40+Kfvvxi4Mla+pIH/EqsLmVEQS98GPR4mdmzxzdzxtIK+6NiY6arymAZavp -xy0Sy6scTHAHoT0KMM0VjU/43dSMUBUc71DuxC73/OlS8pF94G3VNTCOXkNz8kHp -1Wrjsok6Vjk4bwY8iGlbKk3Fp1S4bInMm/k8yuX9ifUSPJJ4ltbcdG6TRGHRjcdG -snUOhugZitVtbNV4FpWi6cgKOOvyJBNPc1STE4U6G7weNLWLBYy5d4ux2x8gkasJ -U26Qzns3dLlwR5EiUWMWea6xrkEmCMgZK9FGqkjWZCrXgzT/LCrBbBlDSgeF59N8 -9iFo7+ryUp9/k5DPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8E -BTADAQH/MB0GA1UdDgQWBBRge2YaRQ2XyolQL30EzTSo//z9SzANBgkqhkiG9w0B -AQUFAAOCAQEA1nPnfE920I2/7LqivjTFKDK1fPxsnCwrvQmeU79rXqoRSLblCKOz -yj1hTdNGCbM+w6DjY1Ub8rrvrTnhQ7k4o+YviiY776BQVvnGCv04zcQLcFGUl5gE -38NflNUVyRRBnMRddWQVDf9VMOyGj/8N7yy5Y0b2qvzfvGn9LhJIZJrglfCm7ymP -AbEVtQwdpf5pLGkkeB6zpxxxYu7KyJesF12KwvhHhm4qxFYxldBniYUr+WymXUad -DKqC5JlR3XC321Y9YeRq4VzW9v493kHMB65jUr9TU/Qr6cf9tveCX4XSQRjbgbME -HMUfpIBvFSDJ3gyICh3WZlXi/EjJKSZp4A== ------END CERTIFICATE----- - -# Issuer: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited -# Subject: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. 
(limits liab.)/(c) 1999 Entrust.net Limited -# Label: "Entrust.net Premium 2048 Secure Server CA" -# Serial: 946069240 -# MD5 Fingerprint: ee:29:31:bc:32:7e:9a:e6:e8:b5:f7:51:b4:34:71:90 -# SHA1 Fingerprint: 50:30:06:09:1d:97:d4:f5:ae:39:f7:cb:e7:92:7d:7d:65:2d:34:31 -# SHA256 Fingerprint: 6d:c4:71:72:e0:1c:bc:b0:bf:62:58:0d:89:5f:e2:b8:ac:9a:d4:f8:73:80:1e:0c:10:b9:c8:37:d2:1e:b1:77 ------BEGIN CERTIFICATE----- -MIIEKjCCAxKgAwIBAgIEOGPe+DANBgkqhkiG9w0BAQUFADCBtDEUMBIGA1UEChML -RW50cnVzdC5uZXQxQDA+BgNVBAsUN3d3dy5lbnRydXN0Lm5ldC9DUFNfMjA0OCBp -bmNvcnAuIGJ5IHJlZi4gKGxpbWl0cyBsaWFiLikxJTAjBgNVBAsTHChjKSAxOTk5 -IEVudHJ1c3QubmV0IExpbWl0ZWQxMzAxBgNVBAMTKkVudHJ1c3QubmV0IENlcnRp -ZmljYXRpb24gQXV0aG9yaXR5ICgyMDQ4KTAeFw05OTEyMjQxNzUwNTFaFw0yOTA3 -MjQxNDE1MTJaMIG0MRQwEgYDVQQKEwtFbnRydXN0Lm5ldDFAMD4GA1UECxQ3d3d3 -LmVudHJ1c3QubmV0L0NQU18yMDQ4IGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxp -YWIuKTElMCMGA1UECxMcKGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDEzMDEG -A1UEAxMqRW50cnVzdC5uZXQgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgKDIwNDgp -MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArU1LqRKGsuqjIAcVFmQq -K0vRvwtKTY7tgHalZ7d4QMBzQshowNtTK91euHaYNZOLGp18EzoOH1u3Hs/lJBQe -sYGpjX24zGtLA/ECDNyrpUAkAH90lKGdCCmziAv1h3edVc3kw37XamSrhRSGlVuX -MlBvPci6Zgzj/L24ScF2iUkZ/cCovYmjZy/Gn7xxGWC4LeksyZB2ZnuU4q941mVT -XTzWnLLPKQP5L6RQstRIzgUyVYr9smRMDuSYB3Xbf9+5CFVghTAp+XtIpGmG4zU/ -HoZdenoVve8AjhUiVBcAkCaTvA5JaJG/+EfTnZVCwQ5N328mz8MYIWJmQ3DW1cAH -4QIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV -HQ4EFgQUVeSB0RGAvtiJuQijMfmhJAkWuXAwDQYJKoZIhvcNAQEFBQADggEBADub -j1abMOdTmXx6eadNl9cZlZD7Bh/KM3xGY4+WZiT6QBshJ8rmcnPyT/4xmf3IDExo -U8aAghOY+rat2l098c5u9hURlIIM7j+VrxGrD9cv3h8Dj1csHsm7mhpElesYT6Yf -zX1XEC+bBAlahLVu2B064dae0Wx5XnkcFMXj0EyTO2U87d89vqbllRrDtRnDvV5b -u/8j72gZyxKTJ1wDLW8w0B62GqzeWvfRqqgnpv55gcR5mTNXuhKwqeBCbJPKVt7+ -bYQLCIt+jerXmCHG8+c8eS9enNFMFY3h7CI3zJpDC5fcgJCNs2ebb0gIFVbPv/Er -fF6adulZkMV8gzURZVE= ------END CERTIFICATE----- - -# Issuer: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust -# Subject: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust -# Label: "Baltimore CyberTrust Root" -# Serial: 33554617 -# MD5 Fingerprint: ac:b6:94:a5:9c:17:e0:d7:91:52:9b:b1:97:06:a6:e4 -# SHA1 Fingerprint: d4:de:20:d0:5e:66:fc:53:fe:1a:50:88:2c:78:db:28:52:ca:e4:74 -# SHA256 Fingerprint: 16:af:57:a9:f6:76:b0:ab:12:60:95:aa:5e:ba:de:f2:2a:b3:11:19:d6:44:ac:95:cd:4b:93:db:f3:f2:6a:eb ------BEGIN CERTIFICATE----- -MIIDdzCCAl+gAwIBAgIEAgAAuTANBgkqhkiG9w0BAQUFADBaMQswCQYDVQQGEwJJ -RTESMBAGA1UEChMJQmFsdGltb3JlMRMwEQYDVQQLEwpDeWJlclRydXN0MSIwIAYD -VQQDExlCYWx0aW1vcmUgQ3liZXJUcnVzdCBSb290MB4XDTAwMDUxMjE4NDYwMFoX -DTI1MDUxMjIzNTkwMFowWjELMAkGA1UEBhMCSUUxEjAQBgNVBAoTCUJhbHRpbW9y -ZTETMBEGA1UECxMKQ3liZXJUcnVzdDEiMCAGA1UEAxMZQmFsdGltb3JlIEN5YmVy -VHJ1c3QgUm9vdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKMEuyKr -mD1X6CZymrV51Cni4eiVgLGw41uOKymaZN+hXe2wCQVt2yguzmKiYv60iNoS6zjr -IZ3AQSsBUnuId9Mcj8e6uYi1agnnc+gRQKfRzMpijS3ljwumUNKoUMMo6vWrJYeK -mpYcqWe4PwzV9/lSEy/CG9VwcPCPwBLKBsua4dnKM3p31vjsufFoREJIE9LAwqSu -XmD+tqYF/LTdB1kC1FkYmGP1pWPgkAx9XbIGevOF6uvUA65ehD5f/xXtabz5OTZy -dc93Uk3zyZAsuT3lySNTPx8kmCFcB5kpvcY67Oduhjprl3RjM71oGDHweI12v/ye -jl0qhqdNkNwnGjkCAwEAAaNFMEMwHQYDVR0OBBYEFOWdWTCCR1jMrPoIVDaGezq1 -BE3wMBIGA1UdEwEB/wQIMAYBAf8CAQMwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3 -DQEBBQUAA4IBAQCFDF2O5G9RaEIFoN27TyclhAO992T9Ldcw46QQF+vaKSm2eT92 -9hkTI7gQCvlYpNRhcL0EYWoSihfVCr3FvDB81ukMJY2GQE/szKN+OMY3EU/t3Wgx -jkzSswF07r51XgdIGn9w/xZchMB5hbgF/X++ZRGjD8ACtPhSNzkE1akxehi/oCr0 -Epn3o0WC4zxe9Z2etciefC7IpJ5OCBRLbf1wbWsaY71k5h+3zvDyny67G7fyUIhz 
-ksLi4xaNmjICq44Y3ekQEe5+NauQrz4wlHrQMz2nZQ/1/I6eYs9HRCwBXbsdtTLS -R9I4LtD+gdwyah617jzV/OeBHRnDJELqYzmp ------END CERTIFICATE----- - -# Issuer: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. -# Subject: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. -# Label: "Entrust Root Certification Authority" -# Serial: 1164660820 -# MD5 Fingerprint: d6:a5:c3:ed:5d:dd:3e:00:c1:3d:87:92:1f:1d:3f:e4 -# SHA1 Fingerprint: b3:1e:b1:b7:40:e3:6c:84:02:da:dc:37:d4:4d:f5:d4:67:49:52:f9 -# SHA256 Fingerprint: 73:c1:76:43:4f:1b:c6:d5:ad:f4:5b:0e:76:e7:27:28:7c:8d:e5:76:16:c1:e6:e6:14:1a:2b:2c:bc:7d:8e:4c ------BEGIN CERTIFICATE----- -MIIEkTCCA3mgAwIBAgIERWtQVDANBgkqhkiG9w0BAQUFADCBsDELMAkGA1UEBhMC -VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xOTA3BgNVBAsTMHd3dy5lbnRydXN0 -Lm5ldC9DUFMgaXMgaW5jb3Jwb3JhdGVkIGJ5IHJlZmVyZW5jZTEfMB0GA1UECxMW -KGMpIDIwMDYgRW50cnVzdCwgSW5jLjEtMCsGA1UEAxMkRW50cnVzdCBSb290IENl -cnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA2MTEyNzIwMjM0MloXDTI2MTEyNzIw -NTM0MlowgbAxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMTkw -NwYDVQQLEzB3d3cuZW50cnVzdC5uZXQvQ1BTIGlzIGluY29ycG9yYXRlZCBieSBy -ZWZlcmVuY2UxHzAdBgNVBAsTFihjKSAyMDA2IEVudHJ1c3QsIEluYy4xLTArBgNV -BAMTJEVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASIwDQYJ -KoZIhvcNAQEBBQADggEPADCCAQoCggEBALaVtkNC+sZtKm9I35RMOVcF7sN5EUFo -Nu3s/poBj6E4KPz3EEZmLk0eGrEaTsbRwJWIsMn/MYszA9u3g3s+IIRe7bJWKKf4 -4LlAcTfFy0cOlypowCKVYhXbR9n10Cv/gkvJrT7eTNuQgFA/CYqEAOwwCj0Yzfv9 -KlmaI5UXLEWeH25DeW0MXJj+SKfFI0dcXv1u5x609mhF0YaDW6KKjbHjKYD+JXGI -rb68j6xSlkuqUY3kEzEZ6E5Nn9uss2rVvDlUccp6en+Q3X0dgNmBu1kmwhH+5pPi -94DkZfs0Nw4pgHBNrziGLp5/V6+eF67rHMsoIV+2HNjnogQi+dPa2MsCAwEAAaOB -sDCBrTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zArBgNVHRAEJDAi -gA8yMDA2MTEyNzIwMjM0MlqBDzIwMjYxMTI3MjA1MzQyWjAfBgNVHSMEGDAWgBRo -kORnpKZTgMeGZqTx90tD+4S9bTAdBgNVHQ4EFgQUaJDkZ6SmU4DHhmak8fdLQ/uE -vW0wHQYJKoZIhvZ9B0EABBAwDhsIVjcuMTo0LjADAgSQMA0GCSqGSIb3DQEBBQUA -A4IBAQCT1DCw1wMgKtD5Y+iRDAUgqV8ZyntyTtSx29CW+1RaGSwMCPeyvIWonX9t -O1KzKtvn1ISMY/YPyyYBkVBs9F8U4pN0wBOeMDpQ47RgxRzwIkSNcUesyBrJ6Zua -AGAT/3B+XxFNSRuzFVJ7yVTav52Vr2ua2J7p8eRDjeIRRDq/r72DQnNSi6q7pynP -9WQcCk3RvKqsnyrQ/39/2n3qse0wJcGE2jTSW3iDVuycNsMm4hH2Z0kdkquM++v/ -eu6FSqdQgPCnXEqULl8FmTxSQeDNtGPPAUO6nIPcj2A781q0tHuu2guQOHXvgR1m -0vdXcDazv/wor3ElhVsT/h5/WrQ8 ------END CERTIFICATE----- - -# Issuer: CN=AAA Certificate Services O=Comodo CA Limited -# Subject: CN=AAA Certificate Services O=Comodo CA Limited -# Label: "Comodo AAA Services root" -# Serial: 1 -# MD5 Fingerprint: 49:79:04:b0:eb:87:19:ac:47:b0:bc:11:51:9b:74:d0 -# SHA1 Fingerprint: d1:eb:23:a4:6d:17:d6:8f:d9:25:64:c2:f1:f1:60:17:64:d8:e3:49 -# SHA256 Fingerprint: d7:a7:a0:fb:5d:7e:27:31:d7:71:e9:48:4e:bc:de:f7:1d:5f:0c:3e:0a:29:48:78:2b:c8:3e:e0:ea:69:9e:f4 ------BEGIN CERTIFICATE----- -MIIEMjCCAxqgAwIBAgIBATANBgkqhkiG9w0BAQUFADB7MQswCQYDVQQGEwJHQjEb -MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow -GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDEhMB8GA1UEAwwYQUFBIENlcnRpZmlj -YXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAwMFoXDTI4MTIzMTIzNTk1OVowezEL -MAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE -BwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxITAfBgNVBAMM -GEFBQSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEBBQADggEP -ADCCAQoCggEBAL5AnfRu4ep2hxxNRUSOvkbIgwadwSr+GB+O5AL686tdUIoWMQua -BtDFcCLNSS1UY8y2bmhGC1Pqy0wkwLxyTurxFa70VJoSCsN6sjNg4tqJVfMiWPPe -3M/vg4aijJRPn2jymJBGhCfHdr/jzDUsi14HZGWCwEiwqJH5YZ92IFCokcdmtet4 
-YgNW8IoaE+oxox6gmf049vYnMlhvB/VruPsUK6+3qszWY19zjNoFmag4qMsXeDZR -rOme9Hg6jc8P2ULimAyrL58OAd7vn5lJ8S3frHRNG5i1R8XlKdH5kBjHYpy+g8cm -ez6KJcfA3Z3mNWgQIJ2P2N7Sw4ScDV7oL8kCAwEAAaOBwDCBvTAdBgNVHQ4EFgQU -oBEKIz6W8Qfs4q8p74Klf9AwpLQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQF -MAMBAf8wewYDVR0fBHQwcjA4oDagNIYyaHR0cDovL2NybC5jb21vZG9jYS5jb20v -QUFBQ2VydGlmaWNhdGVTZXJ2aWNlcy5jcmwwNqA0oDKGMGh0dHA6Ly9jcmwuY29t -b2RvLm5ldC9BQUFDZXJ0aWZpY2F0ZVNlcnZpY2VzLmNybDANBgkqhkiG9w0BAQUF -AAOCAQEACFb8AvCb6P+k+tZ7xkSAzk/ExfYAWMymtrwUSWgEdujm7l3sAg9g1o1Q -GE8mTgHj5rCl7r+8dFRBv/38ErjHT1r0iWAFf2C3BUrz9vHCv8S5dIa2LX1rzNLz -Rt0vxuBqw8M0Ayx9lt1awg6nCpnBBYurDC/zXDrPbDdVCYfeU0BsWO/8tqtlbgT2 -G9w84FoVxp7Z8VlIMCFlA2zs6SFz7JsDoeA3raAVGI/6ugLOpyypEBMs1OUIJqsi -l2D4kF501KKaU73yqWjgom7C12yxow+ev+to51byrvLjKzg6CYG1a4XXvi3tPxq3 -smPi9WIsgtRqAEFQ8TmDn5XpNpaYbg== ------END CERTIFICATE----- - -# Issuer: CN=QuoVadis Root CA 2 O=QuoVadis Limited -# Subject: CN=QuoVadis Root CA 2 O=QuoVadis Limited -# Label: "QuoVadis Root CA 2" -# Serial: 1289 -# MD5 Fingerprint: 5e:39:7b:dd:f8:ba:ec:82:e9:ac:62:ba:0c:54:00:2b -# SHA1 Fingerprint: ca:3a:fb:cf:12:40:36:4b:44:b2:16:20:88:80:48:39:19:93:7c:f7 -# SHA256 Fingerprint: 85:a0:dd:7d:d7:20:ad:b7:ff:05:f8:3d:54:2b:20:9d:c7:ff:45:28:f7:d6:77:b1:83:89:fe:a5:e5:c4:9e:86 ------BEGIN CERTIFICATE----- -MIIFtzCCA5+gAwIBAgICBQkwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x -GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv -b3QgQ0EgMjAeFw0wNjExMjQxODI3MDBaFw0zMTExMjQxODIzMzNaMEUxCzAJBgNV -BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W -YWRpcyBSb290IENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCa -GMpLlA0ALa8DKYrwD4HIrkwZhR0In6spRIXzL4GtMh6QRr+jhiYaHv5+HBg6XJxg -Fyo6dIMzMH1hVBHL7avg5tKifvVrbxi3Cgst/ek+7wrGsxDp3MJGF/hd/aTa/55J -WpzmM+Yklvc/ulsrHHo1wtZn/qtmUIttKGAr79dgw8eTvI02kfN/+NsRE8Scd3bB -rrcCaoF6qUWD4gXmuVbBlDePSHFjIuwXZQeVikvfj8ZaCuWw419eaxGrDPmF60Tp -+ARz8un+XJiM9XOva7R+zdRcAitMOeGylZUtQofX1bOQQ7dsE/He3fbE+Ik/0XX1 -ksOR1YqI0JDs3G3eicJlcZaLDQP9nL9bFqyS2+r+eXyt66/3FsvbzSUr5R/7mp/i -Ucw6UwxI5g69ybR2BlLmEROFcmMDBOAENisgGQLodKcftslWZvB1JdxnwQ5hYIiz -PtGo/KPaHbDRsSNU30R2be1B2MGyIrZTHN81Hdyhdyox5C315eXbyOD/5YDXC2Og -/zOhD7osFRXql7PSorW+8oyWHhqPHWykYTe5hnMz15eWniN9gqRMgeKh0bpnX5UH -oycR7hYQe7xFSkyyBNKr79X9DFHOUGoIMfmR2gyPZFwDwzqLID9ujWc9Otb+fVuI -yV77zGHcizN300QyNQliBJIWENieJ0f7OyHj+OsdWwIDAQABo4GwMIGtMA8GA1Ud -EwEB/wQFMAMBAf8wCwYDVR0PBAQDAgEGMB0GA1UdDgQWBBQahGK8SEwzJQTU7tD2 -A8QZRtGUazBuBgNVHSMEZzBlgBQahGK8SEwzJQTU7tD2A8QZRtGUa6FJpEcwRTEL -MAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMT -ElF1b1ZhZGlzIFJvb3QgQ0EgMoICBQkwDQYJKoZIhvcNAQEFBQADggIBAD4KFk2f -BluornFdLwUvZ+YTRYPENvbzwCYMDbVHZF34tHLJRqUDGCdViXh9duqWNIAXINzn -g/iN/Ae42l9NLmeyhP3ZRPx3UIHmfLTJDQtyU/h2BwdBR5YM++CCJpNVjP4iH2Bl -fF/nJrP3MpCYUNQ3cVX2kiF495V5+vgtJodmVjB3pjd4M1IQWK4/YY7yarHvGH5K -WWPKjaJW1acvvFYfzznB4vsKqBUsfU16Y8Zsl0Q80m/DShcK+JDSV6IZUaUtl0Ha -B0+pUNqQjZRG4T7wlP0QADj1O+hA4bRuVhogzG9Yje0uRY/W6ZM/57Es3zrWIozc -hLsib9D45MY56QSIPMO661V6bYCZJPVsAfv4l7CUW+v90m/xd2gNNWQjrLhVoQPR -TUIZ3Ph1WVaj+ahJefivDrkRoHy3au000LYmYjgahwz46P0u05B/B5EqHdZ+XIWD -mbA4CD/pXvk1B+TJYm5Xf6dQlfe6yJvmjqIBxdZmv3lh8zwc4bmCXF2gw+nYSL0Z -ohEUGW6yhhtoPkg3Goi3XZZenMfvJ2II4pEZXNLxId26F0KCl3GBUzGpn/Z9Yr9y -4aOTHcyKJloJONDO1w2AFrR4pTqHTI2KpdVGl/IsELm8VCLAAVBpQ570su9t+Oza -8eOx79+Rj1QqCyXBJhnEUhAFZdWCEOrCMc0u ------END CERTIFICATE----- - -# Issuer: CN=QuoVadis Root CA 3 O=QuoVadis Limited -# Subject: CN=QuoVadis Root CA 3 O=QuoVadis Limited -# Label: "QuoVadis Root CA 3" -# Serial: 1478 -# MD5 Fingerprint: 
31:85:3c:62:94:97:63:b9:aa:fd:89:4e:af:6f:e0:cf -# SHA1 Fingerprint: 1f:49:14:f7:d8:74:95:1d:dd:ae:02:c0:be:fd:3a:2d:82:75:51:85 -# SHA256 Fingerprint: 18:f1:fc:7f:20:5d:f8:ad:dd:eb:7f:e0:07:dd:57:e3:af:37:5a:9c:4d:8d:73:54:6b:f4:f1:fe:d1:e1:8d:35 ------BEGIN CERTIFICATE----- -MIIGnTCCBIWgAwIBAgICBcYwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x -GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv -b3QgQ0EgMzAeFw0wNjExMjQxOTExMjNaFw0zMTExMjQxOTA2NDRaMEUxCzAJBgNV -BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W -YWRpcyBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDM -V0IWVJzmmNPTTe7+7cefQzlKZbPoFog02w1ZkXTPkrgEQK0CSzGrvI2RaNggDhoB -4hp7Thdd4oq3P5kazethq8Jlph+3t723j/z9cI8LoGe+AaJZz3HmDyl2/7FWeUUr -H556VOijKTVopAFPD6QuN+8bv+OPEKhyq1hX51SGyMnzW9os2l2ObjyjPtr7guXd -8lyyBTNvijbO0BNO/79KDDRMpsMhvVAEVeuxu537RR5kFd5VAYwCdrXLoT9Cabwv -vWhDFlaJKjdhkf2mrk7AyxRllDdLkgbvBNDInIjbC3uBr7E9KsRlOni27tyAsdLT -mZw67mtaa7ONt9XOnMK+pUsvFrGeaDsGb659n/je7Mwpp5ijJUMv7/FfJuGITfhe -btfZFG4ZM2mnO4SJk8RTVROhUXhA+LjJou57ulJCg54U7QVSWllWp5f8nT8KKdjc -T5EOE7zelaTfi5m+rJsziO+1ga8bxiJTyPbH7pcUsMV8eFLI8M5ud2CEpukqdiDt -WAEXMJPpGovgc2PZapKUSU60rUqFxKMiMPwJ7Wgic6aIDFUhWMXhOp8q3crhkODZ -c6tsgLjoC2SToJyMGf+z0gzskSaHirOi4XCPLArlzW1oUevaPwV/izLmE1xr/l9A -4iLItLRkT9a6fUg+qGkM17uGcclzuD87nSVL2v9A6wIDAQABo4IBlTCCAZEwDwYD -VR0TAQH/BAUwAwEB/zCB4QYDVR0gBIHZMIHWMIHTBgkrBgEEAb5YAAMwgcUwgZMG -CCsGAQUFBwICMIGGGoGDQW55IHVzZSBvZiB0aGlzIENlcnRpZmljYXRlIGNvbnN0 -aXR1dGVzIGFjY2VwdGFuY2Ugb2YgdGhlIFF1b1ZhZGlzIFJvb3QgQ0EgMyBDZXJ0 -aWZpY2F0ZSBQb2xpY3kgLyBDZXJ0aWZpY2F0aW9uIFByYWN0aWNlIFN0YXRlbWVu -dC4wLQYIKwYBBQUHAgEWIWh0dHA6Ly93d3cucXVvdmFkaXNnbG9iYWwuY29tL2Nw -czALBgNVHQ8EBAMCAQYwHQYDVR0OBBYEFPLAE+CCQz777i9nMpY1XNu4ywLQMG4G -A1UdIwRnMGWAFPLAE+CCQz777i9nMpY1XNu4ywLQoUmkRzBFMQswCQYDVQQGEwJC -TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDEbMBkGA1UEAxMSUXVvVmFkaXMg -Um9vdCBDQSAzggIFxjANBgkqhkiG9w0BAQUFAAOCAgEAT62gLEz6wPJv92ZVqyM0 -7ucp2sNbtrCD2dDQ4iH782CnO11gUyeim/YIIirnv6By5ZwkajGxkHon24QRiSem -d1o417+shvzuXYO8BsbRd2sPbSQvS3pspweWyuOEn62Iix2rFo1bZhfZFvSLgNLd -+LJ2w/w4E6oM3kJpK27zPOuAJ9v1pkQNn1pVWQvVDVJIxa6f8i+AxeoyUDUSly7B -4f/xI4hROJ/yZlZ25w9Rl6VSDE1JUZU2Pb+iSwwQHYaZTKrzchGT5Or2m9qoXadN -t54CrnMAyNojA+j56hl0YgCUyyIgvpSnWbWCar6ZeXqp8kokUvd0/bpO5qgdAm6x -DYBEwa7TIzdfu4V8K5Iu6H6li92Z4b8nby1dqnuH/grdS/yO9SbkbnBCbjPsMZ57 -k8HkyWkaPcBrTiJt7qtYTcbQQcEr6k8Sh17rRdhs9ZgC06DYVYoGmRmioHfRMJ6s -zHXug/WwYjnPbFfiTNKRCw51KBuav/0aQ/HKd/s7j2G4aSgWQgRecCocIdiP4b0j -Wy10QJLZYxkNc91pvGJHvOB0K7Lrfb5BG7XARsWhIstfTsEokt4YutUqKLsRixeT -mJlglFwjz1onl14LBQaTNx47aTbrqZ5hHY8y2o4M1nQ+ewkk2gF3R8Q7zTSMmfXK -4SVhM7JZG+Ju1zdXtg2pEto= ------END CERTIFICATE----- - -# Issuer: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com -# Subject: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com -# Label: "XRamp Global CA Root" -# Serial: 107108908803651509692980124233745014957 -# MD5 Fingerprint: a1:0b:44:b3:ca:10:d8:00:6e:9d:0f:d8:0f:92:0a:d1 -# SHA1 Fingerprint: b8:01:86:d1:eb:9c:86:a5:41:04:cf:30:54:f3:4c:52:b7:e5:58:c6 -# SHA256 Fingerprint: ce:cd:dc:90:50:99:d8:da:df:c5:b1:d2:09:b7:37:cb:e2:c1:8c:fb:2c:10:c0:ff:0b:cf:0d:32:86:fc:1a:a2 ------BEGIN CERTIFICATE----- -MIIEMDCCAxigAwIBAgIQUJRs7Bjq1ZxN1ZfvdY+grTANBgkqhkiG9w0BAQUFADCB -gjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3dy54cmFtcHNlY3VyaXR5LmNvbTEk -MCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2VydmljZXMgSW5jMS0wKwYDVQQDEyRY -UmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQxMTAxMTcx 
-NDA0WhcNMzUwMTAxMDUzNzE5WjCBgjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3 -dy54cmFtcHNlY3VyaXR5LmNvbTEkMCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2Vy -dmljZXMgSW5jMS0wKwYDVQQDEyRYUmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBB -dXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCYJB69FbS6 -38eMpSe2OAtp87ZOqCwuIR1cRN8hXX4jdP5efrRKt6atH67gBhbim1vZZ3RrXYCP -KZ2GG9mcDZhtdhAoWORlsH9KmHmf4MMxfoArtYzAQDsRhtDLooY2YKTVMIJt2W7Q -DxIEM5dfT2Fa8OT5kavnHTu86M/0ay00fOJIYRyO82FEzG+gSqmUsE3a56k0enI4 -qEHMPJQRfevIpoy3hsvKMzvZPTeL+3o+hiznc9cKV6xkmxnr9A8ECIqsAxcZZPRa -JSKNNCyy9mgdEm3Tih4U2sSPpuIjhdV6Db1q4Ons7Be7QhtnqiXtRYMh/MHJfNVi -PvryxS3T/dRlAgMBAAGjgZ8wgZwwEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0P -BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMZPoj0GY4QJnM5i5ASs -jVy16bYbMDYGA1UdHwQvMC0wK6ApoCeGJWh0dHA6Ly9jcmwueHJhbXBzZWN1cml0 -eS5jb20vWEdDQS5jcmwwEAYJKwYBBAGCNxUBBAMCAQEwDQYJKoZIhvcNAQEFBQAD -ggEBAJEVOQMBG2f7Shz5CmBbodpNl2L5JFMn14JkTpAuw0kbK5rc/Kh4ZzXxHfAR -vbdI4xD2Dd8/0sm2qlWkSLoC295ZLhVbO50WfUfXN+pfTXYSNrsf16GBBEYgoyxt -qZ4Bfj8pzgCT3/3JknOJiWSe5yvkHJEs0rnOfc5vMZnT5r7SHpDwCRR5XCOrTdLa -IR9NmXmd4c8nnxCbHIgNsIpkQTG4DmyQJKSbXHGPurt+HBvbaoAPIbzp26a3QPSy -i6mx5O+aGtA9aZnuqCij4Tyz8LIRnM98QObd50N9otg6tamN8jSZxNQQ4Qb9CYQQ -O+7ETPTsJ3xCwnR8gooJybQDJbw= ------END CERTIFICATE----- - -# Issuer: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority -# Subject: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority -# Label: "Go Daddy Class 2 CA" -# Serial: 0 -# MD5 Fingerprint: 91:de:06:25:ab:da:fd:32:17:0c:bb:25:17:2a:84:67 -# SHA1 Fingerprint: 27:96:ba:e6:3f:18:01:e2:77:26:1b:a0:d7:77:70:02:8f:20:ee:e4 -# SHA256 Fingerprint: c3:84:6b:f2:4b:9e:93:ca:64:27:4c:0e:c6:7c:1e:cc:5e:02:4f:fc:ac:d2:d7:40:19:35:0e:81:fe:54:6a:e4 ------BEGIN CERTIFICATE----- -MIIEADCCAuigAwIBAgIBADANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEh -MB8GA1UEChMYVGhlIEdvIERhZGR5IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBE -YWRkeSBDbGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA0MDYyOTE3 -MDYyMFoXDTM0MDYyOTE3MDYyMFowYzELMAkGA1UEBhMCVVMxITAfBgNVBAoTGFRo -ZSBHbyBEYWRkeSBHcm91cCwgSW5jLjExMC8GA1UECxMoR28gRGFkZHkgQ2xhc3Mg -MiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASAwDQYJKoZIhvcNAQEBBQADggEN -ADCCAQgCggEBAN6d1+pXGEmhW+vXX0iG6r7d/+TvZxz0ZWizV3GgXne77ZtJ6XCA -PVYYYwhv2vLM0D9/AlQiVBDYsoHUwHU9S3/Hd8M+eKsaA7Ugay9qK7HFiH7Eux6w -wdhFJ2+qN1j3hybX2C32qRe3H3I2TqYXP2WYktsqbl2i/ojgC95/5Y0V4evLOtXi -EqITLdiOr18SPaAIBQi2XKVlOARFmR6jYGB0xUGlcmIbYsUfb18aQr4CUWWoriMY -avx4A6lNf4DD+qta/KFApMoZFv6yyO9ecw3ud72a9nmYvLEHZ6IVDd2gWMZEewo+ -YihfukEHU1jPEX44dMX4/7VpkI+EdOqXG68CAQOjgcAwgb0wHQYDVR0OBBYEFNLE -sNKR1EwRcbNhyz2h/t2oatTjMIGNBgNVHSMEgYUwgYKAFNLEsNKR1EwRcbNhyz2h -/t2oatTjoWekZTBjMQswCQYDVQQGEwJVUzEhMB8GA1UEChMYVGhlIEdvIERhZGR5 -IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBEYWRkeSBDbGFzcyAyIENlcnRpZmlj -YXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQAD -ggEBADJL87LKPpH8EsahB4yOd6AzBhRckB4Y9wimPQoZ+YeAEW5p5JYXMP80kWNy -OO7MHAGjHZQopDH2esRU1/blMVgDoszOYtuURXO1v0XJJLXVggKtI3lpjbi2Tc7P -TMozI+gciKqdi0FuFskg5YmezTvacPd+mSYgFFQlq25zheabIZ0KbIIOqPjCDPoQ -HmyW74cNxA9hi63ugyuV+I6ShHI56yDqg+2DzZduCLzrTia2cyvk0/ZM/iZx4mER -dEr/VxqHD3VILs9RaRegAhJhldXRQLIQTO7ErBBDpqWeCtWVYpoNz4iCxTIM5Cuf -ReYNnyicsbkqWletNw+vHX/bvZ8= ------END CERTIFICATE----- - -# Issuer: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority -# Subject: O=Starfield Technologies, Inc. 
OU=Starfield Class 2 Certification Authority -# Label: "Starfield Class 2 CA" -# Serial: 0 -# MD5 Fingerprint: 32:4a:4b:bb:c8:63:69:9b:be:74:9a:c6:dd:1d:46:24 -# SHA1 Fingerprint: ad:7e:1c:28:b0:64:ef:8f:60:03:40:20:14:c3:d0:e3:37:0e:b5:8a -# SHA256 Fingerprint: 14:65:fa:20:53:97:b8:76:fa:a6:f0:a9:95:8e:55:90:e4:0f:cc:7f:aa:4f:b7:c2:c8:67:75:21:fb:5f:b6:58 ------BEGIN CERTIFICATE----- -MIIEDzCCAvegAwIBAgIBADANBgkqhkiG9w0BAQUFADBoMQswCQYDVQQGEwJVUzEl -MCMGA1UEChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMp -U3RhcmZpZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQw -NjI5MTczOTE2WhcNMzQwNjI5MTczOTE2WjBoMQswCQYDVQQGEwJVUzElMCMGA1UE -ChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMpU3RhcmZp -ZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggEgMA0GCSqGSIb3 -DQEBAQUAA4IBDQAwggEIAoIBAQC3Msj+6XGmBIWtDBFk385N78gDGIc/oav7PKaf -8MOh2tTYbitTkPskpD6E8J7oX+zlJ0T1KKY/e97gKvDIr1MvnsoFAZMej2YcOadN -+lq2cwQlZut3f+dZxkqZJRRU6ybH838Z1TBwj6+wRir/resp7defqgSHo9T5iaU0 -X9tDkYI22WY8sbi5gv2cOj4QyDvvBmVmepsZGD3/cVE8MC5fvj13c7JdBmzDI1aa -K4UmkhynArPkPw2vCHmCuDY96pzTNbO8acr1zJ3o/WSNF4Azbl5KXZnJHoe0nRrA -1W4TNSNe35tfPe/W93bC6j67eA0cQmdrBNj41tpvi/JEoAGrAgEDo4HFMIHCMB0G -A1UdDgQWBBS/X7fRzt0fhvRbVazc1xDCDqmI5zCBkgYDVR0jBIGKMIGHgBS/X7fR -zt0fhvRbVazc1xDCDqmI56FspGowaDELMAkGA1UEBhMCVVMxJTAjBgNVBAoTHFN0 -YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAsTKVN0YXJmaWVsZCBD -bGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8w -DQYJKoZIhvcNAQEFBQADggEBAAWdP4id0ckaVaGsafPzWdqbAYcaT1epoXkJKtv3 -L7IezMdeatiDh6GX70k1PncGQVhiv45YuApnP+yz3SFmH8lU+nLMPUxA2IGvd56D -eruix/U0F47ZEUD0/CwqTRV/p2JdLiXTAAsgGh1o+Re49L2L7ShZ3U0WixeDyLJl -xy16paq8U4Zt3VekyvggQQto8PT7dL5WXXp59fkdheMtlb71cZBDzI0fmgAKhynp -VSJYACPq4xJDKVtHCN2MQWplBqjlIapBtJUhlbl90TSrE9atvNziPTnNvT51cKEY -WQPJIrSPnNVeKtelttQKbfi3QBFGmh95DmK/D5fs4C8fF5Q= ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Assured ID Root CA" -# Serial: 17154717934120587862167794914071425081 -# MD5 Fingerprint: 87:ce:0b:7b:2a:0e:49:00:e1:58:71:9b:37:a8:93:72 -# SHA1 Fingerprint: 05:63:b8:63:0d:62:d7:5a:bb:c8:ab:1e:4b:df:b5:a8:99:b2:4d:43 -# SHA256 Fingerprint: 3e:90:99:b5:01:5e:8f:48:6c:00:bc:ea:9d:11:1e:e7:21:fa:ba:35:5a:89:bc:f1:df:69:56:1e:3d:c6:32:5c ------BEGIN CERTIFICATE----- -MIIDtzCCAp+gAwIBAgIQDOfg5RfYRv6P5WD8G/AwOTANBgkqhkiG9w0BAQUFADBl -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv -b3QgQ0EwHhcNMDYxMTEwMDAwMDAwWhcNMzExMTEwMDAwMDAwWjBlMQswCQYDVQQG -EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl -cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgQ0EwggEi -MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCtDhXO5EOAXLGH87dg+XESpa7c -JpSIqvTO9SA5KFhgDPiA2qkVlTJhPLWxKISKityfCgyDF3qPkKyK53lTXDGEKvYP -mDI2dsze3Tyoou9q+yHyUmHfnyDXH+Kx2f4YZNISW1/5WBg1vEfNoTb5a3/UsDg+ -wRvDjDPZ2C8Y/igPs6eD1sNuRMBhNZYW/lmci3Zt1/GiSw0r/wty2p5g0I6QNcZ4 -VYcgoc/lbQrISXwxmDNsIumH0DJaoroTghHtORedmTpyoeb6pNnVFzF1roV9Iq4/ -AUaG9ih5yLHa5FcXxH4cDrC0kqZWs72yl+2qp/C3xag/lRbQ/6GW6whfGHdPAgMB -AAGjYzBhMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW -BBRF66Kv9JLLgjEtUYunpyGd823IDzAfBgNVHSMEGDAWgBRF66Kv9JLLgjEtUYun -pyGd823IDzANBgkqhkiG9w0BAQUFAAOCAQEAog683+Lt8ONyc3pklL/3cmbYMuRC -dWKuh+vy1dneVrOfzM4UKLkNl2BcEkxY5NM9g0lFWJc1aRqoR+pWxnmrEthngYTf -fwk8lOa4JiwgvT2zKIn3X/8i4peEH+ll74fg38FnSbNd67IJKusm7Xi+fT8r87cm 
-NW1fiQG2SVufAQWbqz0lwcy2f8Lxb4bG+mRo64EtlOtCt/qMHt1i8b5QZ7dsvfPx -H2sMNgcWfzd8qVttevESRmCD1ycEvkvOl77DZypoEd+A5wwzZr8TDRRu838fYxAe -+o0bJW1sj6W3YQGx0qMmoRBxna3iw/nDmVG3KwcIzi7mULKn+gpFL6Lw8g== ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Global Root CA" -# Serial: 10944719598952040374951832963794454346 -# MD5 Fingerprint: 79:e4:a9:84:0d:7d:3a:96:d7:c0:4f:e2:43:4c:89:2e -# SHA1 Fingerprint: a8:98:5d:3a:65:e5:e5:c4:b2:d7:d6:6d:40:c6:dd:2f:b1:9c:54:36 -# SHA256 Fingerprint: 43:48:a0:e9:44:4c:78:cb:26:5e:05:8d:5e:89:44:b4:d8:4f:96:62:bd:26:db:25:7f:89:34:a4:43:c7:01:61 ------BEGIN CERTIFICATE----- -MIIDrzCCApegAwIBAgIQCDvgVpBCRrGhdWrJWZHHSjANBgkqhkiG9w0BAQUFADBh -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBD -QTAeFw0wNjExMTAwMDAwMDBaFw0zMTExMTAwMDAwMDBaMGExCzAJBgNVBAYTAlVT -MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j -b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IENBMIIBIjANBgkqhkiG -9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4jvhEXLeqKTTo1eqUKKPC3eQyaKl7hLOllsB -CSDMAZOnTjC3U/dDxGkAV53ijSLdhwZAAIEJzs4bg7/fzTtxRuLWZscFs3YnFo97 -nh6Vfe63SKMI2tavegw5BmV/Sl0fvBf4q77uKNd0f3p4mVmFaG5cIzJLv07A6Fpt -43C/dxC//AH2hdmoRBBYMql1GNXRor5H4idq9Joz+EkIYIvUX7Q6hL+hqkpMfT7P -T19sdl6gSzeRntwi5m3OFBqOasv+zbMUZBfHWymeMr/y7vrTC0LUq7dBMtoM1O/4 -gdW7jVg/tRvoSSiicNoxBN33shbyTApOB6jtSj1etX+jkMOvJwIDAQABo2MwYTAO -BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUA95QNVbR -TLtm8KPiGxvDl7I90VUwHwYDVR0jBBgwFoAUA95QNVbRTLtm8KPiGxvDl7I90VUw -DQYJKoZIhvcNAQEFBQADggEBAMucN6pIExIK+t1EnE9SsPTfrgT1eXkIoyQY/Esr -hMAtudXH/vTBH1jLuG2cenTnmCmrEbXjcKChzUyImZOMkXDiqw8cvpOp/2PV5Adg -06O/nVsJ8dWO41P0jmP6P6fbtGbfYmbW0W5BjfIttep3Sp+dWOIrWcBAI+0tKIJF -PnlUkiaY4IBIqDfv8NZ5YBberOgOzW6sRBc4L0na4UU+Krk2U886UAb3LujEV0ls -YSEY1QSteDwsOoBrp+uvFRTp2InBuThs4pFsiv9kuXclVzDAGySj4dzp30d8tbQk -CAUw7C29C79Fv1C5qfPrmAESrciIxpg0X40KPMbp1ZWVbd4= ------END CERTIFICATE----- - -# Issuer: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert High Assurance EV Root CA" -# Serial: 3553400076410547919724730734378100087 -# MD5 Fingerprint: d4:74:de:57:5c:39:b2:d3:9c:85:83:c5:c0:65:49:8a -# SHA1 Fingerprint: 5f:b7:ee:06:33:e2:59:db:ad:0c:4c:9a:e6:d3:8f:1a:61:c7:dc:25 -# SHA256 Fingerprint: 74:31:e5:f4:c3:c1:ce:46:90:77:4f:0b:61:e0:54:40:88:3b:a9:a0:1e:d0:0b:a6:ab:d7:80:6e:d3:b1:18:cf ------BEGIN CERTIFICATE----- -MIIDxTCCAq2gAwIBAgIQAqxcJmoLQJuPC3nyrkYldzANBgkqhkiG9w0BAQUFADBs -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSswKQYDVQQDEyJEaWdpQ2VydCBIaWdoIEFzc3VyYW5j -ZSBFViBSb290IENBMB4XDTA2MTExMDAwMDAwMFoXDTMxMTExMDAwMDAwMFowbDEL -MAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZMBcGA1UECxMQd3d3 -LmRpZ2ljZXJ0LmNvbTErMCkGA1UEAxMiRGlnaUNlcnQgSGlnaCBBc3N1cmFuY2Ug -RVYgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMbM5XPm -+9S75S0tMqbf5YE/yc0lSbZxKsPVlDRnogocsF9ppkCxxLeyj9CYpKlBWTrT3JTW -PNt0OKRKzE0lgvdKpVMSOO7zSW1xkX5jtqumX8OkhPhPYlG++MXs2ziS4wblCJEM -xChBVfvLWokVfnHoNb9Ncgk9vjo4UFt3MRuNs8ckRZqnrG0AFFoEt7oT61EKmEFB -Ik5lYYeBQVCmeVyJ3hlKV9Uu5l0cUyx+mM0aBhakaHPQNAQTXKFx01p8VdteZOE3 -hzBWBOURtCmAEvF5OYiiAhF8J2a3iLd48soKqDirCmTCv2ZdlYTBoSUeh10aUAsg -EsxBu24LUTi4S8sCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQF 
-MAMBAf8wHQYDVR0OBBYEFLE+w2kD+L9HAdSYJhoIAu9jZCvDMB8GA1UdIwQYMBaA -FLE+w2kD+L9HAdSYJhoIAu9jZCvDMA0GCSqGSIb3DQEBBQUAA4IBAQAcGgaX3Nec -nzyIZgYIVyHbIUf4KmeqvxgydkAQV8GK83rZEWWONfqe/EW1ntlMMUu4kehDLI6z -eM7b41N5cdblIZQB2lWHmiRk9opmzN6cN82oNLFpmyPInngiK3BD41VHMWEZ71jF -hS9OMPagMRYjyOfiZRYzy78aG6A9+MpeizGLYAiJLQwGXFK3xPkKmNEVX58Svnw2 -Yzi9RKR/5CYrCsSXaQ3pjOLAEFe4yHYSkVXySGnYvCoCWw9E1CAx2/S6cCZdkGCe -vEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep -+OkuE6N36B9K ------END CERTIFICATE----- - -# Issuer: CN=SwissSign Gold CA - G2 O=SwissSign AG -# Subject: CN=SwissSign Gold CA - G2 O=SwissSign AG -# Label: "SwissSign Gold CA - G2" -# Serial: 13492815561806991280 -# MD5 Fingerprint: 24:77:d9:a8:91:d1:3b:fa:88:2d:c2:ff:f8:cd:33:93 -# SHA1 Fingerprint: d8:c5:38:8a:b7:30:1b:1b:6e:d4:7a:e6:45:25:3a:6f:9f:1a:27:61 -# SHA256 Fingerprint: 62:dd:0b:e9:b9:f5:0a:16:3e:a0:f8:e7:5c:05:3b:1e:ca:57:ea:55:c8:68:8f:64:7c:68:81:f2:c8:35:7b:95 ------BEGIN CERTIFICATE----- -MIIFujCCA6KgAwIBAgIJALtAHEP1Xk+wMA0GCSqGSIb3DQEBBQUAMEUxCzAJBgNV -BAYTAkNIMRUwEwYDVQQKEwxTd2lzc1NpZ24gQUcxHzAdBgNVBAMTFlN3aXNzU2ln -biBHb2xkIENBIC0gRzIwHhcNMDYxMDI1MDgzMDM1WhcNMzYxMDI1MDgzMDM1WjBF -MQswCQYDVQQGEwJDSDEVMBMGA1UEChMMU3dpc3NTaWduIEFHMR8wHQYDVQQDExZT -d2lzc1NpZ24gR29sZCBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC -CgKCAgEAr+TufoskDhJuqVAtFkQ7kpJcyrhdhJJCEyq8ZVeCQD5XJM1QiyUqt2/8 -76LQwB8CJEoTlo8jE+YoWACjR8cGp4QjK7u9lit/VcyLwVcfDmJlD909Vopz2q5+ -bbqBHH5CjCA12UNNhPqE21Is8w4ndwtrvxEvcnifLtg+5hg3Wipy+dpikJKVyh+c -6bM8K8vzARO/Ws/BtQpgvd21mWRTuKCWs2/iJneRjOBiEAKfNA+k1ZIzUd6+jbqE -emA8atufK+ze3gE/bk3lUIbLtK/tREDFylqM2tIrfKjuvqblCqoOpd8FUrdVxyJd -MmqXl2MT28nbeTZ7hTpKxVKJ+STnnXepgv9VHKVxaSvRAiTysybUa9oEVeXBCsdt -MDeQKuSeFDNeFhdVxVu1yzSJkvGdJo+hB9TGsnhQ2wwMC3wLjEHXuendjIj3o02y -MszYF9rNt85mndT9Xv+9lz4pded+p2JYryU0pUHHPbwNUMoDAw8IWh+Vc3hiv69y -FGkOpeUDDniOJihC8AcLYiAQZzlG+qkDzAQ4embvIIO1jEpWjpEA/I5cgt6IoMPi -aG59je883WX0XaxR7ySArqpWl2/5rX3aYT+YdzylkbYcjCbaZaIJbcHiVOO5ykxM -gI93e2CaHt+28kgeDrpOVG2Y4OGiGqJ3UM/EY5LsRxmd6+ZrzsECAwEAAaOBrDCB -qTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUWyV7 -lqRlUX64OfPAeGZe6Drn8O4wHwYDVR0jBBgwFoAUWyV7lqRlUX64OfPAeGZe6Drn -8O4wRgYDVR0gBD8wPTA7BglghXQBWQECAQEwLjAsBggrBgEFBQcCARYgaHR0cDov -L3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIBACe6 -45R88a7A3hfm5djV9VSwg/S7zV4Fe0+fdWavPOhWfvxyeDgD2StiGwC5+OlgzczO -UYrHUDFu4Up+GC9pWbY9ZIEr44OE5iKHjn3g7gKZYbge9LgriBIWhMIxkziWMaa5 -O1M/wySTVltpkuzFwbs4AOPsF6m43Md8AYOfMke6UiI0HTJ6CVanfCU2qT1L2sCC -bwq7EsiHSycR+R4tx5M/nttfJmtS2S6K8RTGRI0Vqbe/vd6mGu6uLftIdxf+u+yv -GPUqUfA5hJeVbG4bwyvEdGB5JbAKJ9/fXtI5z0V9QkvfsywexcZdylU6oJxpmo/a -77KwPJ+HbBIrZXAVUjEaJM9vMSNQH4xPjyPDdEFjHFWoFN0+4FFQz/EbMFYOkrCC -hdiDyyJkvC24JdVUorgG6q2SpCSgwYa1ShNqR88uC1aVVMvOmttqtKay20EIhid3 -92qgQmwLOM7XdVAyksLfKzAiSNDVQTglXaTpXZ/GlHXQRf0wl0OPkKsKx4ZzYEpp -Ld6leNcG2mqeSz53OiATIgHQv2ieY2BrNU0LbbqhPcCT4H8js1WtciVORvnSFu+w -ZMEBnunKoGqYDs/YYPIvSbjkQuE4NRb0yG5P94FW6LqjviOvrv1vA+ACOzB2+htt -Qc8Bsem4yWb02ybzOqR08kkkW8mw0FfB+j564ZfJ ------END CERTIFICATE----- - -# Issuer: CN=SwissSign Silver CA - G2 O=SwissSign AG -# Subject: CN=SwissSign Silver CA - G2 O=SwissSign AG -# Label: "SwissSign Silver CA - G2" -# Serial: 5700383053117599563 -# MD5 Fingerprint: e0:06:a1:c9:7d:cf:c9:fc:0d:c0:56:75:96:d8:62:13 -# SHA1 Fingerprint: 9b:aa:e5:9f:56:ee:21:cb:43:5a:be:25:93:df:a7:f0:40:d1:1d:cb -# SHA256 Fingerprint: be:6c:4d:a2:bb:b9:ba:59:b6:f3:93:97:68:37:42:46:c3:c0:05:99:3f:a9:8f:02:0d:1d:ed:be:d4:8a:81:d5 ------BEGIN CERTIFICATE----- 
-MIIFvTCCA6WgAwIBAgIITxvUL1S7L0swDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UE -BhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMYU3dpc3NTaWdu -IFNpbHZlciBDQSAtIEcyMB4XDTA2MTAyNTA4MzI0NloXDTM2MTAyNTA4MzI0Nlow -RzELMAkGA1UEBhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMY -U3dpc3NTaWduIFNpbHZlciBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8A -MIICCgKCAgEAxPGHf9N4Mfc4yfjDmUO8x/e8N+dOcbpLj6VzHVxumK4DV644N0Mv -Fz0fyM5oEMF4rhkDKxD6LHmD9ui5aLlV8gREpzn5/ASLHvGiTSf5YXu6t+WiE7br -YT7QbNHm+/pe7R20nqA1W6GSy/BJkv6FCgU+5tkL4k+73JU3/JHpMjUi0R86TieF -nbAVlDLaYQ1HTWBCrpJH6INaUFjpiou5XaHc3ZlKHzZnu0jkg7Y360g6rw9njxcH -6ATK72oxh9TAtvmUcXtnZLi2kUpCe2UuMGoM9ZDulebyzYLs2aFK7PayS+VFheZt -eJMELpyCbTapxDFkH4aDCyr0NQp4yVXPQbBH6TCfmb5hqAaEuSh6XzjZG6k4sIN/ -c8HDO0gqgg8hm7jMqDXDhBuDsz6+pJVpATqJAHgE2cn0mRmrVn5bi4Y5FZGkECwJ -MoBgs5PAKrYYC51+jUnyEEp/+dVGLxmSo5mnJqy7jDzmDrxHB9xzUfFwZC8I+bRH -HTBsROopN4WSaGa8gzj+ezku01DwH/teYLappvonQfGbGHLy9YR0SslnxFSuSGTf -jNFusB3hB48IHpmccelM2KX3RxIfdNFRnobzwqIjQAtz20um53MGjMGg6cFZrEb6 -5i/4z3GcRm25xBWNOHkDRUjvxF3XCO6HOSKGsg0PWEP3calILv3q1h8CAwEAAaOB -rDCBqTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU -F6DNweRBtjpbO8tFnb0cwpj6hlgwHwYDVR0jBBgwFoAUF6DNweRBtjpbO8tFnb0c -wpj6hlgwRgYDVR0gBD8wPTA7BglghXQBWQEDAQEwLjAsBggrBgEFBQcCARYgaHR0 -cDovL3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIB -AHPGgeAn0i0P4JUw4ppBf1AsX19iYamGamkYDHRJ1l2E6kFSGG9YrVBWIGrGvShp -WJHckRE1qTodvBqlYJ7YH39FkWnZfrt4csEGDyrOj4VwYaygzQu4OSlWhDJOhrs9 -xCrZ1x9y7v5RoSJBsXECYxqCsGKrXlcSH9/L3XWgwF15kIwb4FDm3jH+mHtwX6WQ -2K34ArZv02DdQEsixT2tOnqfGhpHkXkzuoLcMmkDlm4fS/Bx/uNncqCxv1yL5PqZ -IseEuRuNI5c/7SXgz2W79WEE790eslpBIlqhn10s6FvJbakMDHiqYMZWjwFaDGi8 -aRl5xB9+lwW/xekkUV7U1UtT7dkjWjYDZaPBA61BMPNGG4WQr2W11bHkFlt4dR2X -em1ZqSqPe97Dh4kQmUlzeMg9vVE1dCrV8X5pGyq7O70luJpaPXJhkGaH7gzWTdQR -dAtq/gsD/KNVV4n+SsuuWxcFyPKNIzFTONItaj+CuY0IavdeQXRuwxF+B6wpYJE/ -OMpXEA29MC/HpeZBoNquBYeaoKRlbEwJDIm6uNO5wJOKMPqN5ZprFQFOZ6raYlY+ -hAhm0sQ2fac+EPyI4NSA5QC9qvNOBqN6avlicuMJT+ubDgEj8Z+7fNzcbBGXJbLy -tGMU0gYqZ4yD9c7qB9iaah7s5Aq7KkzrCWA5zspi2C5u ------END CERTIFICATE----- - -# Issuer: CN=SecureTrust CA O=SecureTrust Corporation -# Subject: CN=SecureTrust CA O=SecureTrust Corporation -# Label: "SecureTrust CA" -# Serial: 17199774589125277788362757014266862032 -# MD5 Fingerprint: dc:32:c3:a7:6d:25:57:c7:68:09:9d:ea:2d:a9:a2:d1 -# SHA1 Fingerprint: 87:82:c6:c3:04:35:3b:cf:d2:96:92:d2:59:3e:7d:44:d9:34:ff:11 -# SHA256 Fingerprint: f1:c1:b5:0a:e5:a2:0d:d8:03:0e:c9:f6:bc:24:82:3d:d3:67:b5:25:57:59:b4:e7:1b:61:fc:e9:f7:37:5d:73 ------BEGIN CERTIFICATE----- -MIIDuDCCAqCgAwIBAgIQDPCOXAgWpa1Cf/DrJxhZ0DANBgkqhkiG9w0BAQUFADBI -MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x -FzAVBgNVBAMTDlNlY3VyZVRydXN0IENBMB4XDTA2MTEwNzE5MzExOFoXDTI5MTIz -MTE5NDA1NVowSDELMAkGA1UEBhMCVVMxIDAeBgNVBAoTF1NlY3VyZVRydXN0IENv -cnBvcmF0aW9uMRcwFQYDVQQDEw5TZWN1cmVUcnVzdCBDQTCCASIwDQYJKoZIhvcN -AQEBBQADggEPADCCAQoCggEBAKukgeWVzfX2FI7CT8rU4niVWJxB4Q2ZQCQXOZEz -Zum+4YOvYlyJ0fwkW2Gz4BERQRwdbvC4u/jep4G6pkjGnx29vo6pQT64lO0pGtSO -0gMdA+9tDWccV9cGrcrI9f4Or2YlSASWC12juhbDCE/RRvgUXPLIXgGZbf2IzIao -wW8xQmxSPmjL8xk037uHGFaAJsTQ3MBv396gwpEWoGQRS0S8Hvbn+mPeZqx2pHGj -7DaUaHp3pLHnDi+BeuK1cobvomuL8A/b01k/unK8RCSc43Oz969XL0Imnal0ugBS -8kvNU3xHCzaFDmapCJcWNFfBZveA4+1wVMeT4C4oFVmHursCAwEAAaOBnTCBmjAT -BgkrBgEEAYI3FAIEBh4EAEMAQTALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB -/zAdBgNVHQ4EFgQUQjK2FvoE/f5dS3rD/fdMQB1aQ68wNAYDVR0fBC0wKzApoCeg -JYYjaHR0cDovL2NybC5zZWN1cmV0cnVzdC5jb20vU1RDQS5jcmwwEAYJKwYBBAGC -NxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBADDtT0rhWDpSclu1pqNlGKa7UTt3 
-6Z3q059c4EVlew3KW+JwULKUBRSuSceNQQcSc5R+DCMh/bwQf2AQWnL1mA6s7Ll/ -3XpvXdMc9P+IBWlCqQVxyLesJugutIxq/3HcuLHfmbx8IVQr5Fiiu1cprp6poxkm -D5kuCLDv/WnPmRoJjeOnnyvJNjR7JLN4TJUXpAYmHrZkUjZfYGfZnMUFdAvnZyPS -CPyI6a6Lf+Ew9Dd+/cYy2i2eRDAwbO4H3tI0/NL/QPZL9GZGBlSm8jIKYyYwa5vR -3ItHuuG51WLQoqD0ZwV4KWMabwTW+MZMo5qxN7SN5ShLHZ4swrhovO0C7jE= ------END CERTIFICATE----- - -# Issuer: CN=Secure Global CA O=SecureTrust Corporation -# Subject: CN=Secure Global CA O=SecureTrust Corporation -# Label: "Secure Global CA" -# Serial: 9751836167731051554232119481456978597 -# MD5 Fingerprint: cf:f4:27:0d:d4:ed:dc:65:16:49:6d:3d:da:bf:6e:de -# SHA1 Fingerprint: 3a:44:73:5a:e5:81:90:1f:24:86:61:46:1e:3b:9c:c4:5f:f5:3a:1b -# SHA256 Fingerprint: 42:00:f5:04:3a:c8:59:0e:bb:52:7d:20:9e:d1:50:30:29:fb:cb:d4:1c:a1:b5:06:ec:27:f1:5a:de:7d:ac:69 ------BEGIN CERTIFICATE----- -MIIDvDCCAqSgAwIBAgIQB1YipOjUiolN9BPI8PjqpTANBgkqhkiG9w0BAQUFADBK -MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x -GTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwHhcNMDYxMTA3MTk0MjI4WhcNMjkx -MjMxMTk1MjA2WjBKMQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3Qg -Q29ycG9yYXRpb24xGTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwggEiMA0GCSqG -SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvNS7YrGxVaQZx5RNoJLNP2MwhR/jxYDiJ -iQPpvepeRlMJ3Fz1Wuj3RSoC6zFh1ykzTM7HfAo3fg+6MpjhHZevj8fcyTiW89sa -/FHtaMbQbqR8JNGuQsiWUGMu4P51/pinX0kuleM5M2SOHqRfkNJnPLLZ/kG5VacJ -jnIFHovdRIWCQtBJwB1g8NEXLJXr9qXBkqPFwqcIYA1gBBCWeZ4WNOaptvolRTnI -HmX5k/Wq8VLcmZg9pYYaDDUz+kulBAYVHDGA76oYa8J719rO+TMg1fW9ajMtgQT7 -sFzUnKPiXB3jqUJ1XnvUd+85VLrJChgbEplJL4hL/VBi0XPnj3pDAgMBAAGjgZ0w -gZowEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0PBAQDAgGGMA8GA1UdEwEB/wQF -MAMBAf8wHQYDVR0OBBYEFK9EBMJBfkiD2045AuzshHrmzsmkMDQGA1UdHwQtMCsw -KaAnoCWGI2h0dHA6Ly9jcmwuc2VjdXJldHJ1c3QuY29tL1NHQ0EuY3JsMBAGCSsG -AQQBgjcVAQQDAgEAMA0GCSqGSIb3DQEBBQUAA4IBAQBjGghAfaReUw132HquHw0L -URYD7xh8yOOvaliTFGCRsoTciE6+OYo68+aCiV0BN7OrJKQVDpI1WkpEXk5X+nXO -H0jOZvQ8QCaSmGwb7iRGDBezUqXbpZGRzzfTb+cnCDpOGR86p1hcF895P4vkp9Mm -I50mD1hp/Ed+stCNi5O/KU9DaXR2Z0vPB4zmAve14bRDtUstFJ/53CYNv6ZHdAbY -iNE6KTCEztI5gGIbqMdXSbxqVVFnFUq+NQfk1XWYN3kwFNspnWzFacxHVaIw98xc -f8LDmBxrThaA63p4ZUWiABqvDA1VZDRIuJK58bRQKfJPIx/abKwfROHdI3hRW8cW ------END CERTIFICATE----- - -# Issuer: CN=COMODO Certification Authority O=COMODO CA Limited -# Subject: CN=COMODO Certification Authority O=COMODO CA Limited -# Label: "COMODO Certification Authority" -# Serial: 104350513648249232941998508985834464573 -# MD5 Fingerprint: 5c:48:dc:f7:42:72:ec:56:94:6d:1c:cc:71:35:80:75 -# SHA1 Fingerprint: 66:31:bf:9e:f7:4f:9e:b6:c9:d5:a6:0c:ba:6a:be:d1:f7:bd:ef:7b -# SHA256 Fingerprint: 0c:2c:d6:3d:f7:80:6f:a3:99:ed:e8:09:11:6b:57:5b:f8:79:89:f0:65:18:f9:80:8c:86:05:03:17:8b:af:66 ------BEGIN CERTIFICATE----- -MIIEHTCCAwWgAwIBAgIQToEtioJl4AsC7j41AkblPTANBgkqhkiG9w0BAQUFADCB -gTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G -A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxJzAlBgNV -BAMTHkNPTU9ETyBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEyMDEwMDAw -MDBaFw0yOTEyMzEyMzU5NTlaMIGBMQswCQYDVQQGEwJHQjEbMBkGA1UECBMSR3Jl -YXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHEwdTYWxmb3JkMRowGAYDVQQKExFDT01P -RE8gQ0EgTGltaXRlZDEnMCUGA1UEAxMeQ09NT0RPIENlcnRpZmljYXRpb24gQXV0 -aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0ECLi3LjkRv3 -UcEbVASY06m/weaKXTuH+7uIzg3jLz8GlvCiKVCZrts7oVewdFFxze1CkU1B/qnI -2GqGd0S7WWaXUF601CxwRM/aN5VCaTwwxHGzUvAhTaHYujl8HJ6jJJ3ygxaYqhZ8 -Q5sVW7euNJH+1GImGEaaP+vB+fGQV+useg2L23IwambV4EajcNxo2f8ESIl33rXp -+2dtQem8Ob0y2WIC8bGoPW43nOIv4tOiJovGuFVDiOEjPqXSJDlqR6sA1KGzqSX+ 
-DT+nHbrTUcELpNqsOO9VUCQFZUaTNE8tja3G1CEZ0o7KBWFxB3NH5YoZEr0ETc5O -nKVIrLsm9wIDAQABo4GOMIGLMB0GA1UdDgQWBBQLWOWLxkwVN6RAqTCpIb5HNlpW -/zAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zBJBgNVHR8EQjBAMD6g -PKA6hjhodHRwOi8vY3JsLmNvbW9kb2NhLmNvbS9DT01PRE9DZXJ0aWZpY2F0aW9u -QXV0aG9yaXR5LmNybDANBgkqhkiG9w0BAQUFAAOCAQEAPpiem/Yb6dc5t3iuHXIY -SdOH5EOC6z/JqvWote9VfCFSZfnVDeFs9D6Mk3ORLgLETgdxb8CPOGEIqB6BCsAv -IC9Bi5HcSEW88cbeunZrM8gALTFGTO3nnc+IlP8zwFboJIYmuNg4ON8qa90SzMc/ -RxdMosIGlgnW2/4/PEZB31jiVg88O8EckzXZOFKs7sjsLjBOlDW0JB9LeGna8gI4 -zJVSk/BwJVmcIGfE7vmLV2H0knZ9P4SNVbfo5azV8fUZVqZa+5Acr5Pr5RzUZ5dd -BA6+C4OmF4O5MBKgxTMVBbkN+8cFduPYSo38NBejxiEovjBFMR7HeL5YYTisO+IB -ZQ== ------END CERTIFICATE----- - -# Issuer: CN=COMODO ECC Certification Authority O=COMODO CA Limited -# Subject: CN=COMODO ECC Certification Authority O=COMODO CA Limited -# Label: "COMODO ECC Certification Authority" -# Serial: 41578283867086692638256921589707938090 -# MD5 Fingerprint: 7c:62:ff:74:9d:31:53:5e:68:4a:d5:78:aa:1e:bf:23 -# SHA1 Fingerprint: 9f:74:4e:9f:2b:4d:ba:ec:0f:31:2c:50:b6:56:3b:8e:2d:93:c3:11 -# SHA256 Fingerprint: 17:93:92:7a:06:14:54:97:89:ad:ce:2f:8f:34:f7:f0:b6:6d:0f:3a:e3:a3:b8:4d:21:ec:15:db:ba:4f:ad:c7 ------BEGIN CERTIFICATE----- -MIICiTCCAg+gAwIBAgIQH0evqmIAcFBUTAGem2OZKjAKBggqhkjOPQQDAzCBhTEL -MAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE -BxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMT -IkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwMzA2MDAw -MDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdy -ZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09N -T0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlv -biBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQDR3svdcmCFYX7deSR -FtSrYpn1PlILBs5BAH+X4QokPB0BBO490o0JlwzgdeT6+3eKKvUDYEs2ixYjFq0J -cfRK9ChQtP6IHG4/bC8vCVlbpVsLM5niwz2J+Wos77LTBumjQjBAMB0GA1UdDgQW -BBR1cacZSBm8nZ3qQUfflMRId5nTeTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/ -BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjEA7wNbeqy3eApyt4jf/7VGFAkK+qDm -fQjGGoe9GKhzvSbKYAydzpmfz1wPMOG+FDHqAjAU9JM8SaczepBGR7NjfRObTrdv -GDeAU/7dIOA1mjbRxwG55tzd8/8dLDoWV9mSOdY= ------END CERTIFICATE----- - -# Issuer: CN=Certigna O=Dhimyotis -# Subject: CN=Certigna O=Dhimyotis -# Label: "Certigna" -# Serial: 18364802974209362175 -# MD5 Fingerprint: ab:57:a6:5b:7d:42:82:19:b5:d8:58:26:28:5e:fd:ff -# SHA1 Fingerprint: b1:2e:13:63:45:86:a4:6f:1a:b2:60:68:37:58:2d:c4:ac:fd:94:97 -# SHA256 Fingerprint: e3:b6:a2:db:2e:d7:ce:48:84:2f:7a:c5:32:41:c7:b7:1d:54:14:4b:fb:40:c1:1f:3f:1d:0b:42:f5:ee:a1:2d ------BEGIN CERTIFICATE----- -MIIDqDCCApCgAwIBAgIJAP7c4wEPyUj/MA0GCSqGSIb3DQEBBQUAMDQxCzAJBgNV -BAYTAkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hMB4X -DTA3MDYyOTE1MTMwNVoXDTI3MDYyOTE1MTMwNVowNDELMAkGA1UEBhMCRlIxEjAQ -BgNVBAoMCURoaW15b3RpczERMA8GA1UEAwwIQ2VydGlnbmEwggEiMA0GCSqGSIb3 -DQEBAQUAA4IBDwAwggEKAoIBAQDIaPHJ1tazNHUmgh7stL7qXOEm7RFHYeGifBZ4 -QCHkYJ5ayGPhxLGWkv8YbWkj4Sti993iNi+RB7lIzw7sebYs5zRLcAglozyHGxny -gQcPOJAZ0xH+hrTy0V4eHpbNgGzOOzGTtvKg0KmVEn2lmsxryIRWijOp5yIVUxbw -zBfsV1/pogqYCd7jX5xv3EjjhQsVWqa6n6xI4wmy9/Qy3l40vhx4XUJbzg4ij02Q -130yGLMLLGq/jj8UEYkgDncUtT2UCIf3JR7VsmAA7G8qKCVuKj4YYxclPz5EIBb2 -JsglrgVKtOdjLPOMFlN+XPsRGgjBRmKfIrjxwo1p3Po6WAbfAgMBAAGjgbwwgbkw -DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUGu3+QTmQtCRZvgHyUtVF9lo53BEw -ZAYDVR0jBF0wW4AUGu3+QTmQtCRZvgHyUtVF9lo53BGhOKQ2MDQxCzAJBgNVBAYT -AkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hggkA/tzj -AQ/JSP8wDgYDVR0PAQH/BAQDAgEGMBEGCWCGSAGG+EIBAQQEAwIABzANBgkqhkiG 
-9w0BAQUFAAOCAQEAhQMeknH2Qq/ho2Ge6/PAD/Kl1NqV5ta+aDY9fm4fTIrv0Q8h -bV6lUmPOEvjvKtpv6zf+EwLHyzs+ImvaYS5/1HI93TDhHkxAGYwP15zRgzB7mFnc -fca5DClMoTOi62c6ZYTTluLtdkVwj7Ur3vkj1kluPBS1xp81HlDQwY9qcEQCYsuu -HWhBp6pX6FOqB9IG9tUUBguRA3UsbHK1YZWaDYu5Def131TN3ubY1gkIl2PlwS6w -t0QmwCbAr1UwnjvVNioZBPRcHv/PLLf/0P2HQBHVESO7SMAhqaQoLf0V+LBOK/Qw -WyH8EZE0vkHve52Xdf+XlcCWWC/qu0bXu+TZLg== ------END CERTIFICATE----- - -# Issuer: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority -# Subject: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority -# Label: "ePKI Root Certification Authority" -# Serial: 28956088682735189655030529057352760477 -# MD5 Fingerprint: 1b:2e:00:ca:26:06:90:3d:ad:fe:6f:15:68:d3:6b:b3 -# SHA1 Fingerprint: 67:65:0d:f1:7e:8e:7e:5b:82:40:a4:f4:56:4b:cf:e2:3d:69:c6:f0 -# SHA256 Fingerprint: c0:a6:f4:dc:63:a2:4b:fd:cf:54:ef:2a:6a:08:2a:0a:72:de:35:80:3e:2f:f5:ff:52:7a:e5:d8:72:06:df:d5 ------BEGIN CERTIFICATE----- -MIIFsDCCA5igAwIBAgIQFci9ZUdcr7iXAF7kBtK8nTANBgkqhkiG9w0BAQUFADBe -MQswCQYDVQQGEwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0 -ZC4xKjAoBgNVBAsMIWVQS0kgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe -Fw0wNDEyMjAwMjMxMjdaFw0zNDEyMjAwMjMxMjdaMF4xCzAJBgNVBAYTAlRXMSMw -IQYDVQQKDBpDaHVuZ2h3YSBUZWxlY29tIENvLiwgTHRkLjEqMCgGA1UECwwhZVBL -SSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIICIjANBgkqhkiG9w0BAQEF -AAOCAg8AMIICCgKCAgEA4SUP7o3biDN1Z82tH306Tm2d0y8U82N0ywEhajfqhFAH -SyZbCUNsIZ5qyNUD9WBpj8zwIuQf5/dqIjG3LBXy4P4AakP/h2XGtRrBp0xtInAh -ijHyl3SJCRImHJ7K2RKilTza6We/CKBk49ZCt0Xvl/T29de1ShUCWH2YWEtgvM3X -DZoTM1PRYfl61dd4s5oz9wCGzh1NlDivqOx4UXCKXBCDUSH3ET00hl7lSM2XgYI1 -TBnsZfZrxQWh7kcT1rMhJ5QQCtkkO7q+RBNGMD+XPNjX12ruOzjjK9SXDrkb5wdJ -fzcq+Xd4z1TtW0ado4AOkUPB1ltfFLqfpo0kR0BZv3I4sjZsN/+Z0V0OWQqraffA -sgRFelQArr5T9rXn4fg8ozHSqf4hUmTFpmfwdQcGlBSBVcYn5AGPF8Fqcde+S/uU -WH1+ETOxQvdibBjWzwloPn9s9h6PYq2lY9sJpx8iQkEeb5mKPtf5P0B6ebClAZLS -nT0IFaUQAS2zMnaolQ2zepr7BxB4EW/hj8e6DyUadCrlHJhBmd8hh+iVBmoKs2pH -dmX2Os+PYhcZewoozRrSgx4hxyy/vv9haLdnG7t4TY3OZ+XkwY63I2binZB1NJip -NiuKmpS5nezMirH4JYlcWrYvjB9teSSnUmjDhDXiZo1jDiVN1Rmy5nk3pyKdVDEC -AwEAAaNqMGgwHQYDVR0OBBYEFB4M97Zn8uGSJglFwFU5Lnc/QkqiMAwGA1UdEwQF -MAMBAf8wOQYEZyoHAAQxMC8wLQIBADAJBgUrDgMCGgUAMAcGBWcqAwAABBRFsMLH -ClZ87lt4DJX5GFPBphzYEDANBgkqhkiG9w0BAQUFAAOCAgEACbODU1kBPpVJufGB -uvl2ICO1J2B01GqZNF5sAFPZn/KmsSQHRGoqxqWOeBLoR9lYGxMqXnmbnwoqZ6Yl -PwZpVnPDimZI+ymBV3QGypzqKOg4ZyYr8dW1P2WT+DZdjo2NQCCHGervJ8A9tDkP -JXtoUHRVnAxZfVo9QZQlUgjgRywVMRnVvwdVxrsStZf0X4OFunHB2WyBEXYKCrC/ -gpf36j36+uwtqSiUO1bd0lEursC9CBWMd1I0ltabrNMdjmEPNXubrjlpC2JgQCA2 -j6/7Nu4tCEoduL+bXPjqpRugc6bY+G7gMwRfaKonh+3ZwZCc7b3jajWvY9+rGNm6 -5ulK6lCKD2GTHuItGeIwlDWSXQ62B68ZgI9HkFFLLk3dheLSClIKF5r8GrBQAuUB -o2M3IUxExJtRmREOc5wGj1QupyheRDmHVi03vYVElOEMSyycw5KFNGHLD7ibSkNS -/jQ6fbjpKdx2qcgw+BRxgMYeNkh0IkFch4LoGHGLQYlE535YW6i4jRPpp2zDR+2z -Gp1iro2C6pSe3VkQw63d4k3jMdXH7OjysP6SHhYKGvzZ8/gntsm+HbRsZJB/9OTE -W9c3rkIO3aQab3yIVMUWbuF6aC74Or8NpDyJO3inTmODBCEIZ43ygknQW/2xzQ+D -hNQ+IIX3Sj0rnP0qCglN6oH4EZw= ------END CERTIFICATE----- - -# Issuer: O=certSIGN OU=certSIGN ROOT CA -# Subject: O=certSIGN OU=certSIGN ROOT CA -# Label: "certSIGN ROOT CA" -# Serial: 35210227249154 -# MD5 Fingerprint: 18:98:c0:d6:e9:3a:fc:f9:b0:f5:0c:f7:4b:01:44:17 -# SHA1 Fingerprint: fa:b7:ee:36:97:26:62:fb:2d:b0:2a:f6:bf:03:fd:e8:7c:4b:2f:9b -# SHA256 Fingerprint: ea:a9:62:c4:fa:4a:6b:af:eb:e4:15:19:6d:35:1c:cd:88:8d:4f:53:f3:fa:8a:e6:d7:c4:66:a9:4e:60:42:bb ------BEGIN CERTIFICATE----- -MIIDODCCAiCgAwIBAgIGIAYFFnACMA0GCSqGSIb3DQEBBQUAMDsxCzAJBgNVBAYT 
-AlJPMREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBD -QTAeFw0wNjA3MDQxNzIwMDRaFw0zMTA3MDQxNzIwMDRaMDsxCzAJBgNVBAYTAlJP -MREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBDQTCC -ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALczuX7IJUqOtdu0KBuqV5Do -0SLTZLrTk+jUrIZhQGpgV2hUhE28alQCBf/fm5oqrl0Hj0rDKH/v+yv6efHHrfAQ -UySQi2bJqIirr1qjAOm+ukbuW3N7LBeCgV5iLKECZbO9xSsAfsT8AzNXDe3i+s5d -RdY4zTW2ssHQnIFKquSyAVwdj1+ZxLGt24gh65AIgoDzMKND5pCCrlUoSe1b16kQ -OA7+j0xbm0bqQfWwCHTD0IgztnzXdN/chNFDDnU5oSVAKOp4yw4sLjmdjItuFhwv -JoIQ4uNllAoEwF73XVv4EOLQunpL+943AAAaWyjj0pxzPjKHmKHJUS/X3qwzs08C -AwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAcYwHQYDVR0O -BBYEFOCMm9slSbPxfIbWskKHC9BroNnkMA0GCSqGSIb3DQEBBQUAA4IBAQA+0hyJ -LjX8+HXd5n9liPRyTMks1zJO890ZeUe9jjtbkw9QSSQTaxQGcu8J06Gh40CEyecY -MnQ8SG4Pn0vU9x7Tk4ZkVJdjclDVVc/6IJMCopvDI5NOFlV2oHB5bc0hH88vLbwZ -44gx+FkagQnIl6Z0x2DEW8xXjrJ1/RsCCdtZb3KTafcxQdaIOL+Hsr0Wefmq5L6I -Jd1hJyMctTEHBDa0GpC9oHRxUIltvBTjD4au8as+x6AJzKNI0eDbZOeStc+vckNw -i/nDhDwTqn6Sm1dTk/pwwpEOMfmbZ13pljheX7NzTogVZ96edhBiIL5VaZVDADlN -9u6wWk5JRFRYX0KD ------END CERTIFICATE----- - -# Issuer: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services) -# Subject: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services) -# Label: "NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny" -# Serial: 80544274841616 -# MD5 Fingerprint: c5:a1:b7:ff:73:dd:d6:d7:34:32:18:df:fc:3c:ad:88 -# SHA1 Fingerprint: 06:08:3f:59:3f:15:a1:04:a0:69:a4:6b:a9:03:d0:06:b7:97:09:91 -# SHA256 Fingerprint: 6c:61:da:c3:a2:de:f0:31:50:6b:e0:36:d2:a6:fe:40:19:94:fb:d1:3d:f9:c8:d4:66:59:92:74:c4:46:ec:98 ------BEGIN CERTIFICATE----- -MIIEFTCCAv2gAwIBAgIGSUEs5AAQMA0GCSqGSIb3DQEBCwUAMIGnMQswCQYDVQQG -EwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFTATBgNVBAoMDE5ldExvY2sgS2Z0LjE3 -MDUGA1UECwwuVGFuw7pzw610dsOhbnlraWFkw7NrIChDZXJ0aWZpY2F0aW9uIFNl -cnZpY2VzKTE1MDMGA1UEAwwsTmV0TG9jayBBcmFueSAoQ2xhc3MgR29sZCkgRsWR -dGFuw7pzw610dsOhbnkwHhcNMDgxMjExMTUwODIxWhcNMjgxMjA2MTUwODIxWjCB -pzELMAkGA1UEBhMCSFUxETAPBgNVBAcMCEJ1ZGFwZXN0MRUwEwYDVQQKDAxOZXRM -b2NrIEtmdC4xNzA1BgNVBAsMLlRhbsO6c8OtdHbDoW55a2lhZMOzayAoQ2VydGlm -aWNhdGlvbiBTZXJ2aWNlcykxNTAzBgNVBAMMLE5ldExvY2sgQXJhbnkgKENsYXNz -IEdvbGQpIEbFkXRhbsO6c8OtdHbDoW55MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A -MIIBCgKCAQEAxCRec75LbRTDofTjl5Bu0jBFHjzuZ9lk4BqKf8owyoPjIMHj9DrT -lF8afFttvzBPhCf2nx9JvMaZCpDyD/V/Q4Q3Y1GLeqVw/HpYzY6b7cNGbIRwXdrz -AZAj/E4wqX7hJ2Pn7WQ8oLjJM2P+FpD/sLj916jAwJRDC7bVWaaeVtAkH3B5r9s5 -VA1lddkVQZQBr17s9o3x/61k/iCa11zr/qYfCGSji3ZVrR47KGAuhyXoqq8fxmRG -ILdwfzzeSNuWU7c5d+Qa4scWhHaXWy+7GRWF+GmF9ZmnqfI0p6m2pgP8b4Y9VHx2 -BJtr+UBdADTHLpl1neWIA6pN+APSQnbAGwIDAKiLo0UwQzASBgNVHRMBAf8ECDAG -AQH/AgEEMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUzPpnk/C2uNClwB7zU/2M -U9+D15YwDQYJKoZIhvcNAQELBQADggEBAKt/7hwWqZw8UQCgwBEIBaeZ5m8BiFRh -bvG5GK1Krf6BQCOUL/t1fC8oS2IkgYIL9WHxHG64YTjrgfpioTtaYtOUZcTh5m2C -+C8lcLIhJsFyUR+MLMOEkMNaj7rP9KdlpeuY0fsFskZ1FSNqb4VjMIDw1Z4fKRzC -bLBQWV2QWzuoDTDPv31/zvGdg73JRm4gpvlhUbohL3u+pRVjodSVh/GeufOJ8z2F -uLjbvrW5KfnaNwUASZQDhETnv0Mxz3WLJdH0pmT1kvarBes96aULNmLazAZfNou2 -XjG4Kvte9nHfRCaexOYNkbQudZWAUWpLMKawYqGT8ZvYzsRjdT9ZR7E= ------END CERTIFICATE----- - -# Issuer: CN=SecureSign RootCA11 O=Japan Certification Services, Inc. -# Subject: CN=SecureSign RootCA11 O=Japan Certification Services, Inc. 
-# Label: "SecureSign RootCA11" -# Serial: 1 -# MD5 Fingerprint: b7:52:74:e2:92:b4:80:93:f2:75:e4:cc:d7:f2:ea:26 -# SHA1 Fingerprint: 3b:c4:9f:48:f8:f3:73:a0:9c:1e:bd:f8:5b:b1:c3:65:c7:d8:11:b3 -# SHA256 Fingerprint: bf:0f:ee:fb:9e:3a:58:1a:d5:f9:e9:db:75:89:98:57:43:d2:61:08:5c:4d:31:4f:6f:5d:72:59:aa:42:16:12 ------BEGIN CERTIFICATE----- -MIIDbTCCAlWgAwIBAgIBATANBgkqhkiG9w0BAQUFADBYMQswCQYDVQQGEwJKUDEr -MCkGA1UEChMiSmFwYW4gQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcywgSW5jLjEcMBoG -A1UEAxMTU2VjdXJlU2lnbiBSb290Q0ExMTAeFw0wOTA0MDgwNDU2NDdaFw0yOTA0 -MDgwNDU2NDdaMFgxCzAJBgNVBAYTAkpQMSswKQYDVQQKEyJKYXBhbiBDZXJ0aWZp -Y2F0aW9uIFNlcnZpY2VzLCBJbmMuMRwwGgYDVQQDExNTZWN1cmVTaWduIFJvb3RD -QTExMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA/XeqpRyQBTvLTJsz -i1oURaTnkBbR31fSIRCkF/3frNYfp+TbfPfs37gD2pRY/V1yfIw/XwFndBWW4wI8 -h9uuywGOwvNmxoVF9ALGOrVisq/6nL+k5tSAMJjzDbaTj6nU2DbysPyKyiyhFTOV -MdrAG/LuYpmGYz+/3ZMqg6h2uRMft85OQoWPIucuGvKVCbIFtUROd6EgvanyTgp9 -UK31BQ1FT0Zx/Sg+U/sE2C3XZR1KG/rPO7AxmjVuyIsG0wCR8pQIZUyxNAYAeoni -8McDWc/V1uinMrPmmECGxc0nEovMe863ETxiYAcjPitAbpSACW22s293bzUIUPsC -h8U+iQIDAQABo0IwQDAdBgNVHQ4EFgQUW/hNT7KlhtQ60vFjmqC+CfZXt94wDgYD -VR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEB -AKChOBZmLqdWHyGcBvod7bkixTgm2E5P7KN/ed5GIaGHd48HCJqypMWvDzKYC3xm -KbabfSVSSUOrTC4rbnpwrxYO4wJs+0LmGJ1F2FXI6Dvd5+H0LgscNFxsWEr7jIhQ -X5Ucv+2rIrVls4W6ng+4reV6G4pQOh29Dbx7VFALuUKvVaAYga1lme++5Jy/xIWr -QbJUb9wlze144o4MjQlJ3WN7WmmWAiGovVJZ6X01y8hSyn+B/tlr0/cR7SXf+Of5 -pPpyl4RTDaXQMhhRdlkUbA/r7F+AjHVDg8OFmP9Mni0N5HeDk061lgeLKBObjBmN -QSdJQO7e5iNEOdyhIta6A/I= ------END CERTIFICATE----- - -# Issuer: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. -# Subject: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. -# Label: "Microsec e-Szigno Root CA 2009" -# Serial: 14014712776195784473 -# MD5 Fingerprint: f8:49:f4:03:bc:44:2d:83:be:48:69:7d:29:64:fc:b1 -# SHA1 Fingerprint: 89:df:74:fe:5c:f4:0f:4a:80:f9:e3:37:7d:54:da:91:e1:01:31:8e -# SHA256 Fingerprint: 3c:5f:81:fe:a5:fa:b8:2c:64:bf:a2:ea:ec:af:cd:e8:e0:77:fc:86:20:a7:ca:e5:37:16:3d:f3:6e:db:f3:78 ------BEGIN CERTIFICATE----- -MIIECjCCAvKgAwIBAgIJAMJ+QwRORz8ZMA0GCSqGSIb3DQEBCwUAMIGCMQswCQYD -VQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFjAUBgNVBAoMDU1pY3Jvc2VjIEx0 -ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3ppZ25vIFJvb3QgQ0EgMjAwOTEfMB0G -CSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5odTAeFw0wOTA2MTYxMTMwMThaFw0y -OTEyMzAxMTMwMThaMIGCMQswCQYDVQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3Qx -FjAUBgNVBAoMDU1pY3Jvc2VjIEx0ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3pp -Z25vIFJvb3QgQ0EgMjAwOTEfMB0GCSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5o -dTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAOn4j/NjrdqG2KfgQvvP -kd6mJviZpWNwrZuuyjNAfW2WbqEORO7hE52UQlKavXWFdCyoDh2Tthi3jCyoz/tc -cbna7P7ofo/kLx2yqHWH2Leh5TvPmUpG0IMZfcChEhyVbUr02MelTTMuhTlAdX4U -fIASmFDHQWe4oIBhVKZsTh/gnQ4H6cm6M+f+wFUoLAKApxn1ntxVUwOXewdI/5n7 -N4okxFnMUBBjjqqpGrCEGob5X7uxUG6k0QrM1XF+H6cbfPVTbiJfyyvm1HxdrtbC -xkzlBQHZ7Vf8wSN5/PrIJIOV87VqUQHQd9bpEqH5GoP7ghu5sJf0dgYzQ0mg/wu1 -+rUCAwEAAaOBgDB+MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G -A1UdDgQWBBTLD8bfQkPMPcu1SCOhGnqmKrs0aDAfBgNVHSMEGDAWgBTLD8bfQkPM -Pcu1SCOhGnqmKrs0aDAbBgNVHREEFDASgRBpbmZvQGUtc3ppZ25vLmh1MA0GCSqG -SIb3DQEBCwUAA4IBAQDJ0Q5eLtXMs3w+y/w9/w0olZMEyL/azXm4Q5DwpL7v8u8h -mLzU1F0G9u5C7DBsoKqpyvGvivo/C3NqPuouQH4frlRheesuCDfXI/OMn74dseGk -ddug4lQUsbocKaQY9hK6ohQU4zE1yED/t+AFdlfBHFny+L/k7SViXITwfn4fs775 -tyERzAMBVnCnEJIeGzSBHq2cGsMEPO0CYdYeBvNfOofyK/FFh+U9rNHHV4S9a67c -2Pm2G2JwCz02yULyMtd6YebS2z3PyKnJm9zbWETXbzivf3jTo60adbocwTZ8jx5t -HMN1Rq41Bab2XD0h7lbwyYIiLXpUq3DDfSJlgnCW ------END CERTIFICATE----- - -# Issuer: 
CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 -# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 -# Label: "GlobalSign Root CA - R3" -# Serial: 4835703278459759426209954 -# MD5 Fingerprint: c5:df:b8:49:ca:05:13:55:ee:2d:ba:1a:c3:3e:b0:28 -# SHA1 Fingerprint: d6:9b:56:11:48:f0:1c:77:c5:45:78:c1:09:26:df:5b:85:69:76:ad -# SHA256 Fingerprint: cb:b5:22:d7:b7:f1:27:ad:6a:01:13:86:5b:df:1c:d4:10:2e:7d:07:59:af:63:5a:7c:f4:72:0d:c9:63:c5:3b ------BEGIN CERTIFICATE----- -MIIDXzCCAkegAwIBAgILBAAAAAABIVhTCKIwDQYJKoZIhvcNAQELBQAwTDEgMB4G -A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjMxEzARBgNVBAoTCkdsb2JhbFNp -Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDkwMzE4MTAwMDAwWhcNMjkwMzE4 -MTAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMzETMBEG -A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI -hvcNAQEBBQADggEPADCCAQoCggEBAMwldpB5BngiFvXAg7aEyiie/QV2EcWtiHL8 -RgJDx7KKnQRfJMsuS+FggkbhUqsMgUdwbN1k0ev1LKMPgj0MK66X17YUhhB5uzsT -gHeMCOFJ0mpiLx9e+pZo34knlTifBtc+ycsmWQ1z3rDI6SYOgxXG71uL0gRgykmm -KPZpO/bLyCiR5Z2KYVc3rHQU3HTgOu5yLy6c+9C7v/U9AOEGM+iCK65TpjoWc4zd -QQ4gOsC0p6Hpsk+QLjJg6VfLuQSSaGjlOCZgdbKfd/+RFO+uIEn8rUAVSNECMWEZ -XriX7613t2Saer9fwRPvm2L7DWzgVGkWqQPabumDk3F2xmmFghcCAwEAAaNCMEAw -DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI/wS3+o -LkUkrk1Q+mOai97i3Ru8MA0GCSqGSIb3DQEBCwUAA4IBAQBLQNvAUKr+yAzv95ZU -RUm7lgAJQayzE4aGKAczymvmdLm6AC2upArT9fHxD4q/c2dKg8dEe3jgr25sbwMp -jjM5RcOO5LlXbKr8EpbsU8Yt5CRsuZRj+9xTaGdWPoO4zzUhw8lo/s7awlOqzJCK -6fBdRoyV3XpYKBovHd7NADdBj+1EbddTKJd+82cEHhXXipa0095MJ6RMG3NzdvQX -mcIfeg7jLQitChws/zyrVQ4PkX4268NXSb7hLi18YIvDQVETI53O9zJrlAGomecs -Mx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7rkpeDMdmztcpH -WD9f ------END CERTIFICATE----- - -# Issuer: CN=Izenpe.com O=IZENPE S.A. -# Subject: CN=Izenpe.com O=IZENPE S.A. 
-# Label: "Izenpe.com" -# Serial: 917563065490389241595536686991402621 -# MD5 Fingerprint: a6:b0:cd:85:80:da:5c:50:34:a3:39:90:2f:55:67:73 -# SHA1 Fingerprint: 2f:78:3d:25:52:18:a7:4a:65:39:71:b5:2c:a2:9c:45:15:6f:e9:19 -# SHA256 Fingerprint: 25:30:cc:8e:98:32:15:02:ba:d9:6f:9b:1f:ba:1b:09:9e:2d:29:9e:0f:45:48:bb:91:4f:36:3b:c0:d4:53:1f ------BEGIN CERTIFICATE----- -MIIF8TCCA9mgAwIBAgIQALC3WhZIX7/hy/WL1xnmfTANBgkqhkiG9w0BAQsFADA4 -MQswCQYDVQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6 -ZW5wZS5jb20wHhcNMDcxMjEzMTMwODI4WhcNMzcxMjEzMDgyNzI1WjA4MQswCQYD -VQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6ZW5wZS5j -b20wggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDJ03rKDx6sp4boFmVq -scIbRTJxldn+EFvMr+eleQGPicPK8lVx93e+d5TzcqQsRNiekpsUOqHnJJAKClaO -xdgmlOHZSOEtPtoKct2jmRXagaKH9HtuJneJWK3W6wyyQXpzbm3benhB6QiIEn6H -LmYRY2xU+zydcsC8Lv/Ct90NduM61/e0aL6i9eOBbsFGb12N4E3GVFWJGjMxCrFX -uaOKmMPsOzTFlUFpfnXCPCDFYbpRR6AgkJOhkEvzTnyFRVSa0QUmQbC1TR0zvsQD -yCV8wXDbO/QJLVQnSKwv4cSsPsjLkkxTOTcj7NMB+eAJRE1NZMDhDVqHIrytG6P+ -JrUV86f8hBnp7KGItERphIPzidF0BqnMC9bC3ieFUCbKF7jJeodWLBoBHmy+E60Q -rLUk9TiRodZL2vG70t5HtfG8gfZZa88ZU+mNFctKy6lvROUbQc/hhqfK0GqfvEyN -BjNaooXlkDWgYlwWTvDjovoDGrQscbNYLN57C9saD+veIR8GdwYDsMnvmfzAuU8L -hij+0rnq49qlw0dpEuDb8PYZi+17cNcC1u2HGCgsBCRMd+RIihrGO5rUD8r6ddIB -QFqNeb+Lz0vPqhbBleStTIo+F5HUsWLlguWABKQDfo2/2n+iD5dPDNMN+9fR5XJ+ -HMh3/1uaD7euBUbl8agW7EekFwIDAQABo4H2MIHzMIGwBgNVHREEgagwgaWBD2lu -Zm9AaXplbnBlLmNvbaSBkTCBjjFHMEUGA1UECgw+SVpFTlBFIFMuQS4gLSBDSUYg -QTAxMzM3MjYwLVJNZXJjLlZpdG9yaWEtR2FzdGVpeiBUMTA1NSBGNjIgUzgxQzBB -BgNVBAkMOkF2ZGEgZGVsIE1lZGl0ZXJyYW5lbyBFdG9yYmlkZWEgMTQgLSAwMTAx -MCBWaXRvcmlhLUdhc3RlaXowDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC -AQYwHQYDVR0OBBYEFB0cZQ6o8iV7tJHP5LGx5r1VdGwFMA0GCSqGSIb3DQEBCwUA -A4ICAQB4pgwWSp9MiDrAyw6lFn2fuUhfGI8NYjb2zRlrrKvV9pF9rnHzP7MOeIWb -laQnIUdCSnxIOvVFfLMMjlF4rJUT3sb9fbgakEyrkgPH7UIBzg/YsfqikuFgba56 -awmqxinuaElnMIAkejEWOVt+8Rwu3WwJrfIxwYJOubv5vr8qhT/AQKM6WfxZSzwo -JNu0FXWuDYi6LnPAvViH5ULy617uHjAimcs30cQhbIHsvm0m5hzkQiCeR7Csg1lw -LDXWrzY0tM07+DKo7+N4ifuNRSzanLh+QBxh5z6ikixL8s36mLYp//Pye6kfLqCT -VyvehQP5aTfLnnhqBbTFMXiJ7HqnheG5ezzevh55hM6fcA5ZwjUukCox2eRFekGk -LhObNA5me0mrZJfQRsN5nXJQY6aYWwa9SG3YOYNw6DXwBdGqvOPbyALqfP2C2sJb -UjWumDqtujWTI6cfSN01RpiyEGjkpTHCClguGYEQyVB1/OpaFs4R1+7vUIgtYf8/ -QnMFlEPVjjxOAToZpR9GTnfQXeWBIiGH/pR9hNiTrdZoQ0iy2+tzJOeRf1SktoA+ -naM8THLCV8Sg1Mw4J87VBp6iSNnpn86CcDaTmjvfliHjWbcM2pE38P1ZWrOZyGls -QyYBNWNgVYkDOnXYukrZVP/u3oDYLdE41V4tC5h9Pmzb/CaIxw== ------END CERTIFICATE----- - -# Issuer: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. -# Subject: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. 
-# Label: "Go Daddy Root Certificate Authority - G2" -# Serial: 0 -# MD5 Fingerprint: 80:3a:bc:22:c1:e6:fb:8d:9b:3b:27:4a:32:1b:9a:01 -# SHA1 Fingerprint: 47:be:ab:c9:22:ea:e8:0e:78:78:34:62:a7:9f:45:c2:54:fd:e6:8b -# SHA256 Fingerprint: 45:14:0b:32:47:eb:9c:c8:c5:b4:f0:d7:b5:30:91:f7:32:92:08:9e:6e:5a:63:e2:74:9d:d3:ac:a9:19:8e:da ------BEGIN CERTIFICATE----- -MIIDxTCCAq2gAwIBAgIBADANBgkqhkiG9w0BAQsFADCBgzELMAkGA1UEBhMCVVMx -EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxGjAYBgNVBAoT -EUdvRGFkZHkuY29tLCBJbmMuMTEwLwYDVQQDEyhHbyBEYWRkeSBSb290IENlcnRp -ZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAwMFoXDTM3MTIzMTIz -NTk1OVowgYMxCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6b25hMRMwEQYDVQQH -EwpTY290dHNkYWxlMRowGAYDVQQKExFHb0RhZGR5LmNvbSwgSW5jLjExMC8GA1UE -AxMoR28gRGFkZHkgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIw -DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL9xYgjx+lk09xvJGKP3gElY6SKD -E6bFIEMBO4Tx5oVJnyfq9oQbTqC023CYxzIBsQU+B07u9PpPL1kwIuerGVZr4oAH -/PMWdYA5UXvl+TW2dE6pjYIT5LY/qQOD+qK+ihVqf94Lw7YZFAXK6sOoBJQ7Rnwy -DfMAZiLIjWltNowRGLfTshxgtDj6AozO091GB94KPutdfMh8+7ArU6SSYmlRJQVh -GkSBjCypQ5Yj36w6gZoOKcUcqeldHraenjAKOc7xiID7S13MMuyFYkMlNAJWJwGR -tDtwKj9useiciAF9n9T521NtYJ2/LOdYq7hfRvzOxBsDPAnrSTFcaUaz4EcCAwEA -AaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE -FDqahQcQZyi27/a9BUFuIMGU2g/eMA0GCSqGSIb3DQEBCwUAA4IBAQCZ21151fmX -WWcDYfF+OwYxdS2hII5PZYe096acvNjpL9DbWu7PdIxztDhC2gV7+AJ1uP2lsdeu -9tfeE8tTEH6KRtGX+rcuKxGrkLAngPnon1rpN5+r5N9ss4UXnT3ZJE95kTXWXwTr -gIOrmgIttRD02JDHBHNA7XIloKmf7J6raBKZV8aPEjoJpL1E/QYVN8Gb5DKj7Tjo -2GTzLH4U/ALqn83/B2gX2yKQOC16jdFU8WnjXzPKej17CuPKf1855eJ1usV2GDPO -LPAvTK33sefOT6jEm0pUBsV/fdUID+Ic/n4XuKxe9tQWskMJDE32p2u0mYRlynqI -4uJEvlz36hz1 ------END CERTIFICATE----- - -# Issuer: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. -# Subject: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. 
-# Label: "Starfield Root Certificate Authority - G2" -# Serial: 0 -# MD5 Fingerprint: d6:39:81:c6:52:7e:96:69:fc:fc:ca:66:ed:05:f2:96 -# SHA1 Fingerprint: b5:1c:06:7c:ee:2b:0c:3d:f8:55:ab:2d:92:f4:fe:39:d4:e7:0f:0e -# SHA256 Fingerprint: 2c:e1:cb:0b:f9:d2:f9:e1:02:99:3f:be:21:51:52:c3:b2:dd:0c:ab:de:1c:68:e5:31:9b:83:91:54:db:b7:f5 ------BEGIN CERTIFICATE----- -MIID3TCCAsWgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBjzELMAkGA1UEBhMCVVMx -EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT -HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAMTKVN0YXJmaWVs -ZCBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAw -MFoXDTM3MTIzMTIzNTk1OVowgY8xCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6 -b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFyZmllbGQgVGVj -aG5vbG9naWVzLCBJbmMuMTIwMAYDVQQDEylTdGFyZmllbGQgUm9vdCBDZXJ0aWZp -Y2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC -ggEBAL3twQP89o/8ArFvW59I2Z154qK3A2FWGMNHttfKPTUuiUP3oWmb3ooa/RMg -nLRJdzIpVv257IzdIvpy3Cdhl+72WoTsbhm5iSzchFvVdPtrX8WJpRBSiUZV9Lh1 -HOZ/5FSuS/hVclcCGfgXcVnrHigHdMWdSL5stPSksPNkN3mSwOxGXn/hbVNMYq/N -Hwtjuzqd+/x5AJhhdM8mgkBj87JyahkNmcrUDnXMN/uLicFZ8WJ/X7NfZTD4p7dN -dloedl40wOiWVpmKs/B/pM293DIxfJHP4F8R+GuqSVzRmZTRouNjWwl2tVZi4Ut0 -HZbUJtQIBFnQmA4O5t78w+wfkPECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO -BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFHwMMh+n2TB/xH1oo2Kooc6rB1snMA0G -CSqGSIb3DQEBCwUAA4IBAQARWfolTwNvlJk7mh+ChTnUdgWUXuEok21iXQnCoKjU -sHU48TRqneSfioYmUeYs0cYtbpUgSpIB7LiKZ3sx4mcujJUDJi5DnUox9g61DLu3 -4jd/IroAow57UvtruzvE03lRTs2Q9GcHGcg8RnoNAX3FWOdt5oUwF5okxBDgBPfg -8n/Uqgr/Qh037ZTlZFkSIHc40zI+OIF1lnP6aI+xy84fxez6nH7PfrHxBy22/L/K -pL/QlwVKvOoYKAKQvVR4CSFx09F9HdkWsKlhPdAKACL8x3vLCWRFCztAgfd9fDL1 -mMpYjn0q7pBZc2T5NnReJaH1ZgUufzkVqSr7UIuOhWn0 ------END CERTIFICATE----- - -# Issuer: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. -# Subject: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. 
-# Label: "Starfield Services Root Certificate Authority - G2" -# Serial: 0 -# MD5 Fingerprint: 17:35:74:af:7b:61:1c:eb:f4:f9:3c:e2:ee:40:f9:a2 -# SHA1 Fingerprint: 92:5a:8f:8d:2c:6d:04:e0:66:5f:59:6a:ff:22:d8:63:e8:25:6f:3f -# SHA256 Fingerprint: 56:8d:69:05:a2:c8:87:08:a4:b3:02:51:90:ed:cf:ed:b1:97:4a:60:6a:13:c6:e5:29:0f:cb:2a:e6:3e:da:b5 ------BEGIN CERTIFICATE----- -MIID7zCCAtegAwIBAgIBADANBgkqhkiG9w0BAQsFADCBmDELMAkGA1UEBhMCVVMx -EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT -HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xOzA5BgNVBAMTMlN0YXJmaWVs -ZCBTZXJ2aWNlcyBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5 -MDkwMTAwMDAwMFoXDTM3MTIzMTIzNTk1OVowgZgxCzAJBgNVBAYTAlVTMRAwDgYD -VQQIEwdBcml6b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFy -ZmllbGQgVGVjaG5vbG9naWVzLCBJbmMuMTswOQYDVQQDEzJTdGFyZmllbGQgU2Vy -dmljZXMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZI -hvcNAQEBBQADggEPADCCAQoCggEBANUMOsQq+U7i9b4Zl1+OiFOxHz/Lz58gE20p -OsgPfTz3a3Y4Y9k2YKibXlwAgLIvWX/2h/klQ4bnaRtSmpDhcePYLQ1Ob/bISdm2 -8xpWriu2dBTrz/sm4xq6HZYuajtYlIlHVv8loJNwU4PahHQUw2eeBGg6345AWh1K -Ts9DkTvnVtYAcMtS7nt9rjrnvDH5RfbCYM8TWQIrgMw0R9+53pBlbQLPLJGmpufe -hRhJfGZOozptqbXuNC66DQO4M99H67FrjSXZm86B0UVGMpZwh94CDklDhbZsc7tk -6mFBrMnUVN+HL8cisibMn1lUaJ/8viovxFUcdUBgF4UCVTmLfwUCAwEAAaNCMEAw -DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJxfAN+q -AdcwKziIorhtSpzyEZGDMA0GCSqGSIb3DQEBCwUAA4IBAQBLNqaEd2ndOxmfZyMI -bw5hyf2E3F/YNoHN2BtBLZ9g3ccaaNnRbobhiCPPE95Dz+I0swSdHynVv/heyNXB -ve6SbzJ08pGCL72CQnqtKrcgfU28elUSwhXqvfdqlS5sdJ/PHLTyxQGjhdByPq1z -qwubdQxtRbeOlKyWN7Wg0I8VRw7j6IPdj/3vQQF3zCepYoUz8jcI73HPdwbeyBkd -iEDPfUYd/x7H4c7/I9vG+o1VTqkC50cRRj70/b17KSa7qWFiNyi2LSr2EIZkyXCn -0q23KXB56jzaYyWf/Wi3MOxw+3WKt21gZ7IeyLnp2KhvAotnDU0mV3HaIPzBSlCN -sSi6 ------END CERTIFICATE----- - -# Issuer: CN=AffirmTrust Commercial O=AffirmTrust -# Subject: CN=AffirmTrust Commercial O=AffirmTrust -# Label: "AffirmTrust Commercial" -# Serial: 8608355977964138876 -# MD5 Fingerprint: 82:92:ba:5b:ef:cd:8a:6f:a6:3d:55:f9:84:f6:d6:b7 -# SHA1 Fingerprint: f9:b5:b6:32:45:5f:9c:be:ec:57:5f:80:dc:e9:6e:2c:c7:b2:78:b7 -# SHA256 Fingerprint: 03:76:ab:1d:54:c5:f9:80:3c:e4:b2:e2:01:a0:ee:7e:ef:7b:57:b6:36:e8:a9:3c:9b:8d:48:60:c9:6f:5f:a7 ------BEGIN CERTIFICATE----- -MIIDTDCCAjSgAwIBAgIId3cGJyapsXwwDQYJKoZIhvcNAQELBQAwRDELMAkGA1UE -BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz -dCBDb21tZXJjaWFsMB4XDTEwMDEyOTE0MDYwNloXDTMwMTIzMTE0MDYwNlowRDEL -MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp -cm1UcnVzdCBDb21tZXJjaWFsMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC -AQEA9htPZwcroRX1BiLLHwGy43NFBkRJLLtJJRTWzsO3qyxPxkEylFf6EqdbDuKP -Hx6GGaeqtS25Xw2Kwq+FNXkyLbscYjfysVtKPcrNcV/pQr6U6Mje+SJIZMblq8Yr -ba0F8PrVC8+a5fBQpIs7R6UjW3p6+DM/uO+Zl+MgwdYoic+U+7lF7eNAFxHUdPAL -MeIrJmqbTFeurCA+ukV6BfO9m2kVrn1OIGPENXY6BwLJN/3HR+7o8XYdcxXyl6S1 -yHp52UKqK39c/s4mT6NmgTWvRLpUHhwwMmWd5jyTXlBOeuM61G7MGvv50jeuJCqr -VwMiKA1JdX+3KNp1v47j3A55MQIDAQABo0IwQDAdBgNVHQ4EFgQUnZPGU4teyq8/ -nx4P5ZmVvCT2lI8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ -KoZIhvcNAQELBQADggEBAFis9AQOzcAN/wr91LoWXym9e2iZWEnStB03TX8nfUYG -XUPGhi4+c7ImfU+TqbbEKpqrIZcUsd6M06uJFdhrJNTxFq7YpFzUf1GO7RgBsZNj -vbz4YYCanrHOQnDiqX0GJX0nof5v7LMeJNrjS1UaADs1tDvZ110w/YETifLCBivt -Z8SOyUOyXGsViQK8YvxO8rUzqrJv0wqiUOP2O+guRMLbZjipM1ZI8W0bM40NjD9g -N53Tym1+NH4Nn3J2ixufcv1SNUFFApYvHLKac0khsUlHRUe072o0EclNmsxZt9YC -nlpOZbWUrhvfKbAW8b8Angc6F2S1BLUjIZkKlTuXfO8= ------END CERTIFICATE----- - -# Issuer: CN=AffirmTrust Networking O=AffirmTrust -# Subject: CN=AffirmTrust Networking 
O=AffirmTrust -# Label: "AffirmTrust Networking" -# Serial: 8957382827206547757 -# MD5 Fingerprint: 42:65:ca:be:01:9a:9a:4c:a9:8c:41:49:cd:c0:d5:7f -# SHA1 Fingerprint: 29:36:21:02:8b:20:ed:02:f5:66:c5:32:d1:d6:ed:90:9f:45:00:2f -# SHA256 Fingerprint: 0a:81:ec:5a:92:97:77:f1:45:90:4a:f3:8d:5d:50:9f:66:b5:e2:c5:8f:cd:b5:31:05:8b:0e:17:f3:f0:b4:1b ------BEGIN CERTIFICATE----- -MIIDTDCCAjSgAwIBAgIIfE8EORzUmS0wDQYJKoZIhvcNAQEFBQAwRDELMAkGA1UE -BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz -dCBOZXR3b3JraW5nMB4XDTEwMDEyOTE0MDgyNFoXDTMwMTIzMTE0MDgyNFowRDEL -MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp -cm1UcnVzdCBOZXR3b3JraW5nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC -AQEAtITMMxcua5Rsa2FSoOujz3mUTOWUgJnLVWREZY9nZOIG41w3SfYvm4SEHi3y -YJ0wTsyEheIszx6e/jarM3c1RNg1lho9Nuh6DtjVR6FqaYvZ/Ls6rnla1fTWcbua -kCNrmreIdIcMHl+5ni36q1Mr3Lt2PpNMCAiMHqIjHNRqrSK6mQEubWXLviRmVSRL -QESxG9fhwoXA3hA/Pe24/PHxI1Pcv2WXb9n5QHGNfb2V1M6+oF4nI979ptAmDgAp -6zxG8D1gvz9Q0twmQVGeFDdCBKNwV6gbh+0t+nvujArjqWaJGctB+d1ENmHP4ndG -yH329JKBNv3bNPFyfvMMFr20FQIDAQABo0IwQDAdBgNVHQ4EFgQUBx/S55zawm6i -QLSwelAQUHTEyL0wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ -KoZIhvcNAQEFBQADggEBAIlXshZ6qML91tmbmzTCnLQyFE2npN/svqe++EPbkTfO -tDIuUFUaNU52Q3Eg75N3ThVwLofDwR1t3Mu1J9QsVtFSUzpE0nPIxBsFZVpikpzu -QY0x2+c06lkh1QF612S4ZDnNye2v7UsDSKegmQGA3GWjNq5lWUhPgkvIZfFXHeVZ -Lgo/bNjR9eUJtGxUAArgFU2HdW23WJZa3W3SAKD0m0i+wzekujbgfIeFlxoVot4u -olu9rxj5kFDNcFn4J2dHy8egBzp90SxdbBk6ZrV9/ZFvgrG+CJPbFEfxojfHRZ48 -x3evZKiT3/Zpg4Jg8klCNO1aAFSFHBY2kgxc+qatv9s= ------END CERTIFICATE----- - -# Issuer: CN=AffirmTrust Premium O=AffirmTrust -# Subject: CN=AffirmTrust Premium O=AffirmTrust -# Label: "AffirmTrust Premium" -# Serial: 7893706540734352110 -# MD5 Fingerprint: c4:5d:0e:48:b6:ac:28:30:4e:0a:bc:f9:38:16:87:57 -# SHA1 Fingerprint: d8:a6:33:2c:e0:03:6f:b1:85:f6:63:4f:7d:6a:06:65:26:32:28:27 -# SHA256 Fingerprint: 70:a7:3f:7f:37:6b:60:07:42:48:90:45:34:b1:14:82:d5:bf:0e:69:8e:cc:49:8d:f5:25:77:eb:f2:e9:3b:9a ------BEGIN CERTIFICATE----- -MIIFRjCCAy6gAwIBAgIIbYwURrGmCu4wDQYJKoZIhvcNAQEMBQAwQTELMAkGA1UE -BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1UcnVz -dCBQcmVtaXVtMB4XDTEwMDEyOTE0MTAzNloXDTQwMTIzMTE0MTAzNlowQTELMAkG -A1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1U -cnVzdCBQcmVtaXVtMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxBLf -qV/+Qd3d9Z+K4/as4Tx4mrzY8H96oDMq3I0gW64tb+eT2TZwamjPjlGjhVtnBKAQ -JG9dKILBl1fYSCkTtuG+kU3fhQxTGJoeJKJPj/CihQvL9Cl/0qRY7iZNyaqoe5rZ -+jjeRFcV5fiMyNlI4g0WJx0eyIOFJbe6qlVBzAMiSy2RjYvmia9mx+n/K+k8rNrS -s8PhaJyJ+HoAVt70VZVs+7pk3WKL3wt3MutizCaam7uqYoNMtAZ6MMgpv+0GTZe5 -HMQxK9VfvFMSF5yZVylmd2EhMQcuJUmdGPLu8ytxjLW6OQdJd/zvLpKQBY0tL3d7 -70O/Nbua2Plzpyzy0FfuKE4mX4+QaAkvuPjcBukumj5Rp9EixAqnOEhss/n/fauG -V+O61oV4d7pD6kh/9ti+I20ev9E2bFhc8e6kGVQa9QPSdubhjL08s9NIS+LI+H+S -qHZGnEJlPqQewQcDWkYtuJfzt9WyVSHvutxMAJf7FJUnM7/oQ0dG0giZFmA7mn7S -5u046uwBHjxIVkkJx0w3AJ6IDsBz4W9m6XJHMD4Q5QsDyZpCAGzFlH5hxIrff4Ia -C1nEWTJ3s7xgaVY5/bQGeyzWZDbZvUjthB9+pSKPKrhC9IK31FOQeE4tGv2Bb0TX -OwF0lkLgAOIua+rF7nKsu7/+6qqo+Nz2snmKtmcCAwEAAaNCMEAwHQYDVR0OBBYE -FJ3AZ6YMItkm9UWrpmVSESfYRaxjMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/ -BAQDAgEGMA0GCSqGSIb3DQEBDAUAA4ICAQCzV00QYk465KzquByvMiPIs0laUZx2 -KI15qldGF9X1Uva3ROgIRL8YhNILgM3FEv0AVQVhh0HctSSePMTYyPtwni94loMg -Nt58D2kTiKV1NpgIpsbfrM7jWNa3Pt668+s0QNiigfV4Py/VpfzZotReBA4Xrf5B -8OWycvpEgjNC6C1Y91aMYj+6QrCcDFx+LmUmXFNPALJ4fqENmS2NuB2OosSw/WDQ -MKSOyARiqcTtNd56l+0OOF6SL5Nwpamcb6d9Ex1+xghIsV5n61EIJenmJWtSKZGc -0jlzCFfemQa0W50QBuHCAKi4HEoCChTQwUHK+4w1IX2COPKpVJEZNZOUbWo6xbLQ 
-u4mGk+ibyQ86p3q4ofB4Rvr8Ny/lioTz3/4E2aFooC8k4gmVBtWVyuEklut89pMF -u+1z6S3RdTnX5yTb2E5fQ4+e0BQ5v1VwSJlXMbSc7kqYA5YwH2AG7hsj/oFgIxpH -YoWlzBk0gG+zrBrjn/B7SK3VAdlntqlyk+otZrWyuOQ9PLLvTIzq6we/qzWaVYa8 -GKa1qF60g2xraUDTn9zxw2lrueFtCfTxqlB2Cnp9ehehVZZCmTEJ3WARjQUwfuaO -RtGdFNrHF+QFlozEJLUbzxQHskD4o55BhrwE0GuWyCqANP2/7waj3VjFhT0+j/6e -KeC2uAloGRwYQw== ------END CERTIFICATE----- - -# Issuer: CN=AffirmTrust Premium ECC O=AffirmTrust -# Subject: CN=AffirmTrust Premium ECC O=AffirmTrust -# Label: "AffirmTrust Premium ECC" -# Serial: 8401224907861490260 -# MD5 Fingerprint: 64:b0:09:55:cf:b1:d5:99:e2:be:13:ab:a6:5d:ea:4d -# SHA1 Fingerprint: b8:23:6b:00:2f:1d:16:86:53:01:55:6c:11:a4:37:ca:eb:ff:c3:bb -# SHA256 Fingerprint: bd:71:fd:f6:da:97:e4:cf:62:d1:64:7a:dd:25:81:b0:7d:79:ad:f8:39:7e:b4:ec:ba:9c:5e:84:88:82:14:23 ------BEGIN CERTIFICATE----- -MIIB/jCCAYWgAwIBAgIIdJclisc/elQwCgYIKoZIzj0EAwMwRTELMAkGA1UEBhMC -VVMxFDASBgNVBAoMC0FmZmlybVRydXN0MSAwHgYDVQQDDBdBZmZpcm1UcnVzdCBQ -cmVtaXVtIEVDQzAeFw0xMDAxMjkxNDIwMjRaFw00MDEyMzExNDIwMjRaMEUxCzAJ -BgNVBAYTAlVTMRQwEgYDVQQKDAtBZmZpcm1UcnVzdDEgMB4GA1UEAwwXQWZmaXJt -VHJ1c3QgUHJlbWl1bSBFQ0MwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQNMF4bFZ0D -0KF5Nbc6PJJ6yhUczWLznCZcBz3lVPqj1swS6vQUX+iOGasvLkjmrBhDeKzQN8O9 -ss0s5kfiGuZjuD0uL3jET9v0D6RoTFVya5UdThhClXjMNzyR4ptlKymjQjBAMB0G -A1UdDgQWBBSaryl6wBE1NSZRMADDav5A1a7WPDAPBgNVHRMBAf8EBTADAQH/MA4G -A1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNnADBkAjAXCfOHiFBar8jAQr9HX/Vs -aobgxCd05DhT1wV/GzTjxi+zygk8N53X57hG8f2h4nECMEJZh0PUUd+60wkyWs6I -flc9nF9Ca/UHLbXwgpP5WW+uZPpY5Yse42O+tYHNbwKMeQ== ------END CERTIFICATE----- - -# Issuer: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority -# Subject: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority -# Label: "Certum Trusted Network CA" -# Serial: 279744 -# MD5 Fingerprint: d5:e9:81:40:c5:18:69:fc:46:2c:89:75:62:0f:aa:78 -# SHA1 Fingerprint: 07:e0:32:e0:20:b7:2c:3f:19:2f:06:28:a2:59:3a:19:a7:0f:06:9e -# SHA256 Fingerprint: 5c:58:46:8d:55:f5:8e:49:7e:74:39:82:d2:b5:00:10:b6:d1:65:37:4a:cf:83:a7:d4:a3:2d:b7:68:c4:40:8e ------BEGIN CERTIFICATE----- -MIIDuzCCAqOgAwIBAgIDBETAMA0GCSqGSIb3DQEBBQUAMH4xCzAJBgNVBAYTAlBM -MSIwIAYDVQQKExlVbml6ZXRvIFRlY2hub2xvZ2llcyBTLkEuMScwJQYDVQQLEx5D -ZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxIjAgBgNVBAMTGUNlcnR1bSBU -cnVzdGVkIE5ldHdvcmsgQ0EwHhcNMDgxMDIyMTIwNzM3WhcNMjkxMjMxMTIwNzM3 -WjB+MQswCQYDVQQGEwJQTDEiMCAGA1UEChMZVW5pemV0byBUZWNobm9sb2dpZXMg -Uy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MSIw -IAYDVQQDExlDZXJ0dW0gVHJ1c3RlZCBOZXR3b3JrIENBMIIBIjANBgkqhkiG9w0B -AQEFAAOCAQ8AMIIBCgKCAQEA4/t9o3K6wvDJFIf1awFO4W5AB7ptJ11/91sts1rH -UV+rpDKmYYe2bg+G0jACl/jXaVehGDldamR5xgFZrDwxSjh80gTSSyjoIF87B6LM -TXPb865Px1bVWqeWifrzq2jUI4ZZJ88JJ7ysbnKDHDBy3+Ci6dLhdHUZvSqeexVU -BBvXQzmtVSjF4hq79MDkrjhJM8x2hZ85RdKknvISjFH4fOQtf/WsX+sWn7Et0brM -kUJ3TCXJkDhv2/DM+44el1k+1WBO5gUo7Ul5E0u6SNsv+XLTOcr+H9g0cvW0QM8x -AcPs3hEtF10fuFDRXhmnad4HMyjKUJX5p1TLVIZQRan5SQIDAQABo0IwQDAPBgNV -HRMBAf8EBTADAQH/MB0GA1UdDgQWBBQIds3LB/8k9sXN7buQvOKEN0Z19zAOBgNV -HQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQEFBQADggEBAKaorSLOAT2mo/9i0Eidi15y -sHhE49wcrwn9I0j6vSrEuVUEtRCjjSfeC4Jj0O7eDDd5QVsisrCaQVymcODU0HfL -I9MA4GxWL+FpDQ3Zqr8hgVDZBqWo/5U30Kr+4rP1mS1FhIrlQgnXdAIv94nYmem8 -J9RHjboNRhx3zxSkHLmkMcScKHQDNP8zGSal6Q10tz6XxnboJ5ajZt3hrvJBW8qY -VoNzcOSGGtIxQbovvi0TWnZvTuhOgQ4/WwMioBK+ZlgRSssDxLQqKi2WF+A5VLxI -03YnnZotBqbJ7DnSq9ufmgsnAjUpsUCV5/nonFWIGUbWtzT1fs45mtk48VH3Tyw= ------END CERTIFICATE----- - -# Issuer: CN=TWCA Root Certification Authority O=TAIWAN-CA 
OU=Root CA -# Subject: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA -# Label: "TWCA Root Certification Authority" -# Serial: 1 -# MD5 Fingerprint: aa:08:8f:f6:f9:7b:b7:f2:b1:a7:1e:9b:ea:ea:bd:79 -# SHA1 Fingerprint: cf:9e:87:6d:d3:eb:fc:42:26:97:a3:b5:a3:7a:a0:76:a9:06:23:48 -# SHA256 Fingerprint: bf:d8:8f:e1:10:1c:41:ae:3e:80:1b:f8:be:56:35:0e:e9:ba:d1:a6:b9:bd:51:5e:dc:5c:6d:5b:87:11:ac:44 ------BEGIN CERTIFICATE----- -MIIDezCCAmOgAwIBAgIBATANBgkqhkiG9w0BAQUFADBfMQswCQYDVQQGEwJUVzES -MBAGA1UECgwJVEFJV0FOLUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFU -V0NBIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwODI4MDcyNDMz -WhcNMzAxMjMxMTU1OTU5WjBfMQswCQYDVQQGEwJUVzESMBAGA1UECgwJVEFJV0FO -LUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFUV0NBIFJvb3QgQ2VydGlm -aWNhdGlvbiBBdXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB -AQCwfnK4pAOU5qfeCTiRShFAh6d8WWQUe7UREN3+v9XAu1bihSX0NXIP+FPQQeFE -AcK0HMMxQhZHhTMidrIKbw/lJVBPhYa+v5guEGcevhEFhgWQxFnQfHgQsIBct+HH -K3XLfJ+utdGdIzdjp9xCoi2SBBtQwXu4PhvJVgSLL1KbralW6cH/ralYhzC2gfeX -RfwZVzsrb+RH9JlF/h3x+JejiB03HFyP4HYlmlD4oFT/RJB2I9IyxsOrBr/8+7/z -rX2SYgJbKdM1o5OaQ2RgXbL6Mv87BK9NQGr5x+PvI/1ry+UPizgN7gr8/g+YnzAx -3WxSZfmLgb4i4RxYA7qRG4kHAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV -HRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqOFsmjd6LWvJPelSDGRjjCDWmujANBgkq -hkiG9w0BAQUFAAOCAQEAPNV3PdrfibqHDAhUaiBQkr6wQT25JmSDCi/oQMCXKCeC -MErJk/9q56YAf4lCmtYR5VPOL8zy2gXE/uJQxDqGfczafhAJO5I1KlOy/usrBdls -XebQ79NqZp4VKIV66IIArB6nCWlWQtNoURi+VJq/REG6Sb4gumlc7rh3zc5sH62D -lhh9DrUUOYTxKOkto557HnpyWoOzeW/vtPzQCqVYT0bf+215WfKEIlKuD8z7fDvn -aspHYcN6+NOSBB+4IIThNlQWx0DeO4pz3N/GCUzf7Nr/1FNCocnyYh0igzyXxfkZ -YiesZSLX0zzG5Y6yU8xJzrww/nsOM5D77dIUkR8Hrw== ------END CERTIFICATE----- - -# Issuer: O=SECOM Trust Systems CO.,LTD. OU=Security Communication RootCA2 -# Subject: O=SECOM Trust Systems CO.,LTD. 
OU=Security Communication RootCA2 -# Label: "Security Communication RootCA2" -# Serial: 0 -# MD5 Fingerprint: 6c:39:7d:a4:0e:55:59:b2:3f:d6:41:b1:12:50:de:43 -# SHA1 Fingerprint: 5f:3b:8c:f2:f8:10:b3:7d:78:b4:ce:ec:19:19:c3:73:34:b9:c7:74 -# SHA256 Fingerprint: 51:3b:2c:ec:b8:10:d4:cd:e5:dd:85:39:1a:df:c6:c2:dd:60:d8:7b:b7:36:d2:b5:21:48:4a:a4:7a:0e:be:f6 ------BEGIN CERTIFICATE----- -MIIDdzCCAl+gAwIBAgIBADANBgkqhkiG9w0BAQsFADBdMQswCQYDVQQGEwJKUDEl -MCMGA1UEChMcU0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEnMCUGA1UECxMe -U2VjdXJpdHkgQ29tbXVuaWNhdGlvbiBSb290Q0EyMB4XDTA5MDUyOTA1MDAzOVoX -DTI5MDUyOTA1MDAzOVowXTELMAkGA1UEBhMCSlAxJTAjBgNVBAoTHFNFQ09NIFRy -dXN0IFN5c3RlbXMgQ08uLExURC4xJzAlBgNVBAsTHlNlY3VyaXR5IENvbW11bmlj -YXRpb24gUm9vdENBMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANAV -OVKxUrO6xVmCxF1SrjpDZYBLx/KWvNs2l9amZIyoXvDjChz335c9S672XewhtUGr -zbl+dp+++T42NKA7wfYxEUV0kz1XgMX5iZnK5atq1LXaQZAQwdbWQonCv/Q4EpVM -VAX3NuRFg3sUZdbcDE3R3n4MqzvEFb46VqZab3ZpUql6ucjrappdUtAtCms1FgkQ -hNBqyjoGADdH5H5XTz+L62e4iKrFvlNVspHEfbmwhRkGeC7bYRr6hfVKkaHnFtWO -ojnflLhwHyg/i/xAXmODPIMqGplrz95Zajv8bxbXH/1KEOtOghY6rCcMU/Gt1SSw -awNQwS08Ft1ENCcadfsCAwEAAaNCMEAwHQYDVR0OBBYEFAqFqXdlBZh8QIH4D5cs -OPEK7DzPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3 -DQEBCwUAA4IBAQBMOqNErLlFsceTfsgLCkLfZOoc7llsCLqJX2rKSpWeeo8HxdpF -coJxDjrSzG+ntKEju/Ykn8sX/oymzsLS28yN/HH8AynBbF0zX2S2ZTuJbxh2ePXc -okgfGT+Ok+vx+hfuzU7jBBJV1uXk3fs+BXziHV7Gp7yXT2g69ekuCkO2r1dcYmh8 -t/2jioSgrGK+KwmHNPBqAbubKVY8/gA3zyNs8U6qtnRGEmyR7jTV7JqR50S+kDFy -1UkC9gLl9B/rfNmWVan/7Ir5mUf/NVoCqgTLiluHcSmRvaS0eg29mvVXIwAHIRc/ -SjnRBUkLp7Y3gaVdjKozXoEofKd9J+sAro03 ------END CERTIFICATE----- - -# Issuer: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967 -# Subject: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967 -# Label: "Actalis Authentication Root CA" -# Serial: 6271844772424770508 -# MD5 Fingerprint: 69:c1:0d:4f:07:a3:1b:c3:fe:56:3d:04:bc:11:f6:a6 -# SHA1 Fingerprint: f3:73:b3:87:06:5a:28:84:8a:f2:f3:4a:ce:19:2b:dd:c7:8e:9c:ac -# SHA256 Fingerprint: 55:92:60:84:ec:96:3a:64:b9:6e:2a:be:01:ce:0b:a8:6a:64:fb:fe:bc:c7:aa:b5:af:c1:55:b3:7f:d7:60:66 ------BEGIN CERTIFICATE----- -MIIFuzCCA6OgAwIBAgIIVwoRl0LE48wwDQYJKoZIhvcNAQELBQAwazELMAkGA1UE -BhMCSVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8w -MzM1ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290 -IENBMB4XDTExMDkyMjExMjIwMloXDTMwMDkyMjExMjIwMlowazELMAkGA1UEBhMC -SVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8wMzM1 -ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290IENB -MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAp8bEpSmkLO/lGMWwUKNv -UTufClrJwkg4CsIcoBh/kbWHuUA/3R1oHwiD1S0eiKD4j1aPbZkCkpAW1V8IbInX -4ay8IMKx4INRimlNAJZaby/ARH6jDuSRzVju3PvHHkVH3Se5CAGfpiEd9UEtL0z9 -KK3giq0itFZljoZUj5NDKd45RnijMCO6zfB9E1fAXdKDa0hMxKufgFpbOr3JpyI/ -gCczWw63igxdBzcIy2zSekciRDXFzMwujt0q7bd9Zg1fYVEiVRvjRuPjPdA1Yprb -rxTIW6HMiRvhMCb8oJsfgadHHwTrozmSBp+Z07/T6k9QnBn+locePGX2oxgkg4YQ -51Q+qDp2JE+BIcXjDwL4k5RHILv+1A7TaLndxHqEguNTVHnd25zS8gebLra8Pu2F -be8lEfKXGkJh90qX6IuxEAf6ZYGyojnP9zz/GPvG8VqLWeICrHuS0E4UT1lF9gxe -KF+w6D9Fz8+vm2/7hNN3WpVvrJSEnu68wEqPSpP4RCHiMUVhUE4Q2OM1fEwZtN4F -v6MGn8i1zeQf1xcGDXqVdFUNaBr8EBtiZJ1t4JWgw5QHVw0U5r0F+7if5t+L4sbn -fpb2U8WANFAoWPASUHEXMLrmeGO89LKtmyuy/uE5jF66CyCU3nuDuP/jVo23Eek7 -jPKxwV2dpAtMK9myGPW1n0sCAwEAAaNjMGEwHQYDVR0OBBYEFFLYiDrIn3hm7Ynz -ezhwlMkCAjbQMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUUtiIOsifeGbt -ifN7OHCUyQICNtAwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAL -e3KHwGCmSUyIWOYdiPcUZEim2FgKDk8TNd81HdTtBjHIgT5q1d07GjLukD0R0i70 
-jsNjLiNmsGe+b7bAEzlgqqI0JZN1Ut6nna0Oh4lScWoWPBkdg/iaKWW+9D+a2fDz -WochcYBNy+A4mz+7+uAwTc+G02UQGRjRlwKxK3JCaKygvU5a2hi/a5iB0P2avl4V -SM0RFbnAKVy06Ij3Pjaut2L9HmLecHgQHEhb2rykOLpn7VU+Xlff1ANATIGk0k9j -pwlCCRT8AKnCgHNPLsBA2RF7SOp6AsDT6ygBJlh0wcBzIm2Tlf05fbsq4/aC4yyX -X04fkZT6/iyj2HYauE2yOE+b+h1IYHkm4vP9qdCa6HCPSXrW5b0KDtst842/6+Ok -fcvHlXHo2qN8xcL4dJIEG4aspCJTQLas/kx2z/uUMsA1n3Y/buWQbqCmJqK4LL7R -K4X9p2jIugErsWx0Hbhzlefut8cl8ABMALJ+tguLHPPAUJ4lueAI3jZm/zel0btU -ZCzJJ7VLkn5l/9Mt4blOvH+kQSGQQXemOR/qnuOf0GZvBeyqdn6/axag67XH/JJU -LysRJyU3eExRarDzzFhdFPFqSBX/wge2sY0PjlxQRrM9vwGYT7JZVEc+NHt4bVaT -LnPqZih4zR0Uv6CPLy64Lo7yFIrM6bV8+2ydDKXhlg== ------END CERTIFICATE----- - -# Issuer: CN=Buypass Class 2 Root CA O=Buypass AS-983163327 -# Subject: CN=Buypass Class 2 Root CA O=Buypass AS-983163327 -# Label: "Buypass Class 2 Root CA" -# Serial: 2 -# MD5 Fingerprint: 46:a7:d2:fe:45:fb:64:5a:a8:59:90:9b:78:44:9b:29 -# SHA1 Fingerprint: 49:0a:75:74:de:87:0a:47:fe:58:ee:f6:c7:6b:eb:c6:0b:12:40:99 -# SHA256 Fingerprint: 9a:11:40:25:19:7c:5b:b9:5d:94:e6:3d:55:cd:43:79:08:47:b6:46:b2:3c:df:11:ad:a4:a0:0e:ff:15:fb:48 ------BEGIN CERTIFICATE----- -MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd -MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg -Q2xhc3MgMiBSb290IENBMB4XDTEwMTAyNjA4MzgwM1oXDTQwMTAyNjA4MzgwM1ow -TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw -HgYDVQQDDBdCdXlwYXNzIENsYXNzIDIgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB -BQADggIPADCCAgoCggIBANfHXvfBB9R3+0Mh9PT1aeTuMgHbo4Yf5FkNuud1g1Lr -6hxhFUi7HQfKjK6w3Jad6sNgkoaCKHOcVgb/S2TwDCo3SbXlzwx87vFKu3MwZfPV -L4O2fuPn9Z6rYPnT8Z2SdIrkHJasW4DptfQxh6NR/Md+oW+OU3fUl8FVM5I+GC91 -1K2GScuVr1QGbNgGE41b/+EmGVnAJLqBcXmQRFBoJJRfuLMR8SlBYaNByyM21cHx -MlAQTn/0hpPshNOOvEu/XAFOBz3cFIqUCqTqc/sLUegTBxj6DvEr0VQVfTzh97QZ -QmdiXnfgolXsttlpF9U6r0TtSsWe5HonfOV116rLJeffawrbD02TTqigzXsu8lkB -arcNuAeBfos4GzjmCleZPe4h6KP1DBbdi+w0jpwqHAAVF41og9JwnxgIzRFo1clr -Us3ERo/ctfPYV3Me6ZQ5BL/T3jjetFPsaRyifsSP5BtwrfKi+fv3FmRmaZ9JUaLi -FRhnBkp/1Wy1TbMz4GHrXb7pmA8y1x1LPC5aAVKRCfLf6o3YBkBjqhHk/sM3nhRS -P/TizPJhk9H9Z2vXUq6/aKtAQ6BXNVN48FP4YUIHZMbXb5tMOA1jrGKvNouicwoN -9SG9dKpN6nIDSdvHXx1iY8f93ZHsM+71bbRuMGjeyNYmsHVee7QHIJihdjK4TWxP -AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMmAd+BikoL1Rpzz -uvdMw964o605MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAU18h -9bqwOlI5LJKwbADJ784g7wbylp7ppHR/ehb8t/W2+xUbP6umwHJdELFx7rxP462s -A20ucS6vxOOto70MEae0/0qyexAQH6dXQbLArvQsWdZHEIjzIVEpMMpghq9Gqx3t -OluwlN5E40EIosHsHdb9T7bWR9AUC8rmyrV7d35BH16Dx7aMOZawP5aBQW9gkOLo -+fsicdl9sz1Gv7SEr5AcD48Saq/v7h56rgJKihcrdv6sVIkkLE8/trKnToyokZf7 -KcZ7XC25y2a2t6hbElGFtQl+Ynhw/qlqYLYdDnkM/crqJIByw5c/8nerQyIKx+u2 -DISCLIBrQYoIwOula9+ZEsuK1V6ADJHgJgg2SMX6OBE1/yWDLfJ6v9r9jv6ly0Us -H8SIU653DtmadsWOLB2jutXsMq7Aqqz30XpN69QH4kj3Io6wpJ9qzo6ysmD0oyLQ -I+uUWnpp3Q+/QFesa1lQ2aOZ4W7+jQF5JyMV3pKdewlNWudLSDBaGOYKbeaP4NK7 -5t98biGCwWg5TbSYWGZizEqQXsP6JwSxeRV0mcy+rSDeJmAc61ZRpqPq5KM/p/9h -3PFaTWwyI0PurKju7koSCTxdccK+efrCh2gdC/1cacwG0Jp9VJkqyTkaGa9LKkPz -Y11aWOIv4x3kqdbQCtCev9eBCfHJxyYNrJgWVqA= ------END CERTIFICATE----- - -# Issuer: CN=Buypass Class 3 Root CA O=Buypass AS-983163327 -# Subject: CN=Buypass Class 3 Root CA O=Buypass AS-983163327 -# Label: "Buypass Class 3 Root CA" -# Serial: 2 -# MD5 Fingerprint: 3d:3b:18:9e:2c:64:5a:e8:d5:88:ce:0e:f9:37:c2:ec -# SHA1 Fingerprint: da:fa:f7:fa:66:84:ec:06:8f:14:50:bd:c7:c2:81:a5:bc:a9:64:57 -# SHA256 Fingerprint: ed:f7:eb:bc:a2:7a:2a:38:4d:38:7b:7d:40:10:c6:66:e2:ed:b4:84:3e:4c:29:b4:ae:1d:5b:93:32:e6:b2:4d ------BEGIN CERTIFICATE----- 
-MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd -MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg -Q2xhc3MgMyBSb290IENBMB4XDTEwMTAyNjA4Mjg1OFoXDTQwMTAyNjA4Mjg1OFow -TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw -HgYDVQQDDBdCdXlwYXNzIENsYXNzIDMgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB -BQADggIPADCCAgoCggIBAKXaCpUWUOOV8l6ddjEGMnqb8RB2uACatVI2zSRHsJ8Y -ZLya9vrVediQYkwiL944PdbgqOkcLNt4EemOaFEVcsfzM4fkoF0LXOBXByow9c3E -N3coTRiR5r/VUv1xLXA+58bEiuPwKAv0dpihi4dVsjoT/Lc+JzeOIuOoTyrvYLs9 -tznDDgFHmV0ST9tD+leh7fmdvhFHJlsTmKtdFoqwNxxXnUX/iJY2v7vKB3tvh2PX -0DJq1l1sDPGzbjniazEuOQAnFN44wOwZZoYS6J1yFhNkUsepNxz9gjDthBgd9K5c -/3ATAOux9TN6S9ZV+AWNS2mw9bMoNlwUxFFzTWsL8TQH2xc519woe2v1n/MuwU8X -KhDzzMro6/1rqy6any2CbgTUUgGTLT2G/H783+9CHaZr77kgxve9oKeV/afmiSTY -zIw0bOIjL9kSGiG5VZFvC5F5GQytQIgLcOJ60g7YaEi7ghM5EFjp2CoHxhLbWNvS -O1UQRwUVZ2J+GGOmRj8JDlQyXr8NYnon74Do29lLBlo3WiXQCBJ31G8JUJc9yB3D -34xFMFbG02SrZvPAXpacw8Tvw3xrizp5f7NJzz3iiZ+gMEuFuZyUJHmPfWupRWgP -K9Dx2hzLabjKSWJtyNBjYt1gD1iqj6G8BaVmos8bdrKEZLFMOVLAMLrwjEsCsLa3 -AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFEe4zf/lb+74suwv -Tg75JbCOPGvDMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAACAj -QTUEkMJAYmDv4jVM1z+s4jSQuKFvdvoWFqRINyzpkMLyPPgKn9iB5btb2iUspKdV -cSQy9sgL8rxq+JOssgfCX5/bzMiKqr5qb+FJEMwx14C7u8jYog5kV+qi9cKpMRXS -IGrs/CIBKM+GuIAeqcwRpTzyFrNHnfzSgCHEy9BHcEGhyoMZCCxt8l13nIoUE9Q2 -HJLw5QY33KbmkJs4j1xrG0aGQ0JfPgEHU1RdZX33inOhmlRaHylDFCfChQ+1iHsa -O5S3HWCntZznKWlXWpuTekMwGwPXYshApqr8ZORK15FTAaggiG6cX0S5y2CBNOxv -033aSF/rtJC8LakcC6wc1aJoIIAE1vyxjy+7SjENSoYc6+I2KSb12tjE8nVhz36u -dmNKekBlk4f4HoCMhuWG1o8O/FMsYOgWYRqiPkN7zTlgVGr18okmAWiDSKIz6MkE -kbIRNBE+6tBDGR8Dk5AM/1E9V/RBbuHLoL7ryWPNbczk+DaqaJ3tvV2XcEQNtg41 -3OEMXbugUZTLfhbrES+jkkXITHHZvMmZUldGL1DPvTVp9D0VzgalLA8+9oG6lLvD -u79leNKGef9JOxqDDPDeeOzI8k1MGt6CKfjBWtrt7uYnXuhF0J0cUahoq0Tj0Itq -4/g7u9xN12TyUb7mqqta6THuBrxzvxNiCp/HuZc= ------END CERTIFICATE----- - -# Issuer: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center -# Subject: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center -# Label: "T-TeleSec GlobalRoot Class 3" -# Serial: 1 -# MD5 Fingerprint: ca:fb:40:a8:4e:39:92:8a:1d:fe:8e:2f:c4:27:ea:ef -# SHA1 Fingerprint: 55:a6:72:3e:cb:f2:ec:cd:c3:23:74:70:19:9d:2a:be:11:e3:81:d1 -# SHA256 Fingerprint: fd:73:da:d3:1c:64:4f:f1:b4:3b:ef:0c:cd:da:96:71:0b:9c:d9:87:5e:ca:7e:31:70:7a:f3:e9:6d:52:2b:bd ------BEGIN CERTIFICATE----- -MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx -KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd -BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl -YyBHbG9iYWxSb290IENsYXNzIDMwHhcNMDgxMDAxMTAyOTU2WhcNMzMxMDAxMjM1 -OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy -aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50 -ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDMwggEiMA0G -CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC9dZPwYiJvJK7genasfb3ZJNW4t/zN -8ELg63iIVl6bmlQdTQyK9tPPcPRStdiTBONGhnFBSivwKixVA9ZIw+A5OO3yXDw/ -RLyTPWGrTs0NvvAgJ1gORH8EGoel15YUNpDQSXuhdfsaa3Ox+M6pCSzyU9XDFES4 -hqX2iys52qMzVNn6chr3IhUciJFrf2blw2qAsCTz34ZFiP0Zf3WHHx+xGwpzJFu5 -ZeAsVMhg02YXP+HMVDNzkQI6pn97djmiH5a2OK61yJN0HZ65tOVgnS9W0eDrXltM -EnAMbEQgqxHY9Bn20pxSN+f6tsIxO0rUFJmtxxr1XV/6B7h8DR/Wgx6zAgMBAAGj -QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS1 -A/d2O2GCahKqGFPrAyGUv/7OyjANBgkqhkiG9w0BAQsFAAOCAQEAVj3vlNW92nOy -WL6ukK2YJ5f+AbGwUgC4TeQbIXQbfsDuXmkqJa9c1h3a0nnJ85cp4IaH3gRZD/FZ 
-1GSFS5mvJQQeyUapl96Cshtwn5z2r3Ex3XsFpSzTucpH9sry9uetuUg/vBa3wW30 -6gmv7PO15wWeph6KU1HWk4HMdJP2udqmJQV0eVp+QD6CSyYRMG7hP0HHRwA11fXT -91Q+gT3aSWqas+8QPebrb9HIIkfLzM8BMZLZGOMivgkeGj5asuRrDFR6fUNOuIml -e9eiPZaGzPImNC1qkp2aGtAw4l1OBLBfiyB+d8E9lYLRRpo7PHi4b6HQDWSieB4p -TpPDpFQUWw== ------END CERTIFICATE----- - -# Issuer: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH -# Subject: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH -# Label: "D-TRUST Root Class 3 CA 2 2009" -# Serial: 623603 -# MD5 Fingerprint: cd:e0:25:69:8d:47:ac:9c:89:35:90:f7:fd:51:3d:2f -# SHA1 Fingerprint: 58:e8:ab:b0:36:15:33:fb:80:f7:9b:1b:6d:29:d3:ff:8d:5f:00:f0 -# SHA256 Fingerprint: 49:e7:a4:42:ac:f0:ea:62:87:05:00:54:b5:25:64:b6:50:e4:f4:9e:42:e3:48:d6:aa:38:e0:39:e9:57:b1:c1 ------BEGIN CERTIFICATE----- -MIIEMzCCAxugAwIBAgIDCYPzMA0GCSqGSIb3DQEBCwUAME0xCzAJBgNVBAYTAkRF -MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMMHkQtVFJVU1QgUm9vdCBD -bGFzcyAzIENBIDIgMjAwOTAeFw0wOTExMDUwODM1NThaFw0yOTExMDUwODM1NTha -ME0xCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMM -HkQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgMjAwOTCCASIwDQYJKoZIhvcNAQEB -BQADggEPADCCAQoCggEBANOySs96R+91myP6Oi/WUEWJNTrGa9v+2wBoqOADER03 -UAifTUpolDWzU9GUY6cgVq/eUXjsKj3zSEhQPgrfRlWLJ23DEE0NkVJD2IfgXU42 -tSHKXzlABF9bfsyjxiupQB7ZNoTWSPOSHjRGICTBpFGOShrvUD9pXRl/RcPHAY9R -ySPocq60vFYJfxLLHLGvKZAKyVXMD9O0Gu1HNVpK7ZxzBCHQqr0ME7UAyiZsxGsM -lFqVlNpQmvH/pStmMaTJOKDfHR+4CS7zp+hnUquVH+BGPtikw8paxTGA6Eian5Rp -/hnd2HN8gcqW3o7tszIFZYQ05ub9VxC1X3a/L7AQDcUCAwEAAaOCARowggEWMA8G -A1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFP3aFMSfMN4hvR5COfyrYyNJ4PGEMA4G -A1UdDwEB/wQEAwIBBjCB0wYDVR0fBIHLMIHIMIGAoH6gfIZ6bGRhcDovL2RpcmVj -dG9yeS5kLXRydXN0Lm5ldC9DTj1ELVRSVVNUJTIwUm9vdCUyMENsYXNzJTIwMyUy -MENBJTIwMiUyMDIwMDksTz1ELVRydXN0JTIwR21iSCxDPURFP2NlcnRpZmljYXRl -cmV2b2NhdGlvbmxpc3QwQ6BBoD+GPWh0dHA6Ly93d3cuZC10cnVzdC5uZXQvY3Js -L2QtdHJ1c3Rfcm9vdF9jbGFzc18zX2NhXzJfMjAwOS5jcmwwDQYJKoZIhvcNAQEL -BQADggEBAH+X2zDI36ScfSF6gHDOFBJpiBSVYEQBrLLpME+bUMJm2H6NMLVwMeni -acfzcNsgFYbQDfC+rAF1hM5+n02/t2A7nPPKHeJeaNijnZflQGDSNiH+0LS4F9p0 -o3/U37CYAqxva2ssJSRyoWXuJVrl5jLn8t+rSfrzkGkj2wTZ51xY/GXUl77M/C4K -zCUqNQT4YJEVdT1B/yMfGchs64JTBKbkTCJNjYy6zltz7GRUUG3RnFX7acM2w4y8 -PIWmawomDeCTmGCufsYkl4phX5GOZpIJhzbNi5stPvZR1FDUWSi9g/LMKHtThm3Y -Johw1+qRzT65ysCQblrGXnRl11z+o+I= ------END CERTIFICATE----- - -# Issuer: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH -# Subject: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH -# Label: "D-TRUST Root Class 3 CA 2 EV 2009" -# Serial: 623604 -# MD5 Fingerprint: aa:c6:43:2c:5e:2d:cd:c4:34:c0:50:4f:11:02:4f:b6 -# SHA1 Fingerprint: 96:c9:1b:0b:95:b4:10:98:42:fa:d0:d8:22:79:fe:60:fa:b9:16:83 -# SHA256 Fingerprint: ee:c5:49:6b:98:8c:e9:86:25:b9:34:09:2e:ec:29:08:be:d0:b0:f3:16:c2:d4:73:0c:84:ea:f1:f3:d3:48:81 ------BEGIN CERTIFICATE----- -MIIEQzCCAyugAwIBAgIDCYP0MA0GCSqGSIb3DQEBCwUAMFAxCzAJBgNVBAYTAkRF -MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNVBAMMIUQtVFJVU1QgUm9vdCBD -bGFzcyAzIENBIDIgRVYgMjAwOTAeFw0wOTExMDUwODUwNDZaFw0yOTExMDUwODUw -NDZaMFAxCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNV -BAMMIUQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgRVYgMjAwOTCCASIwDQYJKoZI -hvcNAQEBBQADggEPADCCAQoCggEBAJnxhDRwui+3MKCOvXwEz75ivJn9gpfSegpn -ljgJ9hBOlSJzmY3aFS3nBfwZcyK3jpgAvDw9rKFs+9Z5JUut8Mxk2og+KbgPCdM0 -3TP1YtHhzRnp7hhPTFiu4h7WDFsVWtg6uMQYZB7jM7K1iXdODL/ZlGsTl28So/6Z -qQTMFexgaDbtCHu39b+T7WYxg4zGcTSHThfqr4uRjRxWQa4iN1438h3Z0S0NL2lR -p75mpoo6Kr3HGrHhFPC+Oh25z1uxav60sUYgovseO3Dvk5h9jHOW8sXvhXCtKSb8 -HgQ+HKDYD8tSg2J87otTlZCpV6LqYQXY+U3EJ/pure3511H3a6UCAwEAAaOCASQw 
-ggEgMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFNOUikxiEyoZLsyvcop9Ntea -HNxnMA4GA1UdDwEB/wQEAwIBBjCB3QYDVR0fBIHVMIHSMIGHoIGEoIGBhn9sZGFw -Oi8vZGlyZWN0b3J5LmQtdHJ1c3QubmV0L0NOPUQtVFJVU1QlMjBSb290JTIwQ2xh -c3MlMjAzJTIwQ0ElMjAyJTIwRVYlMjAyMDA5LE89RC1UcnVzdCUyMEdtYkgsQz1E -RT9jZXJ0aWZpY2F0ZXJldm9jYXRpb25saXN0MEagRKBChkBodHRwOi8vd3d3LmQt -dHJ1c3QubmV0L2NybC9kLXRydXN0X3Jvb3RfY2xhc3NfM19jYV8yX2V2XzIwMDku -Y3JsMA0GCSqGSIb3DQEBCwUAA4IBAQA07XtaPKSUiO8aEXUHL7P+PPoeUSbrh/Yp -3uDx1MYkCenBz1UbtDDZzhr+BlGmFaQt77JLvyAoJUnRpjZ3NOhk31KxEcdzes05 -nsKtjHEh8lprr988TlWvsoRlFIm5d8sqMb7Po23Pb0iUMkZv53GMoKaEGTcH8gNF -CSuGdXzfX2lXANtu2KZyIktQ1HWYVt+3GP9DQ1CuekR78HlR10M9p9OB0/DJT7na -xpeG0ILD5EJt/rDiZE4OJudANCa1CInXCGNjOCd1HjPqbqjdn5lPdE2BiYBL3ZqX -KVwvvoFBuYz/6n1gBp7N1z3TLqMVvKjmJuVvw9y4AyHqnxbxLFS1 ------END CERTIFICATE----- - -# Issuer: CN=CA Disig Root R2 O=Disig a.s. -# Subject: CN=CA Disig Root R2 O=Disig a.s. -# Label: "CA Disig Root R2" -# Serial: 10572350602393338211 -# MD5 Fingerprint: 26:01:fb:d8:27:a7:17:9a:45:54:38:1a:43:01:3b:03 -# SHA1 Fingerprint: b5:61:eb:ea:a4:de:e4:25:4b:69:1a:98:a5:57:47:c2:34:c7:d9:71 -# SHA256 Fingerprint: e2:3d:4a:03:6d:7b:70:e9:f5:95:b1:42:20:79:d2:b9:1e:df:bb:1f:b6:51:a0:63:3e:aa:8a:9d:c5:f8:07:03 ------BEGIN CERTIFICATE----- -MIIFaTCCA1GgAwIBAgIJAJK4iNuwisFjMA0GCSqGSIb3DQEBCwUAMFIxCzAJBgNV -BAYTAlNLMRMwEQYDVQQHEwpCcmF0aXNsYXZhMRMwEQYDVQQKEwpEaXNpZyBhLnMu -MRkwFwYDVQQDExBDQSBEaXNpZyBSb290IFIyMB4XDTEyMDcxOTA5MTUzMFoXDTQy -MDcxOTA5MTUzMFowUjELMAkGA1UEBhMCU0sxEzARBgNVBAcTCkJyYXRpc2xhdmEx -EzARBgNVBAoTCkRpc2lnIGEucy4xGTAXBgNVBAMTEENBIERpc2lnIFJvb3QgUjIw -ggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCio8QACdaFXS1tFPbCw3Oe -NcJxVX6B+6tGUODBfEl45qt5WDza/3wcn9iXAng+a0EE6UG9vgMsRfYvZNSrXaNH -PWSb6WiaxswbP7q+sos0Ai6YVRn8jG+qX9pMzk0DIaPY0jSTVpbLTAwAFjxfGs3I -x2ymrdMxp7zo5eFm1tL7A7RBZckQrg4FY8aAamkw/dLukO8NJ9+flXP04SXabBbe -QTg06ov80egEFGEtQX6sx3dOy1FU+16SGBsEWmjGycT6txOgmLcRK7fWV8x8nhfR -yyX+hk4kLlYMeE2eARKmK6cBZW58Yh2EhN/qwGu1pSqVg8NTEQxzHQuyRpDRQjrO -QG6Vrf/GlK1ul4SOfW+eioANSW1z4nuSHsPzwfPrLgVv2RvPN3YEyLRa5Beny912 -H9AZdugsBbPWnDTYltxhh5EF5EQIM8HauQhl1K6yNg3ruji6DOWbnuuNZt2Zz9aJ -QfYEkoopKW1rOhzndX0CcQ7zwOe9yxndnWCywmZgtrEE7snmhrmaZkCo5xHtgUUD -i/ZnWejBBhG93c+AAk9lQHhcR1DIm+YfgXvkRKhbhZri3lrVx/k6RGZL5DJUfORs -nLMOPReisjQS1n6yqEm70XooQL6iFh/f5DcfEXP7kAplQ6INfPgGAVUzfbANuPT1 -rqVCV3w2EYx7XsQDnYx5nQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1Ud -DwEB/wQEAwIBBjAdBgNVHQ4EFgQUtZn4r7CU9eMg1gqtzk5WpC5uQu0wDQYJKoZI -hvcNAQELBQADggIBACYGXnDnZTPIgm7ZnBc6G3pmsgH2eDtpXi/q/075KMOYKmFM -tCQSin1tERT3nLXK5ryeJ45MGcipvXrA1zYObYVybqjGom32+nNjf7xueQgcnYqf -GopTpti72TVVsRHFqQOzVju5hJMiXn7B9hJSi+osZ7z+Nkz1uM/Rs0mSO9MpDpkb -lvdhuDvEK7Z4bLQjb/D907JedR+Zlais9trhxTF7+9FGs9K8Z7RiVLoJ92Owk6Ka -+elSLotgEqv89WBW7xBci8QaQtyDW2QOy7W81k/BfDxujRNt+3vrMNDcTa/F1bal -TFtxyegxvug4BkihGuLq0t4SOVga/4AOgnXmt8kHbA7v/zjxmHHEt38OFdAlab0i -nSvtBfZGR6ztwPDUO+Ls7pZbkBNOHlY667DvlruWIxG68kOGdGSVyCh13x01utI3 -gzhTODY7z2zp+WsO0PsE6E9312UBeIYMej4hYvF/Y3EMyZ9E26gnonW+boE+18Dr -G5gPcFw0sorMwIUY6256s/daoQe/qUKS82Ail+QUoQebTnbAjn39pCXHR+3/H3Os -zMOl6W8KjptlwlCFtaOgUxLMVYdh84GuEEZhvUQhuMI9dM9+JDX6HAcOmz0iyu8x -L4ysEr3vQCj8KWefshNPZiTEUxnpHikV7+ZtsH8tZ/3zbBt1RqPlShfppNcL ------END CERTIFICATE----- - -# Issuer: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV -# Subject: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV -# Label: "ACCVRAIZ1" -# Serial: 6828503384748696800 -# MD5 Fingerprint: d0:a0:5a:ee:05:b6:09:94:21:a1:7d:f1:b2:29:82:02 -# SHA1 Fingerprint: 93:05:7a:88:15:c6:4f:ce:88:2f:fa:91:16:52:28:78:bc:53:64:17 -# SHA256 Fingerprint: 
9a:6e:c0:12:e1:a7:da:9d:be:34:19:4d:47:8a:d7:c0:db:18:22:fb:07:1d:f1:29:81:49:6e:d1:04:38:41:13 ------BEGIN CERTIFICATE----- -MIIH0zCCBbugAwIBAgIIXsO3pkN/pOAwDQYJKoZIhvcNAQEFBQAwQjESMBAGA1UE -AwwJQUNDVlJBSVoxMRAwDgYDVQQLDAdQS0lBQ0NWMQ0wCwYDVQQKDARBQ0NWMQsw -CQYDVQQGEwJFUzAeFw0xMTA1MDUwOTM3MzdaFw0zMDEyMzEwOTM3MzdaMEIxEjAQ -BgNVBAMMCUFDQ1ZSQUlaMTEQMA4GA1UECwwHUEtJQUNDVjENMAsGA1UECgwEQUND -VjELMAkGA1UEBhMCRVMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCb -qau/YUqXry+XZpp0X9DZlv3P4uRm7x8fRzPCRKPfmt4ftVTdFXxpNRFvu8gMjmoY -HtiP2Ra8EEg2XPBjs5BaXCQ316PWywlxufEBcoSwfdtNgM3802/J+Nq2DoLSRYWo -G2ioPej0RGy9ocLLA76MPhMAhN9KSMDjIgro6TenGEyxCQ0jVn8ETdkXhBilyNpA -lHPrzg5XPAOBOp0KoVdDaaxXbXmQeOW1tDvYvEyNKKGno6e6Ak4l0Squ7a4DIrhr -IA8wKFSVf+DuzgpmndFALW4ir50awQUZ0m/A8p/4e7MCQvtQqR0tkw8jq8bBD5L/ -0KIV9VMJcRz/RROE5iZe+OCIHAr8Fraocwa48GOEAqDGWuzndN9wrqODJerWx5eH -k6fGioozl2A3ED6XPm4pFdahD9GILBKfb6qkxkLrQaLjlUPTAYVtjrs78yM2x/47 -4KElB0iryYl0/wiPgL/AlmXz7uxLaL2diMMxs0Dx6M/2OLuc5NF/1OVYm3z61PMO -m3WR5LpSLhl+0fXNWhn8ugb2+1KoS5kE3fj5tItQo05iifCHJPqDQsGH+tUtKSpa -cXpkatcnYGMN285J9Y0fkIkyF/hzQ7jSWpOGYdbhdQrqeWZ2iE9x6wQl1gpaepPl -uUsXQA+xtrn13k/c4LOsOxFwYIRKQ26ZIMApcQrAZQIDAQABo4ICyzCCAscwfQYI -KwYBBQUHAQEEcTBvMEwGCCsGAQUFBzAChkBodHRwOi8vd3d3LmFjY3YuZXMvZmls -ZWFkbWluL0FyY2hpdm9zL2NlcnRpZmljYWRvcy9yYWl6YWNjdjEuY3J0MB8GCCsG -AQUFBzABhhNodHRwOi8vb2NzcC5hY2N2LmVzMB0GA1UdDgQWBBTSh7Tj3zcnk1X2 -VuqB5TbMjB4/vTAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFNKHtOPfNyeT -VfZW6oHlNsyMHj+9MIIBcwYDVR0gBIIBajCCAWYwggFiBgRVHSAAMIIBWDCCASIG -CCsGAQUFBwICMIIBFB6CARAAQQB1AHQAbwByAGkAZABhAGQAIABkAGUAIABDAGUA -cgB0AGkAZgBpAGMAYQBjAGkA8wBuACAAUgBhAO0AegAgAGQAZQAgAGwAYQAgAEEA -QwBDAFYAIAAoAEEAZwBlAG4AYwBpAGEAIABkAGUAIABUAGUAYwBuAG8AbABvAGcA -7QBhACAAeQAgAEMAZQByAHQAaQBmAGkAYwBhAGMAaQDzAG4AIABFAGwAZQBjAHQA -cgDzAG4AaQBjAGEALAAgAEMASQBGACAAUQA0ADYAMAAxADEANQA2AEUAKQAuACAA -QwBQAFMAIABlAG4AIABoAHQAdABwADoALwAvAHcAdwB3AC4AYQBjAGMAdgAuAGUA -czAwBggrBgEFBQcCARYkaHR0cDovL3d3dy5hY2N2LmVzL2xlZ2lzbGFjaW9uX2Mu -aHRtMFUGA1UdHwROMEwwSqBIoEaGRGh0dHA6Ly93d3cuYWNjdi5lcy9maWxlYWRt -aW4vQXJjaGl2b3MvY2VydGlmaWNhZG9zL3JhaXphY2N2MV9kZXIuY3JsMA4GA1Ud -DwEB/wQEAwIBBjAXBgNVHREEEDAOgQxhY2N2QGFjY3YuZXMwDQYJKoZIhvcNAQEF -BQADggIBAJcxAp/n/UNnSEQU5CmH7UwoZtCPNdpNYbdKl02125DgBS4OxnnQ8pdp -D70ER9m+27Up2pvZrqmZ1dM8MJP1jaGo/AaNRPTKFpV8M9xii6g3+CfYCS0b78gU -JyCpZET/LtZ1qmxNYEAZSUNUY9rizLpm5U9EelvZaoErQNV/+QEnWCzI7UiRfD+m -AM/EKXMRNt6GGT6d7hmKG9Ww7Y49nCrADdg9ZuM8Db3VlFzi4qc1GwQA9j9ajepD -vV+JHanBsMyZ4k0ACtrJJ1vnE5Bc5PUzolVt3OAJTS+xJlsndQAJxGJ3KQhfnlms -tn6tn1QwIgPBHnFk/vk4CpYY3QIUrCPLBhwepH2NDd4nQeit2hW3sCPdK6jT2iWH -7ehVRE2I9DZ+hJp4rPcOVkkO1jMl1oRQQmwgEh0q1b688nCBpHBgvgW1m54ERL5h -I6zppSSMEYCUWqKiuUnSwdzRp+0xESyeGabu4VXhwOrPDYTkF7eifKXeVSUG7szA -h1xA2syVP1XgNce4hL60Xc16gwFy7ofmXx2utYXGJt/mwZrpHgJHnyqobalbz+xF -d3+YJ5oyXSrjhO7FmGYvliAd3djDJ9ew+f7Zfc3Qn48LFFhRny+Lwzgt3uiP1o2H -pPVWQxaZLPSkVrQ0uGE3ycJYgBugl6H8WY3pEfbRD0tVNEYqi4Y7 ------END CERTIFICATE----- - -# Issuer: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA -# Subject: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA -# Label: "TWCA Global Root CA" -# Serial: 3262 -# MD5 Fingerprint: f9:03:7e:cf:e6:9e:3c:73:7a:2a:90:07:69:ff:2b:96 -# SHA1 Fingerprint: 9c:bb:48:53:f6:a4:f6:d3:52:a4:e8:32:52:55:60:13:f5:ad:af:65 -# SHA256 Fingerprint: 59:76:90:07:f7:68:5d:0f:cd:50:87:2f:9f:95:d5:75:5a:5b:2b:45:7d:81:f3:69:2b:61:0a:98:67:2f:0e:1b ------BEGIN CERTIFICATE----- -MIIFQTCCAymgAwIBAgICDL4wDQYJKoZIhvcNAQELBQAwUTELMAkGA1UEBhMCVFcx -EjAQBgNVBAoTCVRBSVdBTi1DQTEQMA4GA1UECxMHUm9vdCBDQTEcMBoGA1UEAxMT 
-VFdDQSBHbG9iYWwgUm9vdCBDQTAeFw0xMjA2MjcwNjI4MzNaFw0zMDEyMzExNTU5 -NTlaMFExCzAJBgNVBAYTAlRXMRIwEAYDVQQKEwlUQUlXQU4tQ0ExEDAOBgNVBAsT -B1Jvb3QgQ0ExHDAaBgNVBAMTE1RXQ0EgR2xvYmFsIFJvb3QgQ0EwggIiMA0GCSqG -SIb3DQEBAQUAA4ICDwAwggIKAoICAQCwBdvI64zEbooh745NnHEKH1Jw7W2CnJfF -10xORUnLQEK1EjRsGcJ0pDFfhQKX7EMzClPSnIyOt7h52yvVavKOZsTuKwEHktSz -0ALfUPZVr2YOy+BHYC8rMjk1Ujoog/h7FsYYuGLWRyWRzvAZEk2tY/XTP3VfKfCh -MBwqoJimFb3u/Rk28OKRQ4/6ytYQJ0lM793B8YVwm8rqqFpD/G2Gb3PpN0Wp8DbH -zIh1HrtsBv+baz4X7GGqcXzGHaL3SekVtTzWoWH1EfcFbx39Eb7QMAfCKbAJTibc -46KokWofwpFFiFzlmLhxpRUZyXx1EcxwdE8tmx2RRP1WKKD+u4ZqyPpcC1jcxkt2 -yKsi2XMPpfRaAok/T54igu6idFMqPVMnaR1sjjIsZAAmY2E2TqNGtz99sy2sbZCi -laLOz9qC5wc0GZbpuCGqKX6mOL6OKUohZnkfs8O1CWfe1tQHRvMq2uYiN2DLgbYP -oA/pyJV/v1WRBXrPPRXAb94JlAGD1zQbzECl8LibZ9WYkTunhHiVJqRaCPgrdLQA -BDzfuBSO6N+pjWxnkjMdwLfS7JLIvgm/LCkFbwJrnu+8vyq8W8BQj0FwcYeyTbcE -qYSjMq+u7msXi7Kx/mzhkIyIqJdIzshNy/MGz19qCkKxHh53L46g5pIOBvwFItIm -4TFRfTLcDwIDAQABoyMwITAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB -/zANBgkqhkiG9w0BAQsFAAOCAgEAXzSBdu+WHdXltdkCY4QWwa6gcFGn90xHNcgL -1yg9iXHZqjNB6hQbbCEAwGxCGX6faVsgQt+i0trEfJdLjbDorMjupWkEmQqSpqsn -LhpNgb+E1HAerUf+/UqdM+DyucRFCCEK2mlpc3INvjT+lIutwx4116KD7+U4x6WF -H6vPNOw/KP4M8VeGTslV9xzU2KV9Bnpv1d8Q34FOIWWxtuEXeZVFBs5fzNxGiWNo -RI2T9GRwoD2dKAXDOXC4Ynsg/eTb6QihuJ49CcdP+yz4k3ZB3lLg4VfSnQO8d57+ -nile98FRYB/e2guyLXW3Q0iT5/Z5xoRdgFlglPx4mI88k1HtQJAH32RjJMtOcQWh -15QaiDLxInQirqWm2BJpTGCjAu4r7NRjkgtevi92a6O2JryPA9gK8kxkRr05YuWW -6zRjESjMlfGt7+/cgFhI6Uu46mWs6fyAtbXIRfmswZ/ZuepiiI7E8UuDEq3mi4TW -nsLrgxifarsbJGAzcMzs9zLzXNl5fe+epP7JI8Mk7hWSsT2RTyaGvWZzJBPqpK5j -wa19hAM8EHiGG3njxPPyBJUgriOCxLM6AGK/5jYk4Ve6xx6QddVfP5VhK8E7zeWz -aGHQRiapIVJpLesux+t3zqY6tQMzT3bR51xUAV3LePTJDL/PEo4XLSNolOer/qmy -KwbQBM0= ------END CERTIFICATE----- - -# Issuer: CN=TeliaSonera Root CA v1 O=TeliaSonera -# Subject: CN=TeliaSonera Root CA v1 O=TeliaSonera -# Label: "TeliaSonera Root CA v1" -# Serial: 199041966741090107964904287217786801558 -# MD5 Fingerprint: 37:41:49:1b:18:56:9a:26:f5:ad:c2:66:fb:40:a5:4c -# SHA1 Fingerprint: 43:13:bb:96:f1:d5:86:9b:c1:4e:6a:92:f6:cf:f6:34:69:87:82:37 -# SHA256 Fingerprint: dd:69:36:fe:21:f8:f0:77:c1:23:a1:a5:21:c1:22:24:f7:22:55:b7:3e:03:a7:26:06:93:e8:a2:4b:0f:a3:89 ------BEGIN CERTIFICATE----- -MIIFODCCAyCgAwIBAgIRAJW+FqD3LkbxezmCcvqLzZYwDQYJKoZIhvcNAQEFBQAw -NzEUMBIGA1UECgwLVGVsaWFTb25lcmExHzAdBgNVBAMMFlRlbGlhU29uZXJhIFJv -b3QgQ0EgdjEwHhcNMDcxMDE4MTIwMDUwWhcNMzIxMDE4MTIwMDUwWjA3MRQwEgYD -VQQKDAtUZWxpYVNvbmVyYTEfMB0GA1UEAwwWVGVsaWFTb25lcmEgUm9vdCBDQSB2 -MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMK+6yfwIaPzaSZVfp3F -VRaRXP3vIb9TgHot0pGMYzHw7CTww6XScnwQbfQ3t+XmfHnqjLWCi65ItqwA3GV1 -7CpNX8GH9SBlK4GoRz6JI5UwFpB/6FcHSOcZrr9FZ7E3GwYq/t75rH2D+1665I+X -Z75Ljo1kB1c4VWk0Nj0TSO9P4tNmHqTPGrdeNjPUtAa9GAH9d4RQAEX1jF3oI7x+ -/jXh7VB7qTCNGdMJjmhnXb88lxhTuylixcpecsHHltTbLaC0H2kD7OriUPEMPPCs -81Mt8Bz17Ww5OXOAFshSsCPN4D7c3TxHoLs1iuKYaIu+5b9y7tL6pe0S7fyYGKkm -dtwoSxAgHNN/Fnct7W+A90m7UwW7XWjH1Mh1Fj+JWov3F0fUTPHSiXk+TT2YqGHe -Oh7S+F4D4MHJHIzTjU3TlTazN19jY5szFPAtJmtTfImMMsJu7D0hADnJoWjiUIMu -sDor8zagrC/kb2HCUQk5PotTubtn2txTuXZZNp1D5SDgPTJghSJRt8czu90VL6R4 -pgd7gUY2BIbdeTXHlSw7sKMXNeVzH7RcWe/a6hBle3rQf5+ztCo3O3CLm1u5K7fs -slESl1MpWtTwEhDcTwK7EpIvYtQ/aUN8Ddb8WHUBiJ1YFkveupD/RwGJBmr2X7KQ -arMCpgKIv7NHfirZ1fpoeDVNAgMBAAGjPzA9MA8GA1UdEwEB/wQFMAMBAf8wCwYD -VR0PBAQDAgEGMB0GA1UdDgQWBBTwj1k4ALP1j5qWDNXr+nuqF+gTEjANBgkqhkiG -9w0BAQUFAAOCAgEAvuRcYk4k9AwI//DTDGjkk0kiP0Qnb7tt3oNmzqjMDfz1mgbl -dxSR651Be5kqhOX//CHBXfDkH1e3damhXwIm/9fH907eT/j3HEbAek9ALCI18Bmx 
-0GtnLLCo4MBANzX2hFxc469CeP6nyQ1Q6g2EdvZR74NTxnr/DlZJLo961gzmJ1Tj -TQpgcmLNkQfWpb/ImWvtxBnmq0wROMVvMeJuScg/doAmAyYp4Db29iBT4xdwNBed -Y2gea+zDTYa4EzAvXUYNR0PVG6pZDrlcjQZIrXSHX8f8MVRBE+LHIQ6e4B4N4cB7 -Q4WQxYpYxmUKeFfyxiMPAdkgS94P+5KFdSpcc41teyWRyu5FrgZLAMzTsVlQ2jqI -OylDRl6XK1TOU2+NSueW+r9xDkKLfP0ooNBIytrEgUy7onOTJsjrDNYmiLbAJM+7 -vVvrdX3pCI6GMyx5dwlppYn8s3CQh3aP0yK7Qs69cwsgJirQmz1wHiRszYd2qReW -t88NkvuOGKmYSdGe/mBEciG5Ge3C9THxOUiIkCR1VBatzvT4aRRkOfujuLpwQMcn -HL/EVlP6Y2XQ8xwOFvVrhlhNGNTkDY6lnVuR3HYkUD/GKvvZt5y11ubQ2egZixVx -SK236thZiNSQvxaz2emsWWFUyBy6ysHK4bkgTI86k4mloMy/0/Z1pHWWbVY= ------END CERTIFICATE----- - -# Issuer: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center -# Subject: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center -# Label: "T-TeleSec GlobalRoot Class 2" -# Serial: 1 -# MD5 Fingerprint: 2b:9b:9e:e4:7b:6c:1f:00:72:1a:cc:c1:77:79:df:6a -# SHA1 Fingerprint: 59:0d:2d:7d:88:4f:40:2e:61:7e:a5:62:32:17:65:cf:17:d8:94:e9 -# SHA256 Fingerprint: 91:e2:f5:78:8d:58:10:eb:a7:ba:58:73:7d:e1:54:8a:8e:ca:cd:01:45:98:bc:0b:14:3e:04:1b:17:05:25:52 ------BEGIN CERTIFICATE----- -MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx -KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd -BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl -YyBHbG9iYWxSb290IENsYXNzIDIwHhcNMDgxMDAxMTA0MDE0WhcNMzMxMDAxMjM1 -OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy -aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50 -ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDIwggEiMA0G -CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCqX9obX+hzkeXaXPSi5kfl82hVYAUd -AqSzm1nzHoqvNK38DcLZSBnuaY/JIPwhqgcZ7bBcrGXHX+0CfHt8LRvWurmAwhiC -FoT6ZrAIxlQjgeTNuUk/9k9uN0goOA/FvudocP05l03Sx5iRUKrERLMjfTlH6VJi -1hKTXrcxlkIF+3anHqP1wvzpesVsqXFP6st4vGCvx9702cu+fjOlbpSD8DT6Iavq -jnKgP6TeMFvvhk1qlVtDRKgQFRzlAVfFmPHmBiiRqiDFt1MmUUOyCxGVWOHAD3bZ -wI18gfNycJ5v/hqO2V81xrJvNHy+SE/iWjnX2J14np+GPgNeGYtEotXHAgMBAAGj -QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS/ -WSA2AHmgoCJrjNXyYdK4LMuCSjANBgkqhkiG9w0BAQsFAAOCAQEAMQOiYQsfdOhy -NsZt+U2e+iKo4YFWz827n+qrkRk4r6p8FU3ztqONpfSO9kSpp+ghla0+AGIWiPAC -uvxhI+YzmzB6azZie60EI4RYZeLbK4rnJVM3YlNfvNoBYimipidx5joifsFvHZVw -IEoHNN/q/xWA5brXethbdXwFeilHfkCoMRN3zUA7tFFHei4R40cR3p1m0IvVVGb6 -g1XqfMIpiRvpb7PO4gWEyS8+eIVibslfwXhjdFjASBgMmTnrpMwatXlajRWc2BQN -9noHV8cigwUtPJslJj0Ys6lDfMjIq2SPDqO/nBudMNva0Bkuqjzx+zOAduTNrRlP -BSeOE6Fuwg== ------END CERTIFICATE----- - -# Issuer: CN=Atos TrustedRoot 2011 O=Atos -# Subject: CN=Atos TrustedRoot 2011 O=Atos -# Label: "Atos TrustedRoot 2011" -# Serial: 6643877497813316402 -# MD5 Fingerprint: ae:b9:c4:32:4b:ac:7f:5d:66:cc:77:94:bb:2a:77:56 -# SHA1 Fingerprint: 2b:b1:f5:3e:55:0c:1d:c5:f1:d4:e6:b7:6a:46:4b:55:06:02:ac:21 -# SHA256 Fingerprint: f3:56:be:a2:44:b7:a9:1e:b3:5d:53:ca:9a:d7:86:4a:ce:01:8e:2d:35:d5:f8:f9:6d:df:68:a6:f4:1a:a4:74 ------BEGIN CERTIFICATE----- -MIIDdzCCAl+gAwIBAgIIXDPLYixfszIwDQYJKoZIhvcNAQELBQAwPDEeMBwGA1UE -AwwVQXRvcyBUcnVzdGVkUm9vdCAyMDExMQ0wCwYDVQQKDARBdG9zMQswCQYDVQQG -EwJERTAeFw0xMTA3MDcxNDU4MzBaFw0zMDEyMzEyMzU5NTlaMDwxHjAcBgNVBAMM -FUF0b3MgVHJ1c3RlZFJvb3QgMjAxMTENMAsGA1UECgwEQXRvczELMAkGA1UEBhMC -REUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCVhTuXbyo7LjvPpvMp -Nb7PGKw+qtn4TaA+Gke5vJrf8v7MPkfoepbCJI419KkM/IL9bcFyYie96mvr54rM -VD6QUM+A1JX76LWC1BTFtqlVJVfbsVD2sGBkWXppzwO3bw2+yj5vdHLqqjAqc2K+ -SZFhyBH+DgMq92og3AIVDV4VavzjgsG1xZ1kCWyjWZgHJ8cblithdHFsQ/H3NYkQ 
-4J7sVaE3IqKHBAUsR320HLliKWYoyrfhk/WklAOZuXCFteZI6o1Q/NnezG8HDt0L -cp2AMBYHlT8oDv3FdU9T1nSatCQujgKRz3bFmx5VdJx4IbHwLfELn8LVlhgf8FQi -eowHAgMBAAGjfTB7MB0GA1UdDgQWBBSnpQaxLKYJYO7Rl+lwrrw7GWzbITAPBgNV -HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFKelBrEspglg7tGX6XCuvDsZbNshMBgG -A1UdIAQRMA8wDQYLKwYBBAGwLQMEAQEwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3 -DQEBCwUAA4IBAQAmdzTblEiGKkGdLD4GkGDEjKwLVLgfuXvTBznk+j57sj1O7Z8j -vZfza1zv7v1Apt+hk6EKhqzvINB5Ab149xnYJDE0BAGmuhWawyfc2E8PzBhj/5kP -DpFrdRbhIfzYJsdHt6bPWHJxfrrhTZVHO8mvbaG0weyJ9rQPOLXiZNwlz6bb65pc -maHFCN795trV1lpFDMS3wrUU77QR/w4VtfX128a961qn8FYiqTxlVMYVqL2Gns2D -lmh6cYGJ4Qvh6hEbaAjMaZ7snkGeRDImeuKHCnE96+RapNLbxc3G3mB/ufNPRJLv -KrcYPqcZ2Qt9sTdBQrC6YB3y/gkRsPCHe6ed ------END CERTIFICATE----- - -# Issuer: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited -# Subject: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited -# Label: "QuoVadis Root CA 1 G3" -# Serial: 687049649626669250736271037606554624078720034195 -# MD5 Fingerprint: a4:bc:5b:3f:fe:37:9a:fa:64:f0:e2:fa:05:3d:0b:ab -# SHA1 Fingerprint: 1b:8e:ea:57:96:29:1a:c9:39:ea:b8:0a:81:1a:73:73:c0:93:79:67 -# SHA256 Fingerprint: 8a:86:6f:d1:b2:76:b5:7e:57:8e:92:1c:65:82:8a:2b:ed:58:e9:f2:f2:88:05:41:34:b7:f1:f4:bf:c9:cc:74 ------BEGIN CERTIFICATE----- -MIIFYDCCA0igAwIBAgIUeFhfLq0sGUvjNwc1NBMotZbUZZMwDQYJKoZIhvcNAQEL -BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc -BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMSBHMzAeFw0xMjAxMTIxNzI3NDRaFw00 -MjAxMTIxNzI3NDRaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM -aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDEgRzMwggIiMA0GCSqG -SIb3DQEBAQUAA4ICDwAwggIKAoICAQCgvlAQjunybEC0BJyFuTHK3C3kEakEPBtV -wedYMB0ktMPvhd6MLOHBPd+C5k+tR4ds7FtJwUrVu4/sh6x/gpqG7D0DmVIB0jWe -rNrwU8lmPNSsAgHaJNM7qAJGr6Qc4/hzWHa39g6QDbXwz8z6+cZM5cOGMAqNF341 -68Xfuw6cwI2H44g4hWf6Pser4BOcBRiYz5P1sZK0/CPTz9XEJ0ngnjybCKOLXSoh -4Pw5qlPafX7PGglTvF0FBM+hSo+LdoINofjSxxR3W5A2B4GbPgb6Ul5jxaYA/qXp -UhtStZI5cgMJYr2wYBZupt0lwgNm3fME0UDiTouG9G/lg6AnhF4EwfWQvTA9xO+o -abw4m6SkltFi2mnAAZauy8RRNOoMqv8hjlmPSlzkYZqn0ukqeI1RPToV7qJZjqlc -3sX5kCLliEVx3ZGZbHqfPT2YfF72vhZooF6uCyP8Wg+qInYtyaEQHeTTRCOQiJ/G -KubX9ZqzWB4vMIkIG1SitZgj7Ah3HJVdYdHLiZxfokqRmu8hqkkWCKi9YSgxyXSt -hfbZxbGL0eUQMk1fiyA6PEkfM4VZDdvLCXVDaXP7a3F98N/ETH3Goy7IlXnLc6KO -Tk0k+17kBL5yG6YnLUlamXrXXAkgt3+UuU/xDRxeiEIbEbfnkduebPRq34wGmAOt -zCjvpUfzUwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB -BjAdBgNVHQ4EFgQUo5fW816iEOGrRZ88F2Q87gFwnMwwDQYJKoZIhvcNAQELBQAD -ggIBABj6W3X8PnrHX3fHyt/PX8MSxEBd1DKquGrX1RUVRpgjpeaQWxiZTOOtQqOC -MTaIzen7xASWSIsBx40Bz1szBpZGZnQdT+3Btrm0DWHMY37XLneMlhwqI2hrhVd2 -cDMT/uFPpiN3GPoajOi9ZcnPP/TJF9zrx7zABC4tRi9pZsMbj/7sPtPKlL92CiUN -qXsCHKnQO18LwIE6PWThv6ctTr1NxNgpxiIY0MWscgKCP6o6ojoilzHdCGPDdRS5 -YCgtW2jgFqlmgiNR9etT2DGbe+m3nUvriBbP+V04ikkwj+3x6xn0dxoxGE1nVGwv -b2X52z3sIexe9PSLymBlVNFxZPT5pqOBMzYzcfCkeF9OrYMh3jRJjehZrJ3ydlo2 -8hP0r+AJx2EqbPfgna67hkooby7utHnNkDPDs3b69fBsnQGQ+p6Q9pxyz0fawx/k -NSBT8lTR32GDpgLiJTjehTItXnOQUl1CxM49S+H5GYQd1aJQzEH7QRTDvdbJWqNj -ZgKAvQU6O0ec7AAmTPWIUb+oI38YB7AL7YsmoWTTYUrrXJ/es69nA7Mf3W1daWhp -q1467HxpvMc7hU6eFbm0FU/DlXpY18ls6Wy58yljXrQs8C097Vpl4KlbQMJImYFt -nh8GKjwStIsPm6Ik8KaN1nrgS7ZklmOVhMJKzRwuJIczYOXD ------END CERTIFICATE----- - -# Issuer: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited -# Subject: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited -# Label: "QuoVadis Root CA 2 G3" -# Serial: 390156079458959257446133169266079962026824725800 -# MD5 Fingerprint: af:0c:86:6e:bf:40:2d:7f:0b:3e:12:50:ba:12:3d:06 -# SHA1 Fingerprint: 09:3c:61:f3:8b:8b:dc:7d:55:df:75:38:02:05:00:e1:25:f5:c8:36 -# SHA256 Fingerprint: 
8f:e4:fb:0a:f9:3a:4d:0d:67:db:0b:eb:b2:3e:37:c7:1b:f3:25:dc:bc:dd:24:0e:a0:4d:af:58:b4:7e:18:40 ------BEGIN CERTIFICATE----- -MIIFYDCCA0igAwIBAgIURFc0JFuBiZs18s64KztbpybwdSgwDQYJKoZIhvcNAQEL -BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc -BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMiBHMzAeFw0xMjAxMTIxODU5MzJaFw00 -MjAxMTIxODU5MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM -aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDIgRzMwggIiMA0GCSqG -SIb3DQEBAQUAA4ICDwAwggIKAoICAQChriWyARjcV4g/Ruv5r+LrI3HimtFhZiFf -qq8nUeVuGxbULX1QsFN3vXg6YOJkApt8hpvWGo6t/x8Vf9WVHhLL5hSEBMHfNrMW -n4rjyduYNM7YMxcoRvynyfDStNVNCXJJ+fKH46nafaF9a7I6JaltUkSs+L5u+9ym -c5GQYaYDFCDy54ejiK2toIz/pgslUiXnFgHVy7g1gQyjO/Dh4fxaXc6AcW34Sas+ -O7q414AB+6XrW7PFXmAqMaCvN+ggOp+oMiwMzAkd056OXbxMmO7FGmh77FOm6RQ1 -o9/NgJ8MSPsc9PG/Srj61YxxSscfrf5BmrODXfKEVu+lV0POKa2Mq1W/xPtbAd0j -IaFYAI7D0GoT7RPjEiuA3GfmlbLNHiJuKvhB1PLKFAeNilUSxmn1uIZoL1NesNKq -IcGY5jDjZ1XHm26sGahVpkUG0CM62+tlXSoREfA7T8pt9DTEceT/AFr2XK4jYIVz -8eQQsSWu1ZK7E8EM4DnatDlXtas1qnIhO4M15zHfeiFuuDIIfR0ykRVKYnLP43eh -vNURG3YBZwjgQQvD6xVu+KQZ2aKrr+InUlYrAoosFCT5v0ICvybIxo/gbjh9Uy3l -7ZizlWNof/k19N+IxWA1ksB8aRxhlRbQ694Lrz4EEEVlWFA4r0jyWbYW8jwNkALG -cC4BrTwV1wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB -BjAdBgNVHQ4EFgQU7edvdlq/YOxJW8ald7tyFnGbxD0wDQYJKoZIhvcNAQELBQAD -ggIBAJHfgD9DCX5xwvfrs4iP4VGyvD11+ShdyLyZm3tdquXK4Qr36LLTn91nMX66 -AarHakE7kNQIXLJgapDwyM4DYvmL7ftuKtwGTTwpD4kWilhMSA/ohGHqPHKmd+RC -roijQ1h5fq7KpVMNqT1wvSAZYaRsOPxDMuHBR//47PERIjKWnML2W2mWeyAMQ0Ga -W/ZZGYjeVYg3UQt4XAoeo0L9x52ID8DyeAIkVJOviYeIyUqAHerQbj5hLja7NQ4n -lv1mNDthcnPxFlxHBlRJAHpYErAK74X9sbgzdWqTHBLmYF5vHX/JHyPLhGGfHoJE -+V+tYlUkmlKY7VHnoX6XOuYvHxHaU4AshZ6rNRDbIl9qxV6XU/IyAgkwo1jwDQHV -csaxfGl7w/U2Rcxhbl5MlMVerugOXou/983g7aEOGzPuVBj+D77vfoRrQ+NwmNtd -dbINWQeFFSM51vHfqSYP1kjHs6Yi9TM3WpVHn3u6GBVv/9YUZINJ0gpnIdsPNWNg -KCLjsZWDzYWm3S8P52dSbrsvhXz1SnPnxT7AvSESBT/8twNJAlvIJebiVDj1eYeM -HVOyToV7BjjHLPj4sHKNJeV3UvQDHEimUF+IIDBu8oJDqz2XhOdT+yHBTw8imoa4 -WSr2Rz0ZiC3oheGe7IUIarFsNMkd7EgrO3jtZsSOeWmD3n+M ------END CERTIFICATE----- - -# Issuer: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited -# Subject: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited -# Label: "QuoVadis Root CA 3 G3" -# Serial: 268090761170461462463995952157327242137089239581 -# MD5 Fingerprint: df:7d:b9:ad:54:6f:68:a1:df:89:57:03:97:43:b0:d7 -# SHA1 Fingerprint: 48:12:bd:92:3c:a8:c4:39:06:e7:30:6d:27:96:e6:a4:cf:22:2e:7d -# SHA256 Fingerprint: 88:ef:81:de:20:2e:b0:18:45:2e:43:f8:64:72:5c:ea:5f:bd:1f:c2:d9:d2:05:73:07:09:c5:d8:b8:69:0f:46 ------BEGIN CERTIFICATE----- -MIIFYDCCA0igAwIBAgIULvWbAiin23r/1aOp7r0DoM8Sah0wDQYJKoZIhvcNAQEL -BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc -BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMyBHMzAeFw0xMjAxMTIyMDI2MzJaFw00 -MjAxMTIyMDI2MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM -aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDMgRzMwggIiMA0GCSqG -SIb3DQEBAQUAA4ICDwAwggIKAoICAQCzyw4QZ47qFJenMioKVjZ/aEzHs286IxSR -/xl/pcqs7rN2nXrpixurazHb+gtTTK/FpRp5PIpM/6zfJd5O2YIyC0TeytuMrKNu -FoM7pmRLMon7FhY4futD4tN0SsJiCnMK3UmzV9KwCoWdcTzeo8vAMvMBOSBDGzXR -U7Ox7sWTaYI+FrUoRqHe6okJ7UO4BUaKhvVZR74bbwEhELn9qdIoyhA5CcoTNs+c -ra1AdHkrAj80//ogaX3T7mH1urPnMNA3I4ZyYUUpSFlob3emLoG+B01vr87ERROR -FHAGjx+f+IdpsQ7vw4kZ6+ocYfx6bIrc1gMLnia6Et3UVDmrJqMz6nWB2i3ND0/k -A9HvFZcba5DFApCTZgIhsUfei5pKgLlVj7WiL8DWM2fafsSntARE60f75li59wzw -eyuxwHApw0BiLTtIadwjPEjrewl5qW3aqDCYz4ByA4imW0aucnl8CAMhZa634Ryl -sSqiMd5mBPfAdOhx3v89WcyWJhKLhZVXGqtrdQtEPREoPHtht+KPZ0/l7DxMYIBp -VzgeAVuNVejH38DMdyM0SXV89pgR6y3e7UEuFAUCf+D+IOs15xGsIs5XPd7JMG0Q 
-A4XN8f+MFrXBsj6IbGB/kE+V9/YtrQE5BwT6dYB9v0lQ7e/JxHwc64B+27bQ3RP+ -ydOc17KXqQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB -BjAdBgNVHQ4EFgQUxhfQvKjqAkPyGwaZXSuQILnXnOQwDQYJKoZIhvcNAQELBQAD -ggIBADRh2Va1EodVTd2jNTFGu6QHcrxfYWLopfsLN7E8trP6KZ1/AvWkyaiTt3px -KGmPc+FSkNrVvjrlt3ZqVoAh313m6Tqe5T72omnHKgqwGEfcIHB9UqM+WXzBusnI -FUBhynLWcKzSt/Ac5IYp8M7vaGPQtSCKFWGafoaYtMnCdvvMujAWzKNhxnQT5Wvv -oxXqA/4Ti2Tk08HS6IT7SdEQTXlm66r99I0xHnAUrdzeZxNMgRVhvLfZkXdxGYFg -u/BYpbWcC/ePIlUnwEsBbTuZDdQdm2NnL9DuDcpmvJRPpq3t/O5jrFc/ZSXPsoaP -0Aj/uHYUbt7lJ+yreLVTubY/6CD50qi+YUbKh4yE8/nxoGibIh6BJpsQBJFxwAYf -3KDTuVan45gtf4Od34wrnDKOMpTwATwiKp9Dwi7DmDkHOHv8XgBCH/MyJnmDhPbl -8MFREsALHgQjDFSlTC9JxUrRtm5gDWv8a4uFJGS3iQ6rJUdbPM9+Sb3H6QrG2vd+ -DhcI00iX0HGS8A85PjRqHH3Y8iKuu2n0M7SmSFXRDw4m6Oy2Cy2nhTXN/VnIn9HN -PlopNLk9hM6xZdRZkZFWdSHBd575euFgndOtBBj0fOtek49TSiIp+EgrPk2GrFt/ -ywaZWWDYWGWVjUTR939+J399roD1B0y2PpxxVJkES/1Y+Zj0 ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Assured ID Root G2" -# Serial: 15385348160840213938643033620894905419 -# MD5 Fingerprint: 92:38:b9:f8:63:24:82:65:2c:57:33:e6:fe:81:8f:9d -# SHA1 Fingerprint: a1:4b:48:d9:43:ee:0a:0e:40:90:4f:3c:e0:a4:c0:91:93:51:5d:3f -# SHA256 Fingerprint: 7d:05:eb:b6:82:33:9f:8c:94:51:ee:09:4e:eb:fe:fa:79:53:a1:14:ed:b2:f4:49:49:45:2f:ab:7d:2f:c1:85 ------BEGIN CERTIFICATE----- -MIIDljCCAn6gAwIBAgIQC5McOtY5Z+pnI7/Dr5r0SzANBgkqhkiG9w0BAQsFADBl -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv -b3QgRzIwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQG -EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl -cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzIwggEi -MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDZ5ygvUj82ckmIkzTz+GoeMVSA -n61UQbVH35ao1K+ALbkKz3X9iaV9JPrjIgwrvJUXCzO/GU1BBpAAvQxNEP4Htecc -biJVMWWXvdMX0h5i89vqbFCMP4QMls+3ywPgym2hFEwbid3tALBSfK+RbLE4E9Hp -EgjAALAcKxHad3A2m67OeYfcgnDmCXRwVWmvo2ifv922ebPynXApVfSr/5Vh88lA -bx3RvpO704gqu52/clpWcTs/1PPRCv4o76Pu2ZmvA9OPYLfykqGxvYmJHzDNw6Yu -YjOuFgJ3RFrngQo8p0Quebg/BLxcoIfhG69Rjs3sLPr4/m3wOnyqi+RnlTGNAgMB -AAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQW -BBTOw0q5mVXyuNtgv6l+vVa1lzan1jANBgkqhkiG9w0BAQsFAAOCAQEAyqVVjOPI -QW5pJ6d1Ee88hjZv0p3GeDgdaZaikmkuOGybfQTUiaWxMTeKySHMq2zNixya1r9I -0jJmwYrA8y8678Dj1JGG0VDjA9tzd29KOVPt3ibHtX2vK0LRdWLjSisCx1BL4Gni -lmwORGYQRI+tBev4eaymG+g3NJ1TyWGqolKvSnAWhsI6yLETcDbYz+70CjTVW0z9 -B5yiutkBclzzTcHdDrEcDcRjvq30FPuJ7KJBDkzMyFdA0G4Dqs0MjomZmWzwPDCv -ON9vvKO+KSAnq3T/EyJ43pdSVR6DtVQgA+6uwE9W3jfMw3+qBCe703e4YtsXfJwo -IhNzbM8m9Yop5w== ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Assured ID Root G3" -# Serial: 15459312981008553731928384953135426796 -# MD5 Fingerprint: 7c:7f:65:31:0c:81:df:8d:ba:3e:99:e2:5c:ad:6e:fb -# SHA1 Fingerprint: f5:17:a2:4f:9a:48:c6:c9:f8:a2:00:26:9f:dc:0f:48:2c:ab:30:89 -# SHA256 Fingerprint: 7e:37:cb:8b:4c:47:09:0c:ab:36:55:1b:a6:f4:5d:b8:40:68:0f:ba:16:6a:95:2d:b1:00:71:7f:43:05:3f:c2 ------BEGIN CERTIFICATE----- -MIICRjCCAc2gAwIBAgIQC6Fa+h3foLVJRK/NJKBs7DAKBggqhkjOPQQDAzBlMQsw -CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu -ZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3Qg 
-RzMwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQGEwJV -UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu -Y29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzMwdjAQBgcq -hkjOPQIBBgUrgQQAIgNiAAQZ57ysRGXtzbg/WPuNsVepRC0FFfLvC/8QdJ+1YlJf -Zn4f5dwbRXkLzMZTCp2NXQLZqVneAlr2lSoOjThKiknGvMYDOAdfVdp+CW7if17Q -RSAPWXYQ1qAk8C3eNvJsKTmjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/ -BAQDAgGGMB0GA1UdDgQWBBTL0L2p4ZgFUaFNN6KDec6NHSrkhDAKBggqhkjOPQQD -AwNnADBkAjAlpIFFAmsSS3V0T8gj43DydXLefInwz5FyYZ5eEJJZVrmDxxDnOOlY -JjZ91eQ0hjkCMHw2U/Aw5WJjOpnitqM7mzT6HtoQknFekROn3aRukswy1vUhZscv -6pZjamVFkpUBtA== ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Global Root G2" -# Serial: 4293743540046975378534879503202253541 -# MD5 Fingerprint: e4:a6:8a:c8:54:ac:52:42:46:0a:fd:72:48:1b:2a:44 -# SHA1 Fingerprint: df:3c:24:f9:bf:d6:66:76:1b:26:80:73:fe:06:d1:cc:8d:4f:82:a4 -# SHA256 Fingerprint: cb:3c:cb:b7:60:31:e5:e0:13:8f:8d:d3:9a:23:f9:de:47:ff:c3:5e:43:c1:14:4c:ea:27:d4:6a:5a:b1:cb:5f ------BEGIN CERTIFICATE----- -MIIDjjCCAnagAwIBAgIQAzrx5qcRqaC7KGSxHQn65TANBgkqhkiG9w0BAQsFADBh -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBH -MjAeFw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVT -MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j -b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEcyMIIBIjANBgkqhkiG -9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuzfNNNx7a8myaJCtSnX/RrohCgiN9RlUyfuI -2/Ou8jqJkTx65qsGGmvPrC3oXgkkRLpimn7Wo6h+4FR1IAWsULecYxpsMNzaHxmx -1x7e/dfgy5SDN67sH0NO3Xss0r0upS/kqbitOtSZpLYl6ZtrAGCSYP9PIUkY92eQ -q2EGnI/yuum06ZIya7XzV+hdG82MHauVBJVJ8zUtluNJbd134/tJS7SsVQepj5Wz -tCO7TG1F8PapspUwtP1MVYwnSlcUfIKdzXOS0xZKBgyMUNGPHgm+F6HmIcr9g+UQ -vIOlCsRnKPZzFBQ9RnbDhxSJITRNrw9FDKZJobq7nMWxM4MphQIDAQABo0IwQDAP -BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAdBgNVHQ4EFgQUTiJUIBiV -5uNu5g/6+rkS7QYXjzkwDQYJKoZIhvcNAQELBQADggEBAGBnKJRvDkhj6zHd6mcY -1Yl9PMWLSn/pvtsrF9+wX3N3KjITOYFnQoQj8kVnNeyIv/iPsGEMNKSuIEyExtv4 -NeF22d+mQrvHRAiGfzZ0JFrabA0UWTW98kndth/Jsw1HKj2ZL7tcu7XUIOGZX1NG -Fdtom/DzMNU+MeKNhJ7jitralj41E6Vf8PlwUHBHQRFXGU7Aj64GxJUTFy8bJZ91 -8rGOmaFvE7FBcf6IKshPECBV1/MUReXgRPTqh5Uykw7+U0b6LJ3/iyK5S9kJRaTe -pLiaWN0bfVKfjllDiIGknibVb63dDcY3fe0Dkhvld1927jyNxF1WW6LZZm6zNTfl -MrY= ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Global Root G3" -# Serial: 7089244469030293291760083333884364146 -# MD5 Fingerprint: f5:5d:a4:50:a5:fb:28:7e:1e:0f:0d:cc:96:57:56:ca -# SHA1 Fingerprint: 7e:04:de:89:6a:3e:66:6d:00:e6:87:d3:3f:fa:d9:3b:e8:3d:34:9e -# SHA256 Fingerprint: 31:ad:66:48:f8:10:41:38:c7:38:f3:9e:a4:32:01:33:39:3e:3a:18:cc:02:29:6e:f9:7c:2a:c9:ef:67:31:d0 ------BEGIN CERTIFICATE----- -MIICPzCCAcWgAwIBAgIQBVVWvPJepDU1w6QP1atFcjAKBggqhkjOPQQDAzBhMQsw -CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu -ZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBHMzAe -Fw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVTMRUw -EwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5jb20x -IDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEczMHYwEAYHKoZIzj0CAQYF -K4EEACIDYgAE3afZu4q4C/sLfyHS8L6+c/MzXRq8NOrexpu80JX28MzQC7phW1FG -fp4tn+6OYwwX7Adw9c+ELkCDnOg/QW07rdOkFFk2eJ0DQ+4QE2xy3q6Ip6FrtUPO 
-Z9wj/wMco+I+o0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAd -BgNVHQ4EFgQUs9tIpPmhxdiuNkHMEWNpYim8S8YwCgYIKoZIzj0EAwMDaAAwZQIx -AK288mw/EkrRLTnDCgmXc/SINoyIJ7vmiI1Qhadj+Z4y3maTD/HMsQmP3Wyr+mt/ -oAIwOWZbwmSNuJ5Q3KjVSaLtx9zRSX8XAbjIho9OjIgrqJqpisXRAL34VOKa5Vt8 -sycX ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Trusted Root G4" -# Serial: 7451500558977370777930084869016614236 -# MD5 Fingerprint: 78:f2:fc:aa:60:1f:2f:b4:eb:c9:37:ba:53:2e:75:49 -# SHA1 Fingerprint: dd:fb:16:cd:49:31:c9:73:a2:03:7d:3f:c8:3a:4d:7d:77:5d:05:e4 -# SHA256 Fingerprint: 55:2f:7b:dc:f1:a7:af:9e:6c:e6:72:01:7f:4f:12:ab:f7:72:40:c7:8e:76:1a:c2:03:d1:d9:d2:0a:c8:99:88 ------BEGIN CERTIFICATE----- -MIIFkDCCA3igAwIBAgIQBZsbV56OITLiOQe9p3d1XDANBgkqhkiG9w0BAQwFADBi -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3Qg -RzQwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBiMQswCQYDVQQGEwJV -UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu -Y29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3QgRzQwggIiMA0GCSqG -SIb3DQEBAQUAA4ICDwAwggIKAoICAQC/5pBzaN675F1KPDAiMGkz7MKnJS7JIT3y -ithZwuEppz1Yq3aaza57G4QNxDAf8xukOBbrVsaXbR2rsnnyyhHS5F/WBTxSD1If -xp4VpX6+n6lXFllVcq9ok3DCsrp1mWpzMpTREEQQLt+C8weE5nQ7bXHiLQwb7iDV -ySAdYyktzuxeTsiT+CFhmzTrBcZe7FsavOvJz82sNEBfsXpm7nfISKhmV1efVFiO -DCu3T6cw2Vbuyntd463JT17lNecxy9qTXtyOj4DatpGYQJB5w3jHtrHEtWoYOAMQ -jdjUN6QuBX2I9YI+EJFwq1WCQTLX2wRzKm6RAXwhTNS8rhsDdV14Ztk6MUSaM0C/ -CNdaSaTC5qmgZ92kJ7yhTzm1EVgX9yRcRo9k98FpiHaYdj1ZXUJ2h4mXaXpI8OCi -EhtmmnTK3kse5w5jrubU75KSOp493ADkRSWJtppEGSt+wJS00mFt6zPZxd9LBADM -fRyVw4/3IbKyEbe7f/LVjHAsQWCqsWMYRJUadmJ+9oCw++hkpjPRiQfhvbfmQ6QY -uKZ3AeEPlAwhHbJUKSWJbOUOUlFHdL4mrLZBdd56rF+NP8m800ERElvlEFDrMcXK -chYiCd98THU/Y+whX8QgUWtvsauGi0/C1kVfnSD8oR7FwI+isX4KJpn15GkvmB0t -9dmpsh3lGwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB -hjAdBgNVHQ4EFgQU7NfjgtJxXWRM3y5nP+e6mK4cD08wDQYJKoZIhvcNAQEMBQAD -ggIBALth2X2pbL4XxJEbw6GiAI3jZGgPVs93rnD5/ZpKmbnJeFwMDF/k5hQpVgs2 -SV1EY+CtnJYYZhsjDT156W1r1lT40jzBQ0CuHVD1UvyQO7uYmWlrx8GnqGikJ9yd -+SeuMIW59mdNOj6PWTkiU0TryF0Dyu1Qen1iIQqAyHNm0aAFYF/opbSnr6j3bTWc -fFqK1qI4mfN4i/RN0iAL3gTujJtHgXINwBQy7zBZLq7gcfJW5GqXb5JQbZaNaHqa -sjYUegbyJLkJEVDXCLG4iXqEI2FCKeWjzaIgQdfRnGTZ6iahixTXTBmyUEFxPT9N -cCOGDErcgdLMMpSEDQgJlxxPwO5rIHQw0uA5NBCFIRUBCOhVMt5xSdkoF1BN5r5N -0XWs0Mr7QbhDparTwwVETyw2m+L64kW4I1NsBm9nVX9GtUw/bihaeSbSpKhil9Ie -4u1Ki7wb/UdKDd9nZn6yW0HQO+T0O/QEY+nvwlQAUaCKKsnOeMzV6ocEGLPOr0mI -r/OSmbaz5mEP0oUA51Aa5BuVnRmhuZyxm7EAHu/QD09CbMkKvO5D+jpxpchNJqU1 -/YldvIViHTLSoCtU7ZpXwdv6EM8Zt4tKG48BtieVU+i2iW1bvGjUI+iLUaJW+fCm -gKDWHrO8Dw9TdSmq6hN35N6MgSGtBxBHEa2HPQfRdbzP82Z+ ------END CERTIFICATE----- - -# Issuer: CN=COMODO RSA Certification Authority O=COMODO CA Limited -# Subject: CN=COMODO RSA Certification Authority O=COMODO CA Limited -# Label: "COMODO RSA Certification Authority" -# Serial: 101909084537582093308941363524873193117 -# MD5 Fingerprint: 1b:31:b0:71:40:36:cc:14:36:91:ad:c4:3e:fd:ec:18 -# SHA1 Fingerprint: af:e5:d2:44:a8:d1:19:42:30:ff:47:9f:e2:f8:97:bb:cd:7a:8c:b4 -# SHA256 Fingerprint: 52:f0:e1:c4:e5:8e:c6:29:29:1b:60:31:7f:07:46:71:b8:5d:7e:a8:0d:5b:07:27:34:63:53:4b:32:b4:02:34 ------BEGIN CERTIFICATE----- -MIIF2DCCA8CgAwIBAgIQTKr5yttjb+Af907YWwOGnTANBgkqhkiG9w0BAQwFADCB -hTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G 
-A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNV -BAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMTE5 -MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgT -EkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMR -Q09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNh -dGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCR -6FSS0gpWsawNJN3Fz0RndJkrN6N9I3AAcbxT38T6KhKPS38QVr2fcHK3YX/JSw8X -pz3jsARh7v8Rl8f0hj4K+j5c+ZPmNHrZFGvnnLOFoIJ6dq9xkNfs/Q36nGz637CC -9BR++b7Epi9Pf5l/tfxnQ3K9DADWietrLNPtj5gcFKt+5eNu/Nio5JIk2kNrYrhV -/erBvGy2i/MOjZrkm2xpmfh4SDBF1a3hDTxFYPwyllEnvGfDyi62a+pGx8cgoLEf -Zd5ICLqkTqnyg0Y3hOvozIFIQ2dOciqbXL1MGyiKXCJ7tKuY2e7gUYPDCUZObT6Z -+pUX2nwzV0E8jVHtC7ZcryxjGt9XyD+86V3Em69FmeKjWiS0uqlWPc9vqv9JWL7w -qP/0uK3pN/u6uPQLOvnoQ0IeidiEyxPx2bvhiWC4jChWrBQdnArncevPDt09qZah -SL0896+1DSJMwBGB7FY79tOi4lu3sgQiUpWAk2nojkxl8ZEDLXB0AuqLZxUpaVIC -u9ffUGpVRr+goyhhf3DQw6KqLCGqR84onAZFdr+CGCe01a60y1Dma/RMhnEw6abf -Fobg2P9A3fvQQoh/ozM6LlweQRGBY84YcWsr7KaKtzFcOmpH4MN5WdYgGq/yapiq -crxXStJLnbsQ/LBMQeXtHT1eKJ2czL+zUdqnR+WEUwIDAQABo0IwQDAdBgNVHQ4E -FgQUu69+Aj36pvE8hI6t7jiY7NkyMtQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB -/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAArx1UaEt65Ru2yyTUEUAJNMnMvl -wFTPoCWOAvn9sKIN9SCYPBMtrFaisNZ+EZLpLrqeLppysb0ZRGxhNaKatBYSaVqM -4dc+pBroLwP0rmEdEBsqpIt6xf4FpuHA1sj+nq6PK7o9mfjYcwlYRm6mnPTXJ9OV -2jeDchzTc+CiR5kDOF3VSXkAKRzH7JsgHAckaVd4sjn8OoSgtZx8jb8uk2Intzna -FxiuvTwJaP+EmzzV1gsD41eeFPfR60/IvYcjt7ZJQ3mFXLrrkguhxuhoqEwWsRqZ -CuhTLJK7oQkYdQxlqHvLI7cawiiFwxv/0Cti76R7CZGYZ4wUAc1oBmpjIXUDgIiK -boHGhfKppC3n9KUkEEeDys30jXlYsQab5xoq2Z0B15R97QNKyvDb6KkBPvVWmcke -jkk9u+UJueBPSZI9FoJAzMxZxuY67RIuaTxslbH9qh17f4a+Hg4yRvv7E491f0yL -S0Zj/gA0QHDBw7mh3aZw4gSzQbzpgJHqZJx64SIDqZxubw5lT2yHh17zbqD5daWb -QOhTsiedSrnAdyGN/4fy3ryM7xfft0kL0fJuMAsaDk527RH89elWsn2/x20Kk4yl -0MC2Hb46TpSi125sC8KKfPog88Tk5c0NqMuRkrF8hey1FGlmDoLnzc7ILaZRfyHB -NVOFBkpdn627G190 ------END CERTIFICATE----- - -# Issuer: CN=USERTrust RSA Certification Authority O=The USERTRUST Network -# Subject: CN=USERTrust RSA Certification Authority O=The USERTRUST Network -# Label: "USERTrust RSA Certification Authority" -# Serial: 2645093764781058787591871645665788717 -# MD5 Fingerprint: 1b:fe:69:d1:91:b7:19:33:a3:72:a8:0f:e1:55:e5:b5 -# SHA1 Fingerprint: 2b:8f:1b:57:33:0d:bb:a2:d0:7a:6c:51:f7:0e:e9:0d:da:b9:ad:8e -# SHA256 Fingerprint: e7:93:c9:b0:2f:d8:aa:13:e2:1c:31:22:8a:cc:b0:81:19:64:3b:74:9c:89:89:64:b1:74:6d:46:c3:d4:cb:d2 ------BEGIN CERTIFICATE----- -MIIF3jCCA8agAwIBAgIQAf1tMPyjylGoG7xkDjUDLTANBgkqhkiG9w0BAQwFADCB -iDELMAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0pl -cnNleSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNV -BAMTJVVTRVJUcnVzdCBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAw -MjAxMDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNV -BAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVU -aGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBSU0EgQ2Vy -dGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK -AoICAQCAEmUXNg7D2wiz0KxXDXbtzSfTTK1Qg2HiqiBNCS1kCdzOiZ/MPans9s/B -3PHTsdZ7NygRK0faOca8Ohm0X6a9fZ2jY0K2dvKpOyuR+OJv0OwWIJAJPuLodMkY -tJHUYmTbf6MG8YgYapAiPLz+E/CHFHv25B+O1ORRxhFnRghRy4YUVD+8M/5+bJz/ -Fp0YvVGONaanZshyZ9shZrHUm3gDwFA66Mzw3LyeTP6vBZY1H1dat//O+T23LLb2 -VN3I5xI6Ta5MirdcmrS3ID3KfyI0rn47aGYBROcBTkZTmzNg95S+UzeQc0PzMsNT -79uq/nROacdrjGCT3sTHDN/hMq7MkztReJVni+49Vv4M0GkPGw/zJSZrM233bkf6 -c0Plfg6lZrEpfDKEY1WJxA3Bk1QwGROs0303p+tdOmw1XNtB1xLaqUkL39iAigmT 
-Yo61Zs8liM2EuLE/pDkP2QKe6xJMlXzzawWpXhaDzLhn4ugTncxbgtNMs+1b/97l -c6wjOy0AvzVVdAlJ2ElYGn+SNuZRkg7zJn0cTRe8yexDJtC/QV9AqURE9JnnV4ee -UB9XVKg+/XRjL7FQZQnmWEIuQxpMtPAlR1n6BB6T1CZGSlCBst6+eLf8ZxXhyVeE -Hg9j1uliutZfVS7qXMYoCAQlObgOK6nyTJccBz8NUvXt7y+CDwIDAQABo0IwQDAd -BgNVHQ4EFgQUU3m/WqorSs9UgOHYm8Cd8rIDZsswDgYDVR0PAQH/BAQDAgEGMA8G -A1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAFzUfA3P9wF9QZllDHPF -Up/L+M+ZBn8b2kMVn54CVVeWFPFSPCeHlCjtHzoBN6J2/FNQwISbxmtOuowhT6KO -VWKR82kV2LyI48SqC/3vqOlLVSoGIG1VeCkZ7l8wXEskEVX/JJpuXior7gtNn3/3 -ATiUFJVDBwn7YKnuHKsSjKCaXqeYalltiz8I+8jRRa8YFWSQEg9zKC7F4iRO/Fjs -8PRF/iKz6y+O0tlFYQXBl2+odnKPi4w2r78NBc5xjeambx9spnFixdjQg3IM8WcR -iQycE0xyNN+81XHfqnHd4blsjDwSXWXavVcStkNr/+XeTWYRUc+ZruwXtuhxkYze -Sf7dNXGiFSeUHM9h4ya7b6NnJSFd5t0dCy5oGzuCr+yDZ4XUmFF0sbmZgIn/f3gZ -XHlKYC6SQK5MNyosycdiyA5d9zZbyuAlJQG03RoHnHcAP9Dc1ew91Pq7P8yF1m9/ -qS3fuQL39ZeatTXaw2ewh0qpKJ4jjv9cJ2vhsE/zB+4ALtRZh8tSQZXq9EfX7mRB -VXyNWQKV3WKdwrnuWih0hKWbt5DHDAff9Yk2dDLWKMGwsAvgnEzDHNb842m1R0aB -L6KCq9NjRHDEjf8tM7qtj3u1cIiuPhnPQCjY/MiQu12ZIvVS5ljFH4gxQ+6IHdfG -jjxDah2nGN59PRbxYvnKkKj9 ------END CERTIFICATE----- - -# Issuer: CN=USERTrust ECC Certification Authority O=The USERTRUST Network -# Subject: CN=USERTrust ECC Certification Authority O=The USERTRUST Network -# Label: "USERTrust ECC Certification Authority" -# Serial: 123013823720199481456569720443997572134 -# MD5 Fingerprint: fa:68:bc:d9:b5:7f:ad:fd:c9:1d:06:83:28:cc:24:c1 -# SHA1 Fingerprint: d1:cb:ca:5d:b2:d5:2a:7f:69:3b:67:4d:e5:f0:5a:1d:0c:95:7d:f0 -# SHA256 Fingerprint: 4f:f4:60:d5:4b:9c:86:da:bf:bc:fc:57:12:e0:40:0d:2b:ed:3f:bc:4d:4f:bd:aa:86:e0:6a:dc:d2:a9:ad:7a ------BEGIN CERTIFICATE----- -MIICjzCCAhWgAwIBAgIQXIuZxVqUxdJxVt7NiYDMJjAKBggqhkjOPQQDAzCBiDEL -MAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNl -eSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMT -JVVTRVJUcnVzdCBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMjAx -MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNVBAgT -Ck5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVUaGUg -VVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBFQ0MgQ2VydGlm -aWNhdGlvbiBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQarFRaqflo -I+d61SRvU8Za2EurxtW20eZzca7dnNYMYf3boIkDuAUU7FfO7l0/4iGzzvfUinng -o4N+LZfQYcTxmdwlkWOrfzCjtHDix6EznPO/LlxTsV+zfTJ/ijTjeXmjQjBAMB0G -A1UdDgQWBBQ64QmG1M8ZwpZ2dEl23OA1xmNjmjAOBgNVHQ8BAf8EBAMCAQYwDwYD -VR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjA2Z6EWCNzklwBBHU6+4WMB -zzuqQhFkoJ2UOQIReVx7Hfpkue4WQrO/isIJxOzksU0CMQDpKmFHjFJKS04YcPbW -RNZu9YO6bVi9JNlWSOrvxKJGgYhqOkbRqZtNyWHa0V1Xahg= ------END CERTIFICATE----- - -# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5 -# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5 -# Label: "GlobalSign ECC Root CA - R5" -# Serial: 32785792099990507226680698011560947931244 -# MD5 Fingerprint: 9f:ad:3b:1c:02:1e:8a:ba:17:74:38:81:0c:a2:bc:08 -# SHA1 Fingerprint: 1f:24:c6:30:cd:a4:18:ef:20:69:ff:ad:4f:dd:5f:46:3a:1b:69:aa -# SHA256 Fingerprint: 17:9f:bc:14:8a:3d:d0:0f:d2:4e:a1:34:58:cc:43:bf:a7:f5:9c:81:82:d7:83:a5:13:f6:eb:ec:10:0c:89:24 ------BEGIN CERTIFICATE----- -MIICHjCCAaSgAwIBAgIRYFlJ4CYuu1X5CneKcflK2GwwCgYIKoZIzj0EAwMwUDEk -MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI1MRMwEQYDVQQKEwpH -bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX -DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD -QSAtIFI1MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu -MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAER0UOlvt9Xb/pOdEh+J8LttV7HpI6SFkc 
-8GIxLcB6KP4ap1yztsyX50XUWPrRd21DosCHZTQKH3rd6zwzocWdTaRvQZU4f8ke -hOvRnkmSh5SHDDqFSmafnVmTTZdhBoZKo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYD -VR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUPeYpSJvqB8ohREom3m7e0oPQn1kwCgYI -KoZIzj0EAwMDaAAwZQIxAOVpEslu28YxuglB4Zf4+/2a4n0Sye18ZNPLBSWLVtmg -515dTguDnFt2KaAJJiFqYgIwcdK1j1zqO+F4CYWodZI7yFz9SO8NdCKoCOJuxUnO -xwy8p2Fp8fc74SrL+SvzZpA3 ------END CERTIFICATE----- - -# Issuer: CN=IdenTrust Commercial Root CA 1 O=IdenTrust -# Subject: CN=IdenTrust Commercial Root CA 1 O=IdenTrust -# Label: "IdenTrust Commercial Root CA 1" -# Serial: 13298821034946342390520003877796839426 -# MD5 Fingerprint: b3:3e:77:73:75:ee:a0:d3:e3:7e:49:63:49:59:bb:c7 -# SHA1 Fingerprint: df:71:7e:aa:4a:d9:4e:c9:55:84:99:60:2d:48:de:5f:bc:f0:3a:25 -# SHA256 Fingerprint: 5d:56:49:9b:e4:d2:e0:8b:cf:ca:d0:8a:3e:38:72:3d:50:50:3b:de:70:69:48:e4:2f:55:60:30:19:e5:28:ae ------BEGIN CERTIFICATE----- -MIIFYDCCA0igAwIBAgIQCgFCgAAAAUUjyES1AAAAAjANBgkqhkiG9w0BAQsFADBK -MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScwJQYDVQQDEx5JZGVu -VHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwHhcNMTQwMTE2MTgxMjIzWhcNMzQw -MTE2MTgxMjIzWjBKMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScw -JQYDVQQDEx5JZGVuVHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwggIiMA0GCSqG -SIb3DQEBAQUAA4ICDwAwggIKAoICAQCnUBneP5k91DNG8W9RYYKyqU+PZ4ldhNlT -3Qwo2dfw/66VQ3KZ+bVdfIrBQuExUHTRgQ18zZshq0PirK1ehm7zCYofWjK9ouuU -+ehcCuz/mNKvcbO0U59Oh++SvL3sTzIwiEsXXlfEU8L2ApeN2WIrvyQfYo3fw7gp -S0l4PJNgiCL8mdo2yMKi1CxUAGc1bnO/AljwpN3lsKImesrgNqUZFvX9t++uP0D1 -bVoE/c40yiTcdCMbXTMTEl3EASX2MN0CXZ/g1Ue9tOsbobtJSdifWwLziuQkkORi -T0/Br4sOdBeo0XKIanoBScy0RnnGF7HamB4HWfp1IYVl3ZBWzvurpWCdxJ35UrCL -vYf5jysjCiN2O/cz4ckA82n5S6LgTrx+kzmEB/dEcH7+B1rlsazRGMzyNeVJSQjK -Vsk9+w8YfYs7wRPCTY/JTw436R+hDmrfYi7LNQZReSzIJTj0+kuniVyc0uMNOYZK -dHzVWYfCP04MXFL0PfdSgvHqo6z9STQaKPNBiDoT7uje/5kdX7rL6B7yuVBgwDHT -c+XvvqDtMwt0viAgxGds8AgDelWAf0ZOlqf0Hj7h9tgJ4TNkK2PXMl6f+cB7D3hv -l7yTmvmcEpB4eoCHFddydJxVdHixuuFucAS6T6C6aMN7/zHwcz09lCqxC0EOoP5N -iGVreTO01wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB -/zAdBgNVHQ4EFgQU7UQZwNPwBovupHu+QucmVMiONnYwDQYJKoZIhvcNAQELBQAD -ggIBAA2ukDL2pkt8RHYZYR4nKM1eVO8lvOMIkPkp165oCOGUAFjvLi5+U1KMtlwH -6oi6mYtQlNeCgN9hCQCTrQ0U5s7B8jeUeLBfnLOic7iPBZM4zY0+sLj7wM+x8uwt -LRvM7Kqas6pgghstO8OEPVeKlh6cdbjTMM1gCIOQ045U8U1mwF10A0Cj7oV+wh93 -nAbowacYXVKV7cndJZ5t+qntozo00Fl72u1Q8zW/7esUTTHHYPTa8Yec4kjixsU3 -+wYQ+nVZZjFHKdp2mhzpgq7vmrlR94gjmmmVYjzlVYA211QC//G5Xc7UI2/YRYRK -W2XviQzdFKcgyxilJbQN+QHwotL0AMh0jqEqSI5l2xPE4iUXfeu+h1sXIFRRk0pT -AwvsXcoz7WL9RccvW9xYoIA55vrX/hMUpu09lEpCdNTDd1lzzY9GvlU47/rokTLq -l1gEIt44w8y8bckzOmoKaT+gyOpyj4xjhiO9bTyWnpXgSUyqorkqG5w2gXjtw+hG -4iZZRHUe2XWJUc0QhJ1hYMtd+ZciTY6Y5uN/9lu7rs3KSoFrXgvzUeF0K+l+J6fZ -mUlO+KWA2yUPHGNiiskzZ2s8EIPGrd6ozRaOjfAHN3Gf8qv8QfXBi+wAN10J5U6A -7/qxXDgGpRtK4dw4LTzcqx+QGtVKnO7RcGzM7vRX+Bi6hG6H ------END CERTIFICATE----- - -# Issuer: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust -# Subject: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust -# Label: "IdenTrust Public Sector Root CA 1" -# Serial: 13298821034946342390521976156843933698 -# MD5 Fingerprint: 37:06:a5:b0:fc:89:9d:ba:f4:6b:8c:1a:64:cd:d5:ba -# SHA1 Fingerprint: ba:29:41:60:77:98:3f:f4:f3:ef:f2:31:05:3b:2e:ea:6d:4d:45:fd -# SHA256 Fingerprint: 30:d0:89:5a:9a:44:8a:26:20:91:63:55:22:d1:f5:20:10:b5:86:7a:ca:e1:2c:78:ef:95:8f:d4:f4:38:9f:2f ------BEGIN CERTIFICATE----- -MIIFZjCCA06gAwIBAgIQCgFCgAAAAUUjz0Z8AAAAAjANBgkqhkiG9w0BAQsFADBN -MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MSowKAYDVQQDEyFJZGVu -VHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwHhcNMTQwMTE2MTc1MzMyWhcN 
-MzQwMTE2MTc1MzMyWjBNMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0 -MSowKAYDVQQDEyFJZGVuVHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwggIi -MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC2IpT8pEiv6EdrCvsnduTyP4o7 -ekosMSqMjbCpwzFrqHd2hCa2rIFCDQjrVVi7evi8ZX3yoG2LqEfpYnYeEe4IFNGy -RBb06tD6Hi9e28tzQa68ALBKK0CyrOE7S8ItneShm+waOh7wCLPQ5CQ1B5+ctMlS -bdsHyo+1W/CD80/HLaXIrcuVIKQxKFdYWuSNG5qrng0M8gozOSI5Cpcu81N3uURF -/YTLNiCBWS2ab21ISGHKTN9T0a9SvESfqy9rg3LvdYDaBjMbXcjaY8ZNzaxmMc3R -3j6HEDbhuaR672BQssvKplbgN6+rNBM5Jeg5ZuSYeqoSmJxZZoY+rfGwyj4GD3vw -EUs3oERte8uojHH01bWRNszwFcYr3lEXsZdMUD2xlVl8BX0tIdUAvwFnol57plzy -9yLxkA2T26pEUWbMfXYD62qoKjgZl3YNa4ph+bz27nb9cCvdKTz4Ch5bQhyLVi9V -GxyhLrXHFub4qjySjmm2AcG1hp2JDws4lFTo6tyePSW8Uybt1as5qsVATFSrsrTZ -2fjXctscvG29ZV/viDUqZi/u9rNl8DONfJhBaUYPQxxp+pu10GFqzcpL2UyQRqsV -WaFHVCkugyhfHMKiq3IXAAaOReyL4jM9f9oZRORicsPfIsbyVtTdX5Vy7W1f90gD -W/3FKqD2cyOEEBsB5wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/ -BAUwAwEB/zAdBgNVHQ4EFgQU43HgntinQtnbcZFrlJPrw6PRFKMwDQYJKoZIhvcN -AQELBQADggIBAEf63QqwEZE4rU1d9+UOl1QZgkiHVIyqZJnYWv6IAcVYpZmxI1Qj -t2odIFflAWJBF9MJ23XLblSQdf4an4EKwt3X9wnQW3IV5B4Jaj0z8yGa5hV+rVHV -DRDtfULAj+7AmgjVQdZcDiFpboBhDhXAuM/FSRJSzL46zNQuOAXeNf0fb7iAaJg9 -TaDKQGXSc3z1i9kKlT/YPyNtGtEqJBnZhbMX73huqVjRI9PHE+1yJX9dsXNw0H8G -lwmEKYBhHfpe/3OsoOOJuBxxFcbeMX8S3OFtm6/n6J91eEyrRjuazr8FGF1NFTwW -mhlQBJqymm9li1JfPFgEKCXAZmExfrngdbkaqIHWchezxQMxNRF4eKLg6TCMf4Df -WN88uieW4oA0beOY02QnrEh+KHdcxiVhJfiFDGX6xDIvpZgF5PgLZxYWxoK4Mhn5 -+bl53B/N66+rDt0b20XkeucC4pVd/GnwU2lhlXV5C15V5jgclKlZM57IcXR5f1GJ -tshquDDIajjDbp7hNxbqBWJMWxJH7ae0s1hWx0nzfxJoCTFx8G34Tkf71oXuxVhA -GaQdp/lLQzfcaFpPz+vCZHTetBXZ9FRUGi8c15dxVJCO2SCdUyt/q4/i6jC8UDfv -8Ue1fXwsBOxonbRJRBD0ckscZOf85muQ3Wl9af0AVqW3rLatt8o+Ae+c ------END CERTIFICATE----- - -# Issuer: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only -# Subject: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. 
- for authorized use only -# Label: "Entrust Root Certification Authority - G2" -# Serial: 1246989352 -# MD5 Fingerprint: 4b:e2:c9:91:96:65:0c:f4:0e:5a:93:92:a0:0a:fe:b2 -# SHA1 Fingerprint: 8c:f4:27:fd:79:0c:3a:d1:66:06:8d:e8:1e:57:ef:bb:93:22:72:d4 -# SHA256 Fingerprint: 43:df:57:74:b0:3e:7f:ef:5f:e4:0d:93:1a:7b:ed:f1:bb:2e:6b:42:73:8c:4e:6d:38:41:10:3d:3a:a7:f3:39 ------BEGIN CERTIFICATE----- -MIIEPjCCAyagAwIBAgIESlOMKDANBgkqhkiG9w0BAQsFADCBvjELMAkGA1UEBhMC -VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50 -cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3Qs -IEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVz -dCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRzIwHhcNMDkwNzA3MTcy -NTU0WhcNMzAxMjA3MTc1NTU0WjCBvjELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUVu -dHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50cnVzdC5uZXQvbGVnYWwt -dGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3QsIEluYy4gLSBmb3IgYXV0 -aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVzdCBSb290IENlcnRpZmlj -YXRpb24gQXV0aG9yaXR5IC0gRzIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK -AoIBAQC6hLZy254Ma+KZ6TABp3bqMriVQRrJ2mFOWHLP/vaCeb9zYQYKpSfYs1/T -RU4cctZOMvJyig/3gxnQaoCAAEUesMfnmr8SVycco2gvCoe9amsOXmXzHHfV1IWN -cCG0szLni6LVhjkCsbjSR87kyUnEO6fe+1R9V77w6G7CebI6C1XiUJgWMhNcL3hW -wcKUs/Ja5CeanyTXxuzQmyWC48zCxEXFjJd6BmsqEZ+pCm5IO2/b1BEZQvePB7/1 -U1+cPvQXLOZprE4yTGJ36rfo5bs0vBmLrpxR57d+tVOxMyLlbc9wPBr64ptntoP0 -jaWvYkxN4FisZDQSA/i2jZRjJKRxAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAP -BgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqciZ60B7vfec7aVHUbI2fkBJmqzAN -BgkqhkiG9w0BAQsFAAOCAQEAeZ8dlsa2eT8ijYfThwMEYGprmi5ZiXMRrEPR9RP/ -jTkrwPK9T3CMqS/qF8QLVJ7UG5aYMzyorWKiAHarWWluBh1+xLlEjZivEtRh2woZ -Rkfz6/djwUAFQKXSt/S1mja/qYh2iARVBCuch38aNzx+LaUa2NSJXsq9rD1s2G2v -1fN2D807iDginWyTmsQ9v4IbZT+mD12q/OWyFcq1rca8PdCE6OoGcrBNOTJ4vz4R -nAuknZoh8/CbCzB428Hch0P+vGOaysXCHMnHjf87ElgI5rY97HosTvuDls4MPGmH -VHOkc8KT/1EQrBVUAdj8BbGJoX90g5pJ19xOe4pIb4tF9g== ------END CERTIFICATE----- - -# Issuer: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. - for authorized use only -# Subject: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. 
- for authorized use only -# Label: "Entrust Root Certification Authority - EC1" -# Serial: 51543124481930649114116133369 -# MD5 Fingerprint: b6:7e:1d:f0:58:c5:49:6c:24:3b:3d:ed:98:18:ed:bc -# SHA1 Fingerprint: 20:d8:06:40:df:9b:25:f5:12:25:3a:11:ea:f7:59:8a:eb:14:b5:47 -# SHA256 Fingerprint: 02:ed:0e:b2:8c:14:da:45:16:5c:56:67:91:70:0d:64:51:d7:fb:56:f0:b2:ab:1d:3b:8e:b0:70:e5:6e:df:f5 ------BEGIN CERTIFICATE----- -MIIC+TCCAoCgAwIBAgINAKaLeSkAAAAAUNCR+TAKBggqhkjOPQQDAzCBvzELMAkG -A1UEBhMCVVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3 -d3cuZW50cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDEyIEVu -dHJ1c3QsIEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEzMDEGA1UEAxMq -RW50cnVzdCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRUMxMB4XDTEy -MTIxODE1MjUzNloXDTM3MTIxODE1NTUzNlowgb8xCzAJBgNVBAYTAlVTMRYwFAYD -VQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1c3QubmV0 -L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxMiBFbnRydXN0LCBJbmMuIC0g -Zm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMzAxBgNVBAMTKkVudHJ1c3QgUm9vdCBD -ZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEVDMTB2MBAGByqGSM49AgEGBSuBBAAi -A2IABIQTydC6bUF74mzQ61VfZgIaJPRbiWlH47jCffHyAsWfoPZb1YsGGYZPUxBt -ByQnoaD41UcZYUx9ypMn6nQM72+WCf5j7HBdNq1nd67JnXxVRDqiY1Ef9eNi1KlH -Bz7MIKNCMEAwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0O -BBYEFLdj5xrdjekIplWDpOBqUEFlEUJJMAoGCCqGSM49BAMDA2cAMGQCMGF52OVC -R98crlOZF7ZvHH3hvxGU0QOIdeSNiaSKd0bebWHvAvX7td/M/k7//qnmpwIwW5nX -hTcGtXsI/esni0qU+eH6p44mCOh8kmhtc9hvJqwhAriZtyZBWyVgrtBIGu4G ------END CERTIFICATE----- - -# Issuer: CN=CFCA EV ROOT O=China Financial Certification Authority -# Subject: CN=CFCA EV ROOT O=China Financial Certification Authority -# Label: "CFCA EV ROOT" -# Serial: 407555286 -# MD5 Fingerprint: 74:e1:b6:ed:26:7a:7a:44:30:33:94:ab:7b:27:81:30 -# SHA1 Fingerprint: e2:b8:29:4b:55:84:ab:6b:58:c2:90:46:6c:ac:3f:b8:39:8f:84:83 -# SHA256 Fingerprint: 5c:c3:d7:8e:4e:1d:5e:45:54:7a:04:e6:87:3e:64:f9:0c:f9:53:6d:1c:cc:2e:f8:00:f3:55:c4:c5:fd:70:fd ------BEGIN CERTIFICATE----- -MIIFjTCCA3WgAwIBAgIEGErM1jANBgkqhkiG9w0BAQsFADBWMQswCQYDVQQGEwJD -TjEwMC4GA1UECgwnQ2hpbmEgRmluYW5jaWFsIENlcnRpZmljYXRpb24gQXV0aG9y -aXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJPT1QwHhcNMTIwODA4MDMwNzAxWhcNMjkx -MjMxMDMwNzAxWjBWMQswCQYDVQQGEwJDTjEwMC4GA1UECgwnQ2hpbmEgRmluYW5j -aWFsIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJP -T1QwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDXXWvNED8fBVnVBU03 -sQ7smCuOFR36k0sXgiFxEFLXUWRwFsJVaU2OFW2fvwwbwuCjZ9YMrM8irq93VCpL -TIpTUnrD7i7es3ElweldPe6hL6P3KjzJIx1qqx2hp/Hz7KDVRM8Vz3IvHWOX6Jn5 -/ZOkVIBMUtRSqy5J35DNuF++P96hyk0g1CXohClTt7GIH//62pCfCqktQT+x8Rgp -7hZZLDRJGqgG16iI0gNyejLi6mhNbiyWZXvKWfry4t3uMCz7zEasxGPrb382KzRz -EpR/38wmnvFyXVBlWY9ps4deMm/DGIq1lY+wejfeWkU7xzbh72fROdOXW3NiGUgt -hxwG+3SYIElz8AXSG7Ggo7cbcNOIabla1jj0Ytwli3i/+Oh+uFzJlU9fpy25IGvP -a931DfSCt/SyZi4QKPaXWnuWFo8BGS1sbn85WAZkgwGDg8NNkt0yxoekN+kWzqot -aK8KgWU6cMGbrU1tVMoqLUuFG7OA5nBFDWteNfB/O7ic5ARwiRIlk9oKmSJgamNg -TnYGmE69g60dWIolhdLHZR4tjsbftsbhf4oEIRUpdPA+nJCdDC7xij5aqgwJHsfV -PKPtl8MeNPo4+QgO48BdK4PRVmrJtqhUUy54Mmc9gn900PvhtgVguXDbjgv5E1hv -cWAQUhC5wUEJ73IfZzF4/5YFjQIDAQABo2MwYTAfBgNVHSMEGDAWgBTj/i39KNAL -tbq2osS/BqoFjJP7LzAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAd -BgNVHQ4EFgQU4/4t/SjQC7W6tqLEvwaqBYyT+y8wDQYJKoZIhvcNAQELBQADggIB -ACXGumvrh8vegjmWPfBEp2uEcwPenStPuiB/vHiyz5ewG5zz13ku9Ui20vsXiObT -ej/tUxPQ4i9qecsAIyjmHjdXNYmEwnZPNDatZ8POQQaIxffu2Bq41gt/UP+TqhdL -jOztUmCypAbqTuv0axn96/Ua4CUqmtzHQTb3yHQFhDmVOdYLO6Qn+gjYXB74BGBS -ESgoA//vU2YApUo0FmZ8/Qmkrp5nGm9BC2sGE5uPhnEFtC+NiWYzKXZUmhH4J/qy 
-P5Hgzg0b8zAarb8iXRvTvyUFTeGSGn+ZnzxEk8rUQElsgIfXBDrDMlI1Dlb4pd19 -xIsNER9Tyx6yF7Zod1rg1MvIB671Oi6ON7fQAUtDKXeMOZePglr4UeWJoBjnaH9d -Ci77o0cOPaYjesYBx4/IXr9tgFa+iiS6M+qf4TIRnvHST4D2G0CvOJ4RUHlzEhLN -5mydLIhyPDCBBpEi6lmt2hkuIsKNuYyH4Ga8cyNfIWRjgEj1oDwYPZTISEEdQLpe -/v5WOaHIz16eGWRGENoXkbcFgKyLmZJ956LYBws2J+dIeWCKw9cTXPhyQN9Ky8+Z -AAoACxGV2lZFA4gKn2fQ1XmxqI1AbQ3CekD6819kR5LLU7m7Wc5P/dAVUwHY3+vZ -5nbv0CO7O6l5s9UCKc2Jo5YPSjXnTkLAdc0Hz+Ys63su ------END CERTIFICATE----- - -# Issuer: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed -# Subject: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed -# Label: "OISTE WISeKey Global Root GB CA" -# Serial: 157768595616588414422159278966750757568 -# MD5 Fingerprint: a4:eb:b9:61:28:2e:b7:2f:98:b0:35:26:90:99:51:1d -# SHA1 Fingerprint: 0f:f9:40:76:18:d3:d7:6a:4b:98:f0:a8:35:9e:0c:fd:27:ac:cc:ed -# SHA256 Fingerprint: 6b:9c:08:e8:6e:b0:f7:67:cf:ad:65:cd:98:b6:21:49:e5:49:4a:67:f5:84:5e:7b:d1:ed:01:9f:27:b8:6b:d6 ------BEGIN CERTIFICATE----- -MIIDtTCCAp2gAwIBAgIQdrEgUnTwhYdGs/gjGvbCwDANBgkqhkiG9w0BAQsFADBt -MQswCQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUg -Rm91bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9i -YWwgUm9vdCBHQiBDQTAeFw0xNDEyMDExNTAwMzJaFw0zOTEyMDExNTEwMzFaMG0x -CzAJBgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBG -b3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2Jh -bCBSb290IEdCIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2Be3 -HEokKtaXscriHvt9OO+Y9bI5mE4nuBFde9IllIiCFSZqGzG7qFshISvYD06fWvGx -WuR51jIjK+FTzJlFXHtPrby/h0oLS5daqPZI7H17Dc0hBt+eFf1Biki3IPShehtX -1F1Q/7pn2COZH8g/497/b1t3sWtuuMlk9+HKQUYOKXHQuSP8yYFfTvdv37+ErXNk -u7dCjmn21HYdfp2nuFeKUWdy19SouJVUQHMD9ur06/4oQnc/nSMbsrY9gBQHTC5P -99UKFg29ZkM3fiNDecNAhvVMKdqOmq0NpQSHiB6F4+lT1ZvIiwNjeOvgGUpuuy9r -M2RYk61pv48b74JIxwIDAQABo1EwTzALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUw -AwEB/zAdBgNVHQ4EFgQUNQ/INmNe4qPs+TtmFc5RUuORmj0wEAYJKwYBBAGCNxUB -BAMCAQAwDQYJKoZIhvcNAQELBQADggEBAEBM+4eymYGQfp3FsLAmzYh7KzKNbrgh -cViXfa43FK8+5/ea4n32cZiZBKpDdHij40lhPnOMTZTg+XHEthYOU3gf1qKHLwI5 -gSk8rxWYITD+KJAAjNHhy/peyP34EEY7onhCkRd0VQreUGdNZtGn//3ZwLWoo4rO -ZvUPQ82nK1d7Y0Zqqi5S2PTt4W2tKZB4SLrhI6qjiey1q5bAtEuiHZeeevJuQHHf -aPFlTc58Bd9TZaml8LGXBHAVRgOY1NK/VLSgWH1Sb9pWJmLU2NuJMW8c8CLC02Ic -Nc1MaRVUGpCY3useX8p3x8uOPUNpnJpY0CQ73xtAln41rYHHTnG6iBM= ------END CERTIFICATE----- - -# Issuer: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A. -# Subject: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A. 
-# Label: "SZAFIR ROOT CA2" -# Serial: 357043034767186914217277344587386743377558296292 -# MD5 Fingerprint: 11:64:c1:89:b0:24:b1:8c:b1:07:7e:89:9e:51:9e:99 -# SHA1 Fingerprint: e2:52:fa:95:3f:ed:db:24:60:bd:6e:28:f3:9c:cc:cf:5e:b3:3f:de -# SHA256 Fingerprint: a1:33:9d:33:28:1a:0b:56:e5:57:d3:d3:2b:1c:e7:f9:36:7e:b0:94:bd:5f:a7:2a:7e:50:04:c8:de:d7:ca:fe ------BEGIN CERTIFICATE----- -MIIDcjCCAlqgAwIBAgIUPopdB+xV0jLVt+O2XwHrLdzk1uQwDQYJKoZIhvcNAQEL -BQAwUTELMAkGA1UEBhMCUEwxKDAmBgNVBAoMH0tyYWpvd2EgSXpiYSBSb3psaWN6 -ZW5pb3dhIFMuQS4xGDAWBgNVBAMMD1NaQUZJUiBST09UIENBMjAeFw0xNTEwMTkw -NzQzMzBaFw0zNTEwMTkwNzQzMzBaMFExCzAJBgNVBAYTAlBMMSgwJgYDVQQKDB9L -cmFqb3dhIEl6YmEgUm96bGljemVuaW93YSBTLkEuMRgwFgYDVQQDDA9TWkFGSVIg -Uk9PVCBDQTIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC3vD5QqEvN -QLXOYeeWyrSh2gwisPq1e3YAd4wLz32ohswmUeQgPYUM1ljj5/QqGJ3a0a4m7utT -3PSQ1hNKDJA8w/Ta0o4NkjrcsbH/ON7Dui1fgLkCvUqdGw+0w8LBZwPd3BucPbOw -3gAeqDRHu5rr/gsUvTaE2g0gv/pby6kWIK05YO4vdbbnl5z5Pv1+TW9NL++IDWr6 -3fE9biCloBK0TXC5ztdyO4mTp4CEHCdJckm1/zuVnsHMyAHs6A6KCpbns6aH5db5 -BSsNl0BwPLqsdVqc1U2dAgrSS5tmS0YHF2Wtn2yIANwiieDhZNRnvDF5YTy7ykHN -XGoAyDw4jlivAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD -AgEGMB0GA1UdDgQWBBQuFqlKGLXLzPVvUPMjX/hd56zwyDANBgkqhkiG9w0BAQsF -AAOCAQEAtXP4A9xZWx126aMqe5Aosk3AM0+qmrHUuOQn/6mWmc5G4G18TKI4pAZw -8PRBEew/R40/cof5O/2kbytTAOD/OblqBw7rHRz2onKQy4I9EYKL0rufKq8h5mOG -nXkZ7/e7DDWQw4rtTw/1zBLZpD67oPwglV9PJi8RI4NOdQcPv5vRtB3pEAT+ymCP -oky4rc/hkA/NrgrHXXu3UNLUYfrVFdvXn4dRVOul4+vJhaAlIDf7js4MNIThPIGy -d05DpYhfhmehPea0XGG2Ptv+tyjFogeutcrKjSoS75ftwjCkySp6+/NNIxuZMzSg -LvWpCz/UXeHPhJ/iGcJfitYgHuNztw== ------END CERTIFICATE----- - -# Issuer: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. OU=Certum Certification Authority -# Subject: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. 
OU=Certum Certification Authority -# Label: "Certum Trusted Network CA 2" -# Serial: 44979900017204383099463764357512596969 -# MD5 Fingerprint: 6d:46:9e:d9:25:6d:08:23:5b:5e:74:7d:1e:27:db:f2 -# SHA1 Fingerprint: d3:dd:48:3e:2b:bf:4c:05:e8:af:10:f5:fa:76:26:cf:d3:dc:30:92 -# SHA256 Fingerprint: b6:76:f2:ed:da:e8:77:5c:d3:6c:b0:f6:3c:d1:d4:60:39:61:f4:9e:62:65:ba:01:3a:2f:03:07:b6:d0:b8:04 ------BEGIN CERTIFICATE----- -MIIF0jCCA7qgAwIBAgIQIdbQSk8lD8kyN/yqXhKN6TANBgkqhkiG9w0BAQ0FADCB -gDELMAkGA1UEBhMCUEwxIjAgBgNVBAoTGVVuaXpldG8gVGVjaG5vbG9naWVzIFMu -QS4xJzAlBgNVBAsTHkNlcnR1bSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTEkMCIG -A1UEAxMbQ2VydHVtIFRydXN0ZWQgTmV0d29yayBDQSAyMCIYDzIwMTExMDA2MDgz -OTU2WhgPMjA0NjEwMDYwODM5NTZaMIGAMQswCQYDVQQGEwJQTDEiMCAGA1UEChMZ -VW5pemV0byBUZWNobm9sb2dpZXMgUy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRp -ZmljYXRpb24gQXV0aG9yaXR5MSQwIgYDVQQDExtDZXJ0dW0gVHJ1c3RlZCBOZXR3 -b3JrIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC9+Xj45tWA -DGSdhhuWZGc/IjoedQF97/tcZ4zJzFxrqZHmuULlIEub2pt7uZld2ZuAS9eEQCsn -0+i6MLs+CRqnSZXvK0AkwpfHp+6bJe+oCgCXhVqqndwpyeI1B+twTUrWwbNWuKFB -OJvR+zF/j+Bf4bE/D44WSWDXBo0Y+aomEKsq09DRZ40bRr5HMNUuctHFY9rnY3lE -fktjJImGLjQ/KUxSiyqnwOKRKIm5wFv5HdnnJ63/mgKXwcZQkpsCLL2puTRZCr+E -Sv/f/rOf69me4Jgj7KZrdxYq28ytOxykh9xGc14ZYmhFV+SQgkK7QtbwYeDBoz1m -o130GO6IyY0XRSmZMnUCMe4pJshrAua1YkV/NxVaI2iJ1D7eTiew8EAMvE0Xy02i -sx7QBlrd9pPPV3WZ9fqGGmd4s7+W/jTcvedSVuWz5XV710GRBdxdaeOVDUO5/IOW -OZV7bIBaTxNyxtd9KXpEulKkKtVBRgkg/iKgtlswjbyJDNXXcPiHUv3a76xRLgez -Tv7QCdpw75j6VuZt27VXS9zlLCUVyJ4ueE742pyehizKV/Ma5ciSixqClnrDvFAS -adgOWkaLOusm+iPJtrCBvkIApPjW/jAux9JG9uWOdf3yzLnQh1vMBhBgu4M1t15n -3kfsmUjxpKEV/q2MYo45VU85FrmxY53/twIDAQABo0IwQDAPBgNVHRMBAf8EBTAD -AQH/MB0GA1UdDgQWBBS2oVQ5AsOgP46KvPrU+Bym0ToO/TAOBgNVHQ8BAf8EBAMC -AQYwDQYJKoZIhvcNAQENBQADggIBAHGlDs7k6b8/ONWJWsQCYftMxRQXLYtPU2sQ -F/xlhMcQSZDe28cmk4gmb3DWAl45oPePq5a1pRNcgRRtDoGCERuKTsZPpd1iHkTf -CVn0W3cLN+mLIMb4Ck4uWBzrM9DPhmDJ2vuAL55MYIR4PSFk1vtBHxgP58l1cb29 -XN40hz5BsA72udY/CROWFC/emh1auVbONTqwX3BNXuMp8SMoclm2q8KMZiYcdywm -djWLKKdpoPk79SPdhRB0yZADVpHnr7pH1BKXESLjokmUbOe3lEu6LaTaM4tMpkT/ -WjzGHWTYtTHkpjx6qFcL2+1hGsvxznN3Y6SHb0xRONbkX8eftoEq5IVIeVheO/jb -AoJnwTnbw3RLPTYe+SmTiGhbqEQZIfCn6IENLOiTNrQ3ssqwGyZ6miUfmpqAnksq -P/ujmv5zMnHCnsZy4YpoJ/HkD7TETKVhk/iXEAcqMCWpuchxuO9ozC1+9eB+D4Ko -b7a6bINDd82Kkhehnlt4Fj1F4jNy3eFmypnTycUm/Q1oBEauttmbjL4ZvrHG8hnj -XALKLNhvSgfZyTXaQHXyxKcZb55CEJh15pWLYLztxRLXis7VmFxWlgPF7ncGNf/P -5O4/E2Hu29othfDNrp2yGAlFw5Khchf8R7agCyzxxN5DaAhqXzvwdmP7zAYspsbi -DrW5viSP ------END CERTIFICATE----- - -# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority -# Subject: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. 
Authority -# Label: "Hellenic Academic and Research Institutions RootCA 2015" -# Serial: 0 -# MD5 Fingerprint: ca:ff:e2:db:03:d9:cb:4b:e9:0f:ad:84:fd:7b:18:ce -# SHA1 Fingerprint: 01:0c:06:95:a6:98:19:14:ff:bf:5f:c6:b0:b6:95:ea:29:e9:12:a6 -# SHA256 Fingerprint: a0:40:92:9a:02:ce:53:b4:ac:f4:f2:ff:c6:98:1c:e4:49:6f:75:5e:6d:45:fe:0b:2a:69:2b:cd:52:52:3f:36 ------BEGIN CERTIFICATE----- -MIIGCzCCA/OgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBpjELMAkGA1UEBhMCR1Ix -DzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5k -IFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxQDA+BgNVBAMT -N0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgUm9v -dENBIDIwMTUwHhcNMTUwNzA3MTAxMTIxWhcNNDAwNjMwMTAxMTIxWjCBpjELMAkG -A1UEBhMCR1IxDzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNh -ZGVtaWMgYW5kIFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkx -QDA+BgNVBAMTN0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1 -dGlvbnMgUm9vdENBIDIwMTUwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC -AQDC+Kk/G4n8PDwEXT2QNrCROnk8ZlrvbTkBSRq0t89/TSNTt5AA4xMqKKYx8ZEA -4yjsriFBzh/a/X0SWwGDD7mwX5nh8hKDgE0GPt+sr+ehiGsxr/CL0BgzuNtFajT0 -AoAkKAoCFZVedioNmToUW/bLy1O8E00BiDeUJRtCvCLYjqOWXjrZMts+6PAQZe10 -4S+nfK8nNLspfZu2zwnI5dMK/IhlZXQK3HMcXM1AsRzUtoSMTFDPaI6oWa7CJ06C -ojXdFPQf/7J31Ycvqm59JCfnxssm5uX+Zwdj2EUN3TpZZTlYepKZcj2chF6IIbjV -9Cz82XBST3i4vTwri5WY9bPRaM8gFH5MXF/ni+X1NYEZN9cRCLdmvtNKzoNXADrD -gfgXy5I2XdGj2HUb4Ysn6npIQf1FGQatJ5lOwXBH3bWfgVMS5bGMSF0xQxfjjMZ6 -Y5ZLKTBOhE5iGV48zpeQpX8B653g+IuJ3SWYPZK2fu/Z8VFRfS0myGlZYeCsargq -NhEEelC9MoS+L9xy1dcdFkfkR2YgP/SWxa+OAXqlD3pk9Q0Yh9muiNX6hME6wGko -LfINaFGq46V3xqSQDqE3izEjR8EJCOtu93ib14L8hCCZSRm2Ekax+0VVFqmjZayc -Bw/qa9wfLgZy7IaIEuQt218FL+TwA9MmM+eAws1CoRc0CwIDAQABo0IwQDAPBgNV -HRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUcRVnyMjJvXVd -ctA4GGqd83EkVAswDQYJKoZIhvcNAQELBQADggIBAHW7bVRLqhBYRjTyYtcWNl0I -XtVsyIe9tC5G8jH4fOpCtZMWVdyhDBKg2mF+D1hYc2Ryx+hFjtyp8iY/xnmMsVMI -M4GwVhO+5lFc2JsKT0ucVlMC6U/2DWDqTUJV6HwbISHTGzrMd/K4kPFox/la/vot -9L/J9UUbzjgQKjeKeaO04wlshYaT/4mWJ3iBj2fjRnRUjtkNaeJK9E10A/+yd+2V -Z5fkscWrv2oj6NSU4kQoYsRL4vDY4ilrGnB+JGGTe08DMiUNRSQrlrRGar9KC/ea -j8GsGsVn82800vpzY4zvFrCopEYq+OsS7HK07/grfoxSwIuEVPkvPuNVqNxmsdnh -X9izjFk0WaSrT2y7HxjbdavYy5LNlDhhDgcGH0tGEPEVvo2FXDtKK4F5D7Rpn0lQ -l033DlZdwJVqwjbDG2jJ9SrcR5q+ss7FJej6A7na+RZukYT1HCjI/CbM1xyQVqdf -bzoEvM14iQuODy+jqk+iGxI9FghAD/FGTNeqewjBCvVtJ94Cj8rDtSvK6evIIVM4 -pcw72Hc3MKJP2W/R8kCtQXoXxdZKNYm3QdV8hn9VTYNKpXMgwDqvkPGaJI7ZjnHK -e7iG2rKPmT4dEw0SEe7Uq/DpFXYC5ODfqiAeW2GFZECpkJcNrVPSWh2HagCXZWK0 -vm9qp/UsQu0yrbYhnr68 ------END CERTIFICATE----- - -# Issuer: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority -# Subject: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. 
Authority -# Label: "Hellenic Academic and Research Institutions ECC RootCA 2015" -# Serial: 0 -# MD5 Fingerprint: 81:e5:b4:17:eb:c2:f5:e1:4b:0d:41:7b:49:92:fe:ef -# SHA1 Fingerprint: 9f:f1:71:8d:92:d5:9a:f3:7d:74:97:b4:bc:6f:84:68:0b:ba:b6:66 -# SHA256 Fingerprint: 44:b5:45:aa:8a:25:e6:5a:73:ca:15:dc:27:fc:36:d2:4c:1c:b9:95:3a:06:65:39:b1:15:82:dc:48:7b:48:33 ------BEGIN CERTIFICATE----- -MIICwzCCAkqgAwIBAgIBADAKBggqhkjOPQQDAjCBqjELMAkGA1UEBhMCR1IxDzAN -BgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl -c2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxRDBCBgNVBAMTO0hl -bGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgRUNDIFJv -b3RDQSAyMDE1MB4XDTE1MDcwNzEwMzcxMloXDTQwMDYzMDEwMzcxMlowgaoxCzAJ -BgNVBAYTAkdSMQ8wDQYDVQQHEwZBdGhlbnMxRDBCBgNVBAoTO0hlbGxlbmljIEFj -YWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgQ2VydC4gQXV0aG9yaXR5 -MUQwQgYDVQQDEztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0 -dXRpb25zIEVDQyBSb290Q0EgMjAxNTB2MBAGByqGSM49AgEGBSuBBAAiA2IABJKg -QehLgoRc4vgxEZmGZE4JJS+dQS8KrjVPdJWyUWRrjWvmP3CV8AVER6ZyOFB2lQJa -jq4onvktTpnvLEhvTCUp6NFxW98dwXU3tNf6e3pCnGoKVlp8aQuqgAkkbH7BRqNC -MEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFLQi -C4KZJAEOnLvkDv2/+5cgk5kqMAoGCCqGSM49BAMCA2cAMGQCMGfOFmI4oqxiRaep -lSTAGiecMjvAwNW6qef4BENThe5SId6d9SWDPp5YSy/XZxMOIQIwBeF1Ad5o7Sof -TUwJCA3sS61kFyjndc5FZXIhF8siQQ6ME5g4mlRtm8rifOoCWCKR ------END CERTIFICATE----- - -# Issuer: CN=ISRG Root X1 O=Internet Security Research Group -# Subject: CN=ISRG Root X1 O=Internet Security Research Group -# Label: "ISRG Root X1" -# Serial: 172886928669790476064670243504169061120 -# MD5 Fingerprint: 0c:d2:f9:e0:da:17:73:e9:ed:86:4d:a5:e3:70:e7:4e -# SHA1 Fingerprint: ca:bd:2a:79:a1:07:6a:31:f2:1d:25:36:35:cb:03:9d:43:29:a5:e8 -# SHA256 Fingerprint: 96:bc:ec:06:26:49:76:f3:74:60:77:9a:cf:28:c5:a7:cf:e8:a3:c0:aa:e1:1a:8f:fc:ee:05:c0:bd:df:08:c6 ------BEGIN CERTIFICATE----- -MIIFazCCA1OgAwIBAgIRAIIQz7DSQONZRGPgu2OCiwAwDQYJKoZIhvcNAQELBQAw -TzELMAkGA1UEBhMCVVMxKTAnBgNVBAoTIEludGVybmV0IFNlY3VyaXR5IFJlc2Vh -cmNoIEdyb3VwMRUwEwYDVQQDEwxJU1JHIFJvb3QgWDEwHhcNMTUwNjA0MTEwNDM4 -WhcNMzUwNjA0MTEwNDM4WjBPMQswCQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJu -ZXQgU2VjdXJpdHkgUmVzZWFyY2ggR3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBY -MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK3oJHP0FDfzm54rVygc -h77ct984kIxuPOZXoHj3dcKi/vVqbvYATyjb3miGbESTtrFj/RQSa78f0uoxmyF+ -0TM8ukj13Xnfs7j/EvEhmkvBioZxaUpmZmyPfjxwv60pIgbz5MDmgK7iS4+3mX6U -A5/TR5d8mUgjU+g4rk8Kb4Mu0UlXjIB0ttov0DiNewNwIRt18jA8+o+u3dpjq+sW -T8KOEUt+zwvo/7V3LvSye0rgTBIlDHCNAymg4VMk7BPZ7hm/ELNKjD+Jo2FR3qyH -B5T0Y3HsLuJvW5iB4YlcNHlsdu87kGJ55tukmi8mxdAQ4Q7e2RCOFvu396j3x+UC -B5iPNgiV5+I3lg02dZ77DnKxHZu8A/lJBdiB3QW0KtZB6awBdpUKD9jf1b0SHzUv -KBds0pjBqAlkd25HN7rOrFleaJ1/ctaJxQZBKT5ZPt0m9STJEadao0xAH0ahmbWn -OlFuhjuefXKnEgV4We0+UXgVCwOPjdAvBbI+e0ocS3MFEvzG6uBQE3xDk3SzynTn -jh8BCNAw1FtxNrQHusEwMFxIt4I7mKZ9YIqioymCzLq9gwQbooMDQaHWBfEbwrbw -qHyGO0aoSCqI3Haadr8faqU9GY/rOPNk3sgrDQoo//fb4hVC1CLQJ13hef4Y53CI -rU7m2Ys6xt0nUW7/vGT1M0NPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV -HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR5tFnme7bl5AFzgAiIyBpY9umbbjANBgkq -hkiG9w0BAQsFAAOCAgEAVR9YqbyyqFDQDLHYGmkgJykIrGF1XIpu+ILlaS/V9lZL -ubhzEFnTIZd+50xx+7LSYK05qAvqFyFWhfFQDlnrzuBZ6brJFe+GnY+EgPbk6ZGQ -3BebYhtF8GaV0nxvwuo77x/Py9auJ/GpsMiu/X1+mvoiBOv/2X/qkSsisRcOj/KK -NFtY2PwByVS5uCbMiogziUwthDyC3+6WVwW6LLv3xLfHTjuCvjHIInNzktHCgKQ5 -ORAzI4JMPJ+GslWYHb4phowim57iaztXOoJwTdwJx4nLCgdNbOhdjsnvzqvHu7Ur -TkXWStAmzOVyyghqpZXjFaH3pO3JLF+l+/+sKAIuvtd7u+Nxe5AW0wdeRlN8NwdC -jNPElpzVmbUq4JUagEiuTDkHzsxHpFKVK7q4+63SM1N95R1NbdWhscdCb+ZAJzVc 
-oyi3B43njTOQ5yOf+1CceWxG1bQVs5ZufpsMljq4Ui0/1lvh+wjChP4kqKOJ2qxq -4RgqsahDYVvTH9w7jXbyLeiNdd8XM2w9U/t7y0Ff/9yi0GE44Za4rF2LN9d11TPA -mRGunUHBcnWEvgJBQl9nJEiU0Zsnvgc/ubhPgXRR4Xq37Z0j4r7g1SgEEzwxA57d -emyPxgcYxn/eR44/KJ4EBs+lVDR3veyJm+kXQ99b21/+jh5Xos1AnX5iItreGCc= ------END CERTIFICATE----- - -# Issuer: O=FNMT-RCM OU=AC RAIZ FNMT-RCM -# Subject: O=FNMT-RCM OU=AC RAIZ FNMT-RCM -# Label: "AC RAIZ FNMT-RCM" -# Serial: 485876308206448804701554682760554759 -# MD5 Fingerprint: e2:09:04:b4:d3:bd:d1:a0:14:fd:1a:d2:47:c4:57:1d -# SHA1 Fingerprint: ec:50:35:07:b2:15:c4:95:62:19:e2:a8:9a:5b:42:99:2c:4c:2c:20 -# SHA256 Fingerprint: eb:c5:57:0c:29:01:8c:4d:67:b1:aa:12:7b:af:12:f7:03:b4:61:1e:bc:17:b7:da:b5:57:38:94:17:9b:93:fa ------BEGIN CERTIFICATE----- -MIIFgzCCA2ugAwIBAgIPXZONMGc2yAYdGsdUhGkHMA0GCSqGSIb3DQEBCwUAMDsx -CzAJBgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJ -WiBGTk1ULVJDTTAeFw0wODEwMjkxNTU5NTZaFw0zMDAxMDEwMDAwMDBaMDsxCzAJ -BgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJWiBG -Tk1ULVJDTTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBALpxgHpMhm5/ -yBNtwMZ9HACXjywMI7sQmkCpGreHiPibVmr75nuOi5KOpyVdWRHbNi63URcfqQgf -BBckWKo3Shjf5TnUV/3XwSyRAZHiItQDwFj8d0fsjz50Q7qsNI1NOHZnjrDIbzAz -WHFctPVrbtQBULgTfmxKo0nRIBnuvMApGGWn3v7v3QqQIecaZ5JCEJhfTzC8PhxF -tBDXaEAUwED653cXeuYLj2VbPNmaUtu1vZ5Gzz3rkQUCwJaydkxNEJY7kvqcfw+Z -374jNUUeAlz+taibmSXaXvMiwzn15Cou08YfxGyqxRxqAQVKL9LFwag0Jl1mpdIC -IfkYtwb1TplvqKtMUejPUBjFd8g5CSxJkjKZqLsXF3mwWsXmo8RZZUc1g16p6DUL -mbvkzSDGm0oGObVo/CK67lWMK07q87Hj/LaZmtVC+nFNCM+HHmpxffnTtOmlcYF7 -wk5HlqX2doWjKI/pgG6BU6VtX7hI+cL5NqYuSf+4lsKMB7ObiFj86xsc3i1w4peS -MKGJ47xVqCfWS+2QrYv6YyVZLag13cqXM7zlzced0ezvXg5KkAYmY6252TUtB7p2 -ZSysV4999AeU14ECll2jB0nVetBX+RvnU0Z1qrB5QstocQjpYL05ac70r8NWQMet -UqIJ5G+GR4of6ygnXYMgrwTJbFaai0b1AgMBAAGjgYMwgYAwDwYDVR0TAQH/BAUw -AwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFPd9xf3E6Jobd2Sn9R2gzL+H -YJptMD4GA1UdIAQ3MDUwMwYEVR0gADArMCkGCCsGAQUFBwIBFh1odHRwOi8vd3d3 -LmNlcnQuZm5tdC5lcy9kcGNzLzANBgkqhkiG9w0BAQsFAAOCAgEAB5BK3/MjTvDD -nFFlm5wioooMhfNzKWtN/gHiqQxjAb8EZ6WdmF/9ARP67Jpi6Yb+tmLSbkyU+8B1 -RXxlDPiyN8+sD8+Nb/kZ94/sHvJwnvDKuO+3/3Y3dlv2bojzr2IyIpMNOmqOFGYM -LVN0V2Ue1bLdI4E7pWYjJ2cJj+F3qkPNZVEI7VFY/uY5+ctHhKQV8Xa7pO6kO8Rf -77IzlhEYt8llvhjho6Tc+hj507wTmzl6NLrTQfv6MooqtyuGC2mDOL7Nii4LcK2N -JpLuHvUBKwrZ1pebbuCoGRw6IYsMHkCtA+fdZn71uSANA+iW+YJF1DngoABd15jm -fZ5nc8OaKveri6E6FO80vFIOiZiaBECEHX5FaZNXzuvO+FB8TxxuBEOb+dY7Ixjp -6o7RTUaN8Tvkasq6+yO3m/qZASlaWFot4/nUbQ4mrcFuNLwy+AwF+mWj2zs3gyLp -1txyM/1d8iC9djwj2ij3+RvrWWTV3F9yfiD8zYm1kGdNYno/Tq0dwzn+evQoFt9B -9kiABdcPUXmsEKvU7ANm5mqwujGSQkBqvjrTcuFqN1W8rB2Vt2lh8kORdOag0wok -RqEIr9baRRmW1FMdW4R58MD3R++Lj8UGrp1MYp3/RgT408m2ECVAdf4WqslKYIYv -uu8wd+RU4riEmViAqhOLUTpPSPaLtrM= ------END CERTIFICATE----- - -# Issuer: CN=Amazon Root CA 1 O=Amazon -# Subject: CN=Amazon Root CA 1 O=Amazon -# Label: "Amazon Root CA 1" -# Serial: 143266978916655856878034712317230054538369994 -# MD5 Fingerprint: 43:c6:bf:ae:ec:fe:ad:2f:18:c6:88:68:30:fc:c8:e6 -# SHA1 Fingerprint: 8d:a7:f9:65:ec:5e:fc:37:91:0f:1c:6e:59:fd:c1:cc:6a:6e:de:16 -# SHA256 Fingerprint: 8e:cd:e6:88:4f:3d:87:b1:12:5b:a3:1a:c3:fc:b1:3d:70:16:de:7f:57:cc:90:4f:e1:cb:97:c6:ae:98:19:6e ------BEGIN CERTIFICATE----- -MIIDQTCCAimgAwIBAgITBmyfz5m/jAo54vB4ikPmljZbyjANBgkqhkiG9w0BAQsF -ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6 -b24gUm9vdCBDQSAxMB4XDTE1MDUyNjAwMDAwMFoXDTM4MDExNzAwMDAwMFowOTEL -MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv -b3QgQ0EgMTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALJ4gHHKeNXj 
-ca9HgFB0fW7Y14h29Jlo91ghYPl0hAEvrAIthtOgQ3pOsqTQNroBvo3bSMgHFzZM -9O6II8c+6zf1tRn4SWiw3te5djgdYZ6k/oI2peVKVuRF4fn9tBb6dNqcmzU5L/qw -IFAGbHrQgLKm+a/sRxmPUDgH3KKHOVj4utWp+UhnMJbulHheb4mjUcAwhmahRWa6 -VOujw5H5SNz/0egwLX0tdHA114gk957EWW67c4cX8jJGKLhD+rcdqsq08p8kDi1L -93FcXmn/6pUCyziKrlA4b9v7LWIbxcceVOF34GfID5yHI9Y/QCB/IIDEgEw+OyQm -jgSubJrIqg0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC -AYYwHQYDVR0OBBYEFIQYzIU07LwMlJQuCFmcx7IQTgoIMA0GCSqGSIb3DQEBCwUA -A4IBAQCY8jdaQZChGsV2USggNiMOruYou6r4lK5IpDB/G/wkjUu0yKGX9rbxenDI -U5PMCCjjmCXPI6T53iHTfIUJrU6adTrCC2qJeHZERxhlbI1Bjjt/msv0tadQ1wUs -N+gDS63pYaACbvXy8MWy7Vu33PqUXHeeE6V/Uq2V8viTO96LXFvKWlJbYK8U90vv -o/ufQJVtMVT8QtPHRh8jrdkPSHCa2XV4cdFyQzR1bldZwgJcJmApzyMZFo6IQ6XU -5MsI+yMRQ+hDKXJioaldXgjUkK642M4UwtBV8ob2xJNDd2ZhwLnoQdeXeGADbkpy -rqXRfboQnoZsG4q5WTP468SQvvG5 ------END CERTIFICATE----- - -# Issuer: CN=Amazon Root CA 2 O=Amazon -# Subject: CN=Amazon Root CA 2 O=Amazon -# Label: "Amazon Root CA 2" -# Serial: 143266982885963551818349160658925006970653239 -# MD5 Fingerprint: c8:e5:8d:ce:a8:42:e2:7a:c0:2a:5c:7c:9e:26:bf:66 -# SHA1 Fingerprint: 5a:8c:ef:45:d7:a6:98:59:76:7a:8c:8b:44:96:b5:78:cf:47:4b:1a -# SHA256 Fingerprint: 1b:a5:b2:aa:8c:65:40:1a:82:96:01:18:f8:0b:ec:4f:62:30:4d:83:ce:c4:71:3a:19:c3:9c:01:1e:a4:6d:b4 ------BEGIN CERTIFICATE----- -MIIFQTCCAymgAwIBAgITBmyf0pY1hp8KD+WGePhbJruKNzANBgkqhkiG9w0BAQwF -ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6 -b24gUm9vdCBDQSAyMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTEL -MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv -b3QgQ0EgMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK2Wny2cSkxK -gXlRmeyKy2tgURO8TW0G/LAIjd0ZEGrHJgw12MBvIITplLGbhQPDW9tK6Mj4kHbZ -W0/jTOgGNk3Mmqw9DJArktQGGWCsN0R5hYGCrVo34A3MnaZMUnbqQ523BNFQ9lXg -1dKmSYXpN+nKfq5clU1Imj+uIFptiJXZNLhSGkOQsL9sBbm2eLfq0OQ6PBJTYv9K -8nu+NQWpEjTj82R0Yiw9AElaKP4yRLuH3WUnAnE72kr3H9rN9yFVkE8P7K6C4Z9r -2UXTu/Bfh+08LDmG2j/e7HJV63mjrdvdfLC6HM783k81ds8P+HgfajZRRidhW+me -z/CiVX18JYpvL7TFz4QuK/0NURBs+18bvBt+xa47mAExkv8LV/SasrlX6avvDXbR -8O70zoan4G7ptGmh32n2M8ZpLpcTnqWHsFcQgTfJU7O7f/aS0ZzQGPSSbtqDT6Zj -mUyl+17vIWR6IF9sZIUVyzfpYgwLKhbcAS4y2j5L9Z469hdAlO+ekQiG+r5jqFoz -7Mt0Q5X5bGlSNscpb/xVA1wf+5+9R+vnSUeVC06JIglJ4PVhHvG/LopyboBZ/1c6 -+XUyo05f7O0oYtlNc/LMgRdg7c3r3NunysV+Ar3yVAhU/bQtCSwXVEqY0VThUWcI -0u1ufm8/0i2BWSlmy5A5lREedCf+3euvAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMB -Af8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSwDPBMMPQFWAJI/TPlUq9LhONm -UjANBgkqhkiG9w0BAQwFAAOCAgEAqqiAjw54o+Ci1M3m9Zh6O+oAA7CXDpO8Wqj2 -LIxyh6mx/H9z/WNxeKWHWc8w4Q0QshNabYL1auaAn6AFC2jkR2vHat+2/XcycuUY -+gn0oJMsXdKMdYV2ZZAMA3m3MSNjrXiDCYZohMr/+c8mmpJ5581LxedhpxfL86kS -k5Nrp+gvU5LEYFiwzAJRGFuFjWJZY7attN6a+yb3ACfAXVU3dJnJUH/jWS5E4ywl -7uxMMne0nxrpS10gxdr9HIcWxkPo1LsmmkVwXqkLN1PiRnsn/eBG8om3zEK2yygm -btmlyTrIQRNg91CMFa6ybRoVGld45pIq2WWQgj9sAq+uEjonljYE1x2igGOpm/Hl -urR8FLBOybEfdF849lHqm/osohHUqS0nGkWxr7JOcQ3AWEbWaQbLU8uz/mtBzUF+ -fUwPfHJ5elnNXkoOrJupmHN5fLT0zLm4BwyydFy4x2+IoZCn9Kr5v2c69BoVYh63 -n749sSmvZ6ES8lgQGVMDMBu4Gon2nL2XA46jCfMdiyHxtN/kHNGfZQIG6lzWE7OE -76KlXIx3KadowGuuQNKotOrN8I1LOJwZmhsoVLiJkO/KdYE+HvJkJMcYr07/R54H -9jVlpNMKVv/1F2Rs76giJUmTtt8AF9pYfl3uxRuw0dFfIRDH+fO6AgonB8Xx1sfT -4PsJYGw= ------END CERTIFICATE----- - -# Issuer: CN=Amazon Root CA 3 O=Amazon -# Subject: CN=Amazon Root CA 3 O=Amazon -# Label: "Amazon Root CA 3" -# Serial: 143266986699090766294700635381230934788665930 -# MD5 Fingerprint: a0:d4:ef:0b:f7:b5:d8:49:95:2a:ec:f5:c4:fc:81:87 -# SHA1 Fingerprint: 0d:44:dd:8c:3c:8c:1a:1a:58:75:64:81:e9:0f:2e:2a:ff:b3:d2:6e -# SHA256 Fingerprint: 
18:ce:6c:fe:7b:f1:4e:60:b2:e3:47:b8:df:e8:68:cb:31:d0:2e:bb:3a:da:27:15:69:f5:03:43:b4:6d:b3:a4 ------BEGIN CERTIFICATE----- -MIIBtjCCAVugAwIBAgITBmyf1XSXNmY/Owua2eiedgPySjAKBggqhkjOPQQDAjA5 -MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g -Um9vdCBDQSAzMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG -A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg -Q0EgMzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABCmXp8ZBf8ANm+gBG1bG8lKl -ui2yEujSLtf6ycXYqm0fc4E7O5hrOXwzpcVOho6AF2hiRVd9RFgdszflZwjrZt6j -QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSr -ttvXBp43rDCGB5Fwx5zEGbF4wDAKBggqhkjOPQQDAgNJADBGAiEA4IWSoxe3jfkr -BqWTrBqYaGFy+uGh0PsceGCmQ5nFuMQCIQCcAu/xlJyzlvnrxir4tiz+OpAUFteM -YyRIHN8wfdVoOw== ------END CERTIFICATE----- - -# Issuer: CN=Amazon Root CA 4 O=Amazon -# Subject: CN=Amazon Root CA 4 O=Amazon -# Label: "Amazon Root CA 4" -# Serial: 143266989758080763974105200630763877849284878 -# MD5 Fingerprint: 89:bc:27:d5:eb:17:8d:06:6a:69:d5:fd:89:47:b4:cd -# SHA1 Fingerprint: f6:10:84:07:d6:f8:bb:67:98:0c:c2:e2:44:c2:eb:ae:1c:ef:63:be -# SHA256 Fingerprint: e3:5d:28:41:9e:d0:20:25:cf:a6:90:38:cd:62:39:62:45:8d:a5:c6:95:fb:de:a3:c2:2b:0b:fb:25:89:70:92 ------BEGIN CERTIFICATE----- -MIIB8jCCAXigAwIBAgITBmyf18G7EEwpQ+Vxe3ssyBrBDjAKBggqhkjOPQQDAzA5 -MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g -Um9vdCBDQSA0MB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG -A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg -Q0EgNDB2MBAGByqGSM49AgEGBSuBBAAiA2IABNKrijdPo1MN/sGKe0uoe0ZLY7Bi -9i0b2whxIdIA6GO9mif78DluXeo9pcmBqqNbIJhFXRbb/egQbeOc4OO9X4Ri83Bk -M6DLJC9wuoihKqB1+IGuYgbEgds5bimwHvouXKNCMEAwDwYDVR0TAQH/BAUwAwEB -/zAOBgNVHQ8BAf8EBAMCAYYwHQYDVR0OBBYEFNPsxzplbszh2naaVvuc84ZtV+WB -MAoGCCqGSM49BAMDA2gAMGUCMDqLIfG9fhGt0O9Yli/W651+kI0rz2ZVwyzjKKlw -CkcO8DdZEv8tmZQoTipPNU0zWgIxAOp1AE47xDqUEpHJWEadIRNyp4iciuRMStuW -1KyLa2tJElMzrdfkviT8tQp21KW8EA== ------END CERTIFICATE----- - -# Issuer: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM -# Subject: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM -# Label: "TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1" -# Serial: 1 -# MD5 Fingerprint: dc:00:81:dc:69:2f:3e:2f:b0:3b:f6:3d:5a:91:8e:49 -# SHA1 Fingerprint: 31:43:64:9b:ec:ce:27:ec:ed:3a:3f:0b:8f:0d:e4:e8:91:dd:ee:ca -# SHA256 Fingerprint: 46:ed:c3:68:90:46:d5:3a:45:3f:b3:10:4a:b8:0d:ca:ec:65:8b:26:60:ea:16:29:dd:7e:86:79:90:64:87:16 ------BEGIN CERTIFICATE----- -MIIEYzCCA0ugAwIBAgIBATANBgkqhkiG9w0BAQsFADCB0jELMAkGA1UEBhMCVFIx -GDAWBgNVBAcTD0dlYnplIC0gS29jYWVsaTFCMEAGA1UEChM5VHVya2l5ZSBCaWxp -bXNlbCB2ZSBUZWtub2xvamlrIEFyYXN0aXJtYSBLdXJ1bXUgLSBUVUJJVEFLMS0w -KwYDVQQLEyRLYW11IFNlcnRpZmlrYXN5b24gTWVya2V6aSAtIEthbXUgU00xNjA0 -BgNVBAMTLVRVQklUQUsgS2FtdSBTTSBTU0wgS29rIFNlcnRpZmlrYXNpIC0gU3Vy -dW0gMTAeFw0xMzExMjUwODI1NTVaFw00MzEwMjUwODI1NTVaMIHSMQswCQYDVQQG -EwJUUjEYMBYGA1UEBxMPR2ViemUgLSBLb2NhZWxpMUIwQAYDVQQKEzlUdXJraXll -IEJpbGltc2VsIHZlIFRla25vbG9qaWsgQXJhc3Rpcm1hIEt1cnVtdSAtIFRVQklU -QUsxLTArBgNVBAsTJEthbXUgU2VydGlmaWthc3lvbiBNZXJrZXppIC0gS2FtdSBT -TTE2MDQGA1UEAxMtVFVCSVRBSyBLYW11IFNNIFNTTCBLb2sgU2VydGlmaWthc2kg -LSBTdXJ1bSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAr3UwM6q7 -a9OZLBI3hNmNe5eA027n/5tQlT6QlVZC1xl8JoSNkvoBHToP4mQ4t4y86Ij5iySr -LqP1N+RAjhgleYN1Hzv/bKjFxlb4tO2KRKOrbEz8HdDc72i9z+SqzvBV96I01INr 
-N3wcwv61A+xXzry0tcXtAA9TNypN9E8Mg/uGz8v+jE69h/mniyFXnHrfA2eJLJ2X -YacQuFWQfw4tJzh03+f92k4S400VIgLI4OD8D62K18lUUMw7D8oWgITQUVbDjlZ/ -iSIzL+aFCr2lqBs23tPcLG07xxO9WSMs5uWk99gL7eqQQESolbuT1dCANLZGeA4f -AJNG4e7p+exPFwIDAQABo0IwQDAdBgNVHQ4EFgQUZT/HiobGPN08VFw1+DrtUgxH -V8gwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL -BQADggEBACo/4fEyjq7hmFxLXs9rHmoJ0iKpEsdeV31zVmSAhHqT5Am5EM2fKifh -AHe+SMg1qIGf5LgsyX8OsNJLN13qudULXjS99HMpw+0mFZx+CFOKWI3QSyjfwbPf -IPP54+M638yclNhOT8NrF7f3cuitZjO1JVOr4PhMqZ398g26rrnZqsZr+ZO7rqu4 -lzwDGrpDxpa5RXI4s6ehlj2Re37AIVNMh+3yC1SVUZPVIqUNivGTDj5UDrDYyU7c -8jEyVupk+eq1nRZmQnLzf9OxMUP8pI4X8W0jq5Rm+K37DwhuJi1/FwcJsoz7UMCf -lo3Ptv0AnVoUmr8CRPXBwp8iXqIPoeM= ------END CERTIFICATE----- - -# Issuer: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD. -# Subject: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD. -# Label: "GDCA TrustAUTH R5 ROOT" -# Serial: 9009899650740120186 -# MD5 Fingerprint: 63:cc:d9:3d:34:35:5c:6f:53:a3:e2:08:70:48:1f:b4 -# SHA1 Fingerprint: 0f:36:38:5b:81:1a:25:c3:9b:31:4e:83:ca:e9:34:66:70:cc:74:b4 -# SHA256 Fingerprint: bf:ff:8f:d0:44:33:48:7d:6a:8a:a6:0c:1a:29:76:7a:9f:c2:bb:b0:5e:42:0f:71:3a:13:b9:92:89:1d:38:93 ------BEGIN CERTIFICATE----- -MIIFiDCCA3CgAwIBAgIIfQmX/vBH6nowDQYJKoZIhvcNAQELBQAwYjELMAkGA1UE -BhMCQ04xMjAwBgNVBAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZ -IENPLixMVEQuMR8wHQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMB4XDTE0 -MTEyNjA1MTMxNVoXDTQwMTIzMTE1NTk1OVowYjELMAkGA1UEBhMCQ04xMjAwBgNV -BAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZIENPLixMVEQuMR8w -HQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMIICIjANBgkqhkiG9w0BAQEF -AAOCAg8AMIICCgKCAgEA2aMW8Mh0dHeb7zMNOwZ+Vfy1YI92hhJCfVZmPoiC7XJj -Dp6L3TQsAlFRwxn9WVSEyfFrs0yw6ehGXTjGoqcuEVe6ghWinI9tsJlKCvLriXBj -TnnEt1u9ol2x8kECK62pOqPseQrsXzrj/e+APK00mxqriCZ7VqKChh/rNYmDf1+u -KU49tm7srsHwJ5uu4/Ts765/94Y9cnrrpftZTqfrlYwiOXnhLQiPzLyRuEH3FMEj -qcOtmkVEs7LXLM3GKeJQEK5cy4KOFxg2fZfmiJqwTTQJ9Cy5WmYqsBebnh52nUpm -MUHfP/vFBu8btn4aRjb3ZGM74zkYI+dndRTVdVeSN72+ahsmUPI2JgaQxXABZG12 -ZuGR224HwGGALrIuL4xwp9E7PLOR5G62xDtw8mySlwnNR30YwPO7ng/Wi64HtloP -zgsMR6flPri9fcebNaBhlzpBdRfMK5Z3KpIhHtmVdiBnaM8Nvd/WHwlqmuLMc3Gk -L30SgLdTMEZeS1SZD2fJpcjyIMGC7J0R38IC+xo70e0gmu9lZJIQDSri3nDxGGeC -jGHeuLzRL5z7D9Ar7Rt2ueQ5Vfj4oR24qoAATILnsn8JuLwwoC8N9VKejveSswoA -HQBUlwbgsQfZxw9cZX08bVlX5O2ljelAU58VS6Bx9hoh49pwBiFYFIeFd3mqgnkC -AwEAAaNCMEAwHQYDVR0OBBYEFOLJQJ9NzuiaoXzPDj9lxSmIahlRMA8GA1UdEwEB -/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQDRSVfg -p8xoWLoBDysZzY2wYUWsEe1jUGn4H3++Fo/9nesLqjJHdtJnJO29fDMylyrHBYZm -DRd9FBUb1Ov9H5r2XpdptxolpAqzkT9fNqyL7FeoPueBihhXOYV0GkLH6VsTX4/5 -COmSdI31R9KrO9b7eGZONn356ZLpBN79SWP8bfsUcZNnL0dKt7n/HipzcEYwv1ry -L3ml4Y0M2fmyYzeMN2WFcGpcWwlyua1jPLHd+PwyvzeG5LuOmCd+uh8W4XAR8gPf -JWIyJyYYMoSf/wA6E7qaTfRPuBRwIrHKK5DOKcFw9C+df/KQHtZa37dG/OaG+svg -IHZ6uqbL9XzeYqWxi+7egmaKTjowHz+Ay60nugxe19CxVsp3cbK1daFQqUBDF8Io -2c9Si1vIY9RCPqAzekYu9wogRlR+ak8x8YF+QnQ4ZXMn7sZ8uI7XpTrXmKGcjBBV -09tL7ECQ8s1uV9JiDnxXk7Gnbc2dg7sq5+W2O3FYrf3RRbxake5TFW/TRQl1brqQ -XR4EzzffHqhmsYzmIGrv/EhOdJhCrylvLmrH+33RZjEizIYAfmaDDEL0vTSSwxrq -T8p+ck0LcIymSLumoRT2+1hEmRSuqguTaaApJUqlyyvdimYHFngVV3Eb7PVHhPOe -MTd61X8kreS8/f3MboPoDKi3QWwH3b08hpcv0g== ------END CERTIFICATE----- - -# Issuer: CN=SSL.com Root Certification Authority RSA O=SSL Corporation -# Subject: CN=SSL.com Root Certification Authority RSA O=SSL Corporation -# Label: "SSL.com Root Certification Authority RSA" -# Serial: 8875640296558310041 -# MD5 Fingerprint: 86:69:12:c0:70:f1:ec:ac:ac:c2:d5:bc:a5:5b:a1:29 -# SHA1 Fingerprint: 
b7:ab:33:08:d1:ea:44:77:ba:14:80:12:5a:6f:bd:a9:36:49:0c:bb -# SHA256 Fingerprint: 85:66:6a:56:2e:e0:be:5c:e9:25:c1:d8:89:0a:6f:76:a8:7e:c1:6d:4d:7d:5f:29:ea:74:19:cf:20:12:3b:69 ------BEGIN CERTIFICATE----- -MIIF3TCCA8WgAwIBAgIIeyyb0xaAMpkwDQYJKoZIhvcNAQELBQAwfDELMAkGA1UE -BhMCVVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQK -DA9TU0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZp -Y2F0aW9uIEF1dGhvcml0eSBSU0EwHhcNMTYwMjEyMTczOTM5WhcNNDEwMjEyMTcz -OTM5WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv -dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNv -bSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IFJTQTCCAiIwDQYJKoZIhvcN -AQEBBQADggIPADCCAgoCggIBAPkP3aMrfcvQKv7sZ4Wm5y4bunfh4/WvpOz6Sl2R -xFdHaxh3a3by/ZPkPQ/CFp4LZsNWlJ4Xg4XOVu/yFv0AYvUiCVToZRdOQbngT0aX -qhvIuG5iXmmxX9sqAn78bMrzQdjt0Oj8P2FI7bADFB0QDksZ4LtO7IZl/zbzXmcC -C52GVWH9ejjt/uIZALdvoVBidXQ8oPrIJZK0bnoix/geoeOy3ZExqysdBP+lSgQ3 -6YWkMyv94tZVNHwZpEpox7Ko07fKoZOI68GXvIz5HdkihCR0xwQ9aqkpk8zruFvh -/l8lqjRYyMEjVJ0bmBHDOJx+PYZspQ9AhnwC9FwCTyjLrnGfDzrIM/4RJTXq/LrF -YD3ZfBjVsqnTdXgDciLKOsMf7yzlLqn6niy2UUb9rwPW6mBo6oUWNmuF6R7As93E -JNyAKoFBbZQ+yODJgUEAnl6/f8UImKIYLEJAs/lvOCdLToD0PYFH4Ih86hzOtXVc -US4cK38acijnALXRdMbX5J+tB5O2UzU1/Dfkw/ZdFr4hc96SCvigY2q8lpJqPvi8 -ZVWb3vUNiSYE/CUapiVpy8JtynziWV+XrOvvLsi81xtZPCvM8hnIk2snYxnP/Okm -+Mpxm3+T/jRnhE6Z6/yzeAkzcLpmpnbtG3PrGqUNxCITIJRWCk4sbE6x/c+cCbqi -M+2HAgMBAAGjYzBhMB0GA1UdDgQWBBTdBAkHovV6fVJTEpKV7jiAJQ2mWTAPBgNV -HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFN0ECQei9Xp9UlMSkpXuOIAlDaZZMA4G -A1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAIBgRlCn7Jp0cHh5wYfGV -cpNxJK1ok1iOMq8bs3AD/CUrdIWQPXhq9LmLpZc7tRiRux6n+UBbkflVma8eEdBc -Hadm47GUBwwyOabqG7B52B2ccETjit3E+ZUfijhDPwGFpUenPUayvOUiaPd7nNgs -PgohyC0zrL/FgZkxdMF1ccW+sfAjRfSda/wZY52jvATGGAslu1OJD7OAUN5F7kR/ -q5R4ZJjT9ijdh9hwZXT7DrkT66cPYakylszeu+1jTBi7qUD3oFRuIIhxdRjqerQ0 -cuAjJ3dctpDqhiVAq+8zD8ufgr6iIPv2tS0a5sKFsXQP+8hlAqRSAUfdSSLBv9jr -a6x+3uxjMxW3IwiPxg+NQVrdjsW5j+VFP3jbutIbQLH+cU0/4IGiul607BXgk90I -H37hVZkLId6Tngr75qNJvTYw/ud3sqB1l7UtgYgXZSD32pAAn8lSzDLKNXz1PQ/Y -K9f1JmzJBjSWFupwWRoyeXkLtoh/D1JIPb9s2KJELtFOt3JY04kTlf5Eq/jXixtu -nLwsoFvVagCvXzfh1foQC5ichucmj87w7G6KVwuA406ywKBjYZC6VWg3dGq2ktuf -oYYitmUnDuy2n0Jg5GfCtdpBC8TTi2EbvPofkSvXRAdeuims2cXp71NIWuuA8ShY -Ic2wBlX7Jz9TkHCpBB5XJ7k= ------END CERTIFICATE----- - -# Issuer: CN=SSL.com Root Certification Authority ECC O=SSL Corporation -# Subject: CN=SSL.com Root Certification Authority ECC O=SSL Corporation -# Label: "SSL.com Root Certification Authority ECC" -# Serial: 8495723813297216424 -# MD5 Fingerprint: 2e:da:e4:39:7f:9c:8f:37:d1:70:9f:26:17:51:3a:8e -# SHA1 Fingerprint: c3:19:7c:39:24:e6:54:af:1b:c4:ab:20:95:7a:e2:c3:0e:13:02:6a -# SHA256 Fingerprint: 34:17:bb:06:cc:60:07:da:1b:96:1c:92:0b:8a:b4:ce:3f:ad:82:0e:4a:a3:0b:9a:cb:c4:a7:4e:bd:ce:bc:65 ------BEGIN CERTIFICATE----- -MIICjTCCAhSgAwIBAgIIdebfy8FoW6gwCgYIKoZIzj0EAwIwfDELMAkGA1UEBhMC -VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T -U0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZpY2F0 -aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNDAzWhcNNDEwMjEyMTgxNDAz -WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hvdXN0 -b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNvbSBS -b290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49AgEGBSuB -BAAiA2IABEVuqVDEpiM2nl8ojRfLliJkP9x6jh3MCLOicSS6jkm5BBtHllirLZXI -7Z4INcgn64mMU1jrYor+8FsPazFSY0E7ic3s7LaNGdM0B9y7xgZ/wkWV7Mt/qCPg -CemB+vNH06NjMGEwHQYDVR0OBBYEFILRhXMw5zUE044CkvvlpNHEIejNMA8GA1Ud 
-EwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUgtGFczDnNQTTjgKS++Wk0cQh6M0wDgYD -VR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2cAMGQCMG/n61kRpGDPYbCWe+0F+S8T -kdzt5fxQaxFGRrMcIQBiu77D5+jNB5n5DQtdcj7EqgIwH7y6C+IwJPt8bYBVCpk+ -gA0z5Wajs6O7pdWLjwkspl1+4vAHCGht0nxpbl/f5Wpl ------END CERTIFICATE----- - -# Issuer: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation -# Subject: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation -# Label: "SSL.com EV Root Certification Authority RSA R2" -# Serial: 6248227494352943350 -# MD5 Fingerprint: e1:1e:31:58:1a:ae:54:53:02:f6:17:6a:11:7b:4d:95 -# SHA1 Fingerprint: 74:3a:f0:52:9b:d0:32:a0:f4:4a:83:cd:d4:ba:a9:7b:7c:2e:c4:9a -# SHA256 Fingerprint: 2e:7b:f1:6c:c2:24:85:a7:bb:e2:aa:86:96:75:07:61:b0:ae:39:be:3b:2f:e9:d0:cc:6d:4e:f7:34:91:42:5c ------BEGIN CERTIFICATE----- -MIIF6zCCA9OgAwIBAgIIVrYpzTS8ePYwDQYJKoZIhvcNAQELBQAwgYIxCzAJBgNV -BAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4GA1UEBwwHSG91c3RvbjEYMBYGA1UE -CgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQDDC5TU0wuY29tIEVWIFJvb3QgQ2Vy -dGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIyMB4XDTE3MDUzMTE4MTQzN1oXDTQy -MDUzMDE4MTQzN1owgYIxCzAJBgNVBAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4G -A1UEBwwHSG91c3RvbjEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQD -DC5TU0wuY29tIEVWIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIy -MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAjzZlQOHWTcDXtOlG2mvq -M0fNTPl9fb69LT3w23jhhqXZuglXaO1XPqDQCEGD5yhBJB/jchXQARr7XnAjssuf -OePPxU7Gkm0mxnu7s9onnQqG6YE3Bf7wcXHswxzpY6IXFJ3vG2fThVUCAtZJycxa -4bH3bzKfydQ7iEGonL3Lq9ttewkfokxykNorCPzPPFTOZw+oz12WGQvE43LrrdF9 -HSfvkusQv1vrO6/PgN3B0pYEW3p+pKk8OHakYo6gOV7qd89dAFmPZiw+B6KjBSYR -aZfqhbcPlgtLyEDhULouisv3D5oi53+aNxPN8k0TayHRwMwi8qFG9kRpnMphNQcA -b9ZhCBHqurj26bNg5U257J8UZslXWNvNh2n4ioYSA0e/ZhN2rHd9NCSFg83XqpyQ -Gp8hLH94t2S42Oim9HizVcuE0jLEeK6jj2HdzghTreyI/BXkmg3mnxp3zkyPuBQV -PWKchjgGAGYS5Fl2WlPAApiiECtoRHuOec4zSnaqW4EWG7WK2NAAe15itAnWhmMO -pgWVSbooi4iTsjQc2KRVbrcc0N6ZVTsj9CLg+SlmJuwgUHfbSguPvuUCYHBBXtSu -UDkiFCbLsjtzdFVHB3mBOagwE0TlBIqulhMlQg+5U8Sb/M3kHN48+qvWBkofZ6aY -MBzdLNvcGJVXZsb/XItW9XcCAwEAAaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAfBgNV -HSMEGDAWgBT5YLvU49U09rj1BoAlp3PbRmmonjAdBgNVHQ4EFgQU+WC71OPVNPa4 -9QaAJadz20ZpqJ4wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQBW -s47LCp1Jjr+kxJG7ZhcFUZh1++VQLHqe8RT6q9OKPv+RKY9ji9i0qVQBDb6Thi/5 -Sm3HXvVX+cpVHBK+Rw82xd9qt9t1wkclf7nxY/hoLVUE0fKNsKTPvDxeH3jnpaAg -cLAExbf3cqfeIg29MyVGjGSSJuM+LmOW2puMPfgYCdcDzH2GguDKBAdRUNf/ktUM -79qGn5nX67evaOI5JpS6aLe/g9Pqemc9YmeuJeVy6OLk7K4S9ksrPJ/psEDzOFSz -/bdoyNrGj1E8svuR3Bznm53htw1yj+KkxKl4+esUrMZDBcJlOSgYAsOCsp0FvmXt -ll9ldDz7CTUue5wT/RsPXcdtgTpWD8w74a8CLyKsRspGPKAcTNZEtF4uXBVmCeEm -Kf7GUmG6sXP/wwyc5WxqlD8UykAWlYTzWamsX0xhk23RO8yilQwipmdnRC652dKK -QbNmC1r7fSOl8hqw/96bg5Qu0T/fkreRrwU7ZcegbLHNYhLDkBvjJc40vG93drEQ -w/cFGsDWr3RiSBd3kmmQYRzelYB0VI8YHMPzA9C/pEN1hlMYegouCRw2n5H9gooi -S9EOUCXdywMMF8mDAAhONU2Ki+3wApRmLER/y5UnlhetCTCstnEXbosX9hwJ1C07 -mKVx01QT2WDz9UtmT/rx7iASjbSsV7FFY6GsdqnC+w== ------END CERTIFICATE----- - -# Issuer: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation -# Subject: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation -# Label: "SSL.com EV Root Certification Authority ECC" -# Serial: 3182246526754555285 -# MD5 Fingerprint: 59:53:22:65:83:42:01:54:c0:ce:42:b9:5a:7c:f2:90 -# SHA1 Fingerprint: 4c:dd:51:a3:d1:f5:20:32:14:b0:c6:c5:32:23:03:91:c7:46:42:6d -# SHA256 Fingerprint: 22:a2:c1:f7:bd:ed:70:4c:c1:e7:01:b5:f4:08:c3:10:88:0f:e9:56:b5:de:2a:4a:44:f9:9c:87:3a:25:a7:c8 ------BEGIN CERTIFICATE----- -MIIClDCCAhqgAwIBAgIILCmcWxbtBZUwCgYIKoZIzj0EAwIwfzELMAkGA1UEBhMC 
-VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T -U0wgQ29ycG9yYXRpb24xNDAyBgNVBAMMK1NTTC5jb20gRVYgUm9vdCBDZXJ0aWZp -Y2F0aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNTIzWhcNNDEwMjEyMTgx -NTIzWjB/MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv -dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjE0MDIGA1UEAwwrU1NMLmNv -bSBFViBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49 -AgEGBSuBBAAiA2IABKoSR5CYG/vvw0AHgyBO8TCCogbR8pKGYfL2IWjKAMTH6kMA -VIbc/R/fALhBYlzccBYy3h+Z1MzFB8gIH2EWB1E9fVwHU+M1OIzfzZ/ZLg1Kthku -WnBaBu2+8KGwytAJKaNjMGEwHQYDVR0OBBYEFFvKXuXe0oGqzagtZFG22XKbl+ZP -MA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUW8pe5d7SgarNqC1kUbbZcpuX -5k8wDgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2gAMGUCMQCK5kCJN+vp1RPZ -ytRrJPOwPYdGWBrssd9v+1a6cGvHOMzosYxPD/fxZ3YOg9AeUY8CMD32IygmTMZg -h5Mmm7I1HrrW9zzRHM76JTymGoEVW/MSD2zuZYrJh6j5B+BimoxcSg== ------END CERTIFICATE----- - -# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6 -# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6 -# Label: "GlobalSign Root CA - R6" -# Serial: 1417766617973444989252670301619537 -# MD5 Fingerprint: 4f:dd:07:e4:d4:22:64:39:1e:0c:37:42:ea:d1:c6:ae -# SHA1 Fingerprint: 80:94:64:0e:b5:a7:a1:ca:11:9c:1f:dd:d5:9f:81:02:63:a7:fb:d1 -# SHA256 Fingerprint: 2c:ab:ea:fe:37:d0:6c:a2:2a:ba:73:91:c0:03:3d:25:98:29:52:c4:53:64:73:49:76:3a:3a:b5:ad:6c:cf:69 ------BEGIN CERTIFICATE----- -MIIFgzCCA2ugAwIBAgIORea7A4Mzw4VlSOb/RVEwDQYJKoZIhvcNAQEMBQAwTDEg -MB4GA1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjYxEzARBgNVBAoTCkdsb2Jh -bFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMTQxMjEwMDAwMDAwWhcNMzQx -MjEwMDAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSNjET -MBEGA1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCAiIwDQYJ -KoZIhvcNAQEBBQADggIPADCCAgoCggIBAJUH6HPKZvnsFMp7PPcNCPG0RQssgrRI -xutbPK6DuEGSMxSkb3/pKszGsIhrxbaJ0cay/xTOURQh7ErdG1rG1ofuTToVBu1k -ZguSgMpE3nOUTvOniX9PeGMIyBJQbUJmL025eShNUhqKGoC3GYEOfsSKvGRMIRxD -aNc9PIrFsmbVkJq3MQbFvuJtMgamHvm566qjuL++gmNQ0PAYid/kD3n16qIfKtJw -LnvnvJO7bVPiSHyMEAc4/2ayd2F+4OqMPKq0pPbzlUoSB239jLKJz9CgYXfIWHSw -1CM69106yqLbnQneXUQtkPGBzVeS+n68UARjNN9rkxi+azayOeSsJDa38O+2HBNX -k7besvjihbdzorg1qkXy4J02oW9UivFyVm4uiMVRQkQVlO6jxTiWm05OWgtH8wY2 -SXcwvHE35absIQh1/OZhFj931dmRl4QKbNQCTXTAFO39OfuD8l4UoQSwC+n+7o/h -bguyCLNhZglqsQY6ZZZZwPA1/cnaKI0aEYdwgQqomnUdnjqGBQCe24DWJfncBZ4n -WUx2OVvq+aWh2IMP0f/fMBH5hc8zSPXKbWQULHpYT9NLCEnFlWQaYw55PfWzjMpY -rZxCRXluDocZXFSxZba/jJvcE+kNb7gu3GduyYsRtYQUigAZcIN5kZeR1Bonvzce -MgfYFGM8KEyvAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTAD -AQH/MB0GA1UdDgQWBBSubAWjkxPioufi1xzWx/B/yGdToDAfBgNVHSMEGDAWgBSu -bAWjkxPioufi1xzWx/B/yGdToDANBgkqhkiG9w0BAQwFAAOCAgEAgyXt6NH9lVLN -nsAEoJFp5lzQhN7craJP6Ed41mWYqVuoPId8AorRbrcWc+ZfwFSY1XS+wc3iEZGt -Ixg93eFyRJa0lV7Ae46ZeBZDE1ZXs6KzO7V33EByrKPrmzU+sQghoefEQzd5Mr61 -55wsTLxDKZmOMNOsIeDjHfrYBzN2VAAiKrlNIC5waNrlU/yDXNOd8v9EDERm8tLj -vUYAGm0CuiVdjaExUd1URhxN25mW7xocBFymFe944Hn+Xds+qkxV/ZoVqW/hpvvf -cDDpw+5CRu3CkwWJ+n1jez/QcYF8AOiYrg54NMMl+68KnyBr3TsTjxKM4kEaSHpz -oHdpx7Zcf4LIHv5YGygrqGytXm3ABdJ7t+uA/iU3/gKbaKxCXcPu9czc8FB10jZp -nOZ7BN9uBmm23goJSFmH63sUYHpkqmlD75HHTOwY3WzvUy2MmeFe8nI+z1TIvWfs -pA9MRf/TuTAjB0yPEL+GltmZWrSZVxykzLsViVO6LAUP5MSeGbEYNNVMnbrt9x+v -JJUEeKgDu+6B5dpffItKoZB0JaezPkvILFa9x8jvOOJckvB595yEunQtYQEgfn7R -8k8HWV+LLUNS60YMlOH1Zkd5d9VUWx+tJDfLRVpOoERIyNiwmcUVhAn21klJwGW4 -5hpxbqCo8YLoRT5s1gLXCmeDBVrJpBA= ------END CERTIFICATE----- - -# Issuer: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed -# Subject: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed -# Label: "OISTE 
WISeKey Global Root GC CA" -# Serial: 44084345621038548146064804565436152554 -# MD5 Fingerprint: a9:d6:b9:2d:2f:93:64:f8:a5:69:ca:91:e9:68:07:23 -# SHA1 Fingerprint: e0:11:84:5e:34:de:be:88:81:b9:9c:f6:16:26:d1:96:1f:c3:b9:31 -# SHA256 Fingerprint: 85:60:f9:1c:36:24:da:ba:95:70:b5:fe:a0:db:e3:6f:f1:1a:83:23:be:94:86:85:4f:b3:f3:4a:55:71:19:8d ------BEGIN CERTIFICATE----- -MIICaTCCAe+gAwIBAgIQISpWDK7aDKtARb8roi066jAKBggqhkjOPQQDAzBtMQsw -CQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUgRm91 -bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9iYWwg -Um9vdCBHQyBDQTAeFw0xNzA1MDkwOTQ4MzRaFw00MjA1MDkwOTU4MzNaMG0xCzAJ -BgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBGb3Vu -ZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2JhbCBS -b290IEdDIENBMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAETOlQwMYPchi82PG6s4ni -eUqjFqdrVCTbUf/q9Akkwwsin8tqJ4KBDdLArzHkdIJuyiXZjHWd8dvQmqJLIX4W -p2OQ0jnUsYd4XxiWD1AbNTcPasbc2RNNpI6QN+a9WzGRo1QwUjAOBgNVHQ8BAf8E -BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUSIcUrOPDnpBgOtfKie7T -rYy0UGYwEAYJKwYBBAGCNxUBBAMCAQAwCgYIKoZIzj0EAwMDaAAwZQIwJsdpW9zV -57LnyAyMjMPdeYwbY9XJUpROTYJKcx6ygISpJcBMWm1JKWB4E+J+SOtkAjEA2zQg -Mgj/mkkCtojeFK9dbJlxjRo/i9fgojaGHAeCOnZT/cKi7e97sIBPWA9LUzm9 ------END CERTIFICATE----- - -# Issuer: CN=UCA Global G2 Root O=UniTrust -# Subject: CN=UCA Global G2 Root O=UniTrust -# Label: "UCA Global G2 Root" -# Serial: 124779693093741543919145257850076631279 -# MD5 Fingerprint: 80:fe:f0:c4:4a:f0:5c:62:32:9f:1c:ba:78:a9:50:f8 -# SHA1 Fingerprint: 28:f9:78:16:19:7a:ff:18:25:18:aa:44:fe:c1:a0:ce:5c:b6:4c:8a -# SHA256 Fingerprint: 9b:ea:11:c9:76:fe:01:47:64:c1:be:56:a6:f9:14:b5:a5:60:31:7a:bd:99:88:39:33:82:e5:16:1a:a0:49:3c ------BEGIN CERTIFICATE----- -MIIFRjCCAy6gAwIBAgIQXd+x2lqj7V2+WmUgZQOQ7zANBgkqhkiG9w0BAQsFADA9 -MQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxGzAZBgNVBAMMElVDQSBH -bG9iYWwgRzIgUm9vdDAeFw0xNjAzMTEwMDAwMDBaFw00MDEyMzEwMDAwMDBaMD0x -CzAJBgNVBAYTAkNOMREwDwYDVQQKDAhVbmlUcnVzdDEbMBkGA1UEAwwSVUNBIEds -b2JhbCBHMiBSb290MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxeYr -b3zvJgUno4Ek2m/LAfmZmqkywiKHYUGRO8vDaBsGxUypK8FnFyIdK+35KYmToni9 -kmugow2ifsqTs6bRjDXVdfkX9s9FxeV67HeToI8jrg4aA3++1NDtLnurRiNb/yzm -VHqUwCoV8MmNsHo7JOHXaOIxPAYzRrZUEaalLyJUKlgNAQLx+hVRZ2zA+te2G3/R -VogvGjqNO7uCEeBHANBSh6v7hn4PJGtAnTRnvI3HLYZveT6OqTwXS3+wmeOwcWDc -C/Vkw85DvG1xudLeJ1uK6NjGruFZfc8oLTW4lVYa8bJYS7cSN8h8s+1LgOGN+jIj -tm+3SJUIsUROhYw6AlQgL9+/V087OpAh18EmNVQg7Mc/R+zvWr9LesGtOxdQXGLY -D0tK3Cv6brxzks3sx1DoQZbXqX5t2Okdj4q1uViSukqSKwxW/YDrCPBeKW4bHAyv -j5OJrdu9o54hyokZ7N+1wxrrFv54NkzWbtA+FxyQF2smuvt6L78RHBgOLXMDj6Dl -NaBa4kx1HXHhOThTeEDMg5PXCp6dW4+K5OXgSORIskfNTip1KnvyIvbJvgmRlld6 -iIis7nCs+dwp4wwcOxJORNanTrAmyPPZGpeRaOrvjUYG0lZFWJo8DA+DuAUlwznP -O6Q0ibd5Ei9Hxeepl2n8pndntd978XplFeRhVmUCAwEAAaNCMEAwDgYDVR0PAQH/ -BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFIHEjMz15DD/pQwIX4wV -ZyF0Ad/fMA0GCSqGSIb3DQEBCwUAA4ICAQATZSL1jiutROTL/7lo5sOASD0Ee/oj -L3rtNtqyzm325p7lX1iPyzcyochltq44PTUbPrw7tgTQvPlJ9Zv3hcU2tsu8+Mg5 -1eRfB70VVJd0ysrtT7q6ZHafgbiERUlMjW+i67HM0cOU2kTC5uLqGOiiHycFutfl -1qnN3e92mI0ADs0b+gO3joBYDic/UvuUospeZcnWhNq5NXHzJsBPd+aBJ9J3O5oU -b3n09tDh05S60FdRvScFDcH9yBIw7m+NESsIndTUv4BFFJqIRNow6rSn4+7vW4LV -PtateJLbXDzz2K36uGt/xDYotgIVilQsnLAXc47QN6MUPJiVAAwpBVueSUmxX8fj -y88nZY41F7dXyDDZQVu5FLbowg+UMaeUmMxq67XhJ/UQqAHojhJi6IjMtX9Gl8Cb -EGY4GjZGXyJoPd/JxhMnq1MGrKI8hgZlb7F+sSlEmqO6SWkoaY/X5V+tBIZkbxqg -DMUIYs6Ao9Dz7GjevjPHF1t/gMRMTLGmhIrDO7gJzRSBuhjjVFc2/tsvfEehOjPI -+Vg7RE+xygKJBJYoaMVLuCaJu9YzL1DV/pqJuhgyklTGW+Cd+V7lDSKb9triyCGy 
-YiGqhkCyLmTTX8jjfhFnRR8F/uOi77Oos/N9j/gMHyIfLXC0uAE0djAA5SN4p1bX -UB+K+wb1whnw0A== ------END CERTIFICATE----- - -# Issuer: CN=UCA Extended Validation Root O=UniTrust -# Subject: CN=UCA Extended Validation Root O=UniTrust -# Label: "UCA Extended Validation Root" -# Serial: 106100277556486529736699587978573607008 -# MD5 Fingerprint: a1:f3:5f:43:c6:34:9b:da:bf:8c:7e:05:53:ad:96:e2 -# SHA1 Fingerprint: a3:a1:b0:6f:24:61:23:4a:e3:36:a5:c2:37:fc:a6:ff:dd:f0:d7:3a -# SHA256 Fingerprint: d4:3a:f9:b3:54:73:75:5c:96:84:fc:06:d7:d8:cb:70:ee:5c:28:e7:73:fb:29:4e:b4:1e:e7:17:22:92:4d:24 ------BEGIN CERTIFICATE----- -MIIFWjCCA0KgAwIBAgIQT9Irj/VkyDOeTzRYZiNwYDANBgkqhkiG9w0BAQsFADBH -MQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxJTAjBgNVBAMMHFVDQSBF -eHRlbmRlZCBWYWxpZGF0aW9uIFJvb3QwHhcNMTUwMzEzMDAwMDAwWhcNMzgxMjMx -MDAwMDAwWjBHMQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxJTAjBgNV -BAMMHFVDQSBFeHRlbmRlZCBWYWxpZGF0aW9uIFJvb3QwggIiMA0GCSqGSIb3DQEB -AQUAA4ICDwAwggIKAoICAQCpCQcoEwKwmeBkqh5DFnpzsZGgdT6o+uM4AHrsiWog -D4vFsJszA1qGxliG1cGFu0/GnEBNyr7uaZa4rYEwmnySBesFK5pI0Lh2PpbIILvS -sPGP2KxFRv+qZ2C0d35qHzwaUnoEPQc8hQ2E0B92CvdqFN9y4zR8V05WAT558aop -O2z6+I9tTcg1367r3CTueUWnhbYFiN6IXSV8l2RnCdm/WhUFhvMJHuxYMjMR83dk -sHYf5BA1FxvyDrFspCqjc/wJHx4yGVMR59mzLC52LqGj3n5qiAno8geK+LLNEOfi -c0CTuwjRP+H8C5SzJe98ptfRr5//lpr1kXuYC3fUfugH0mK1lTnj8/FtDw5lhIpj -VMWAtuCeS31HJqcBCF3RiJ7XwzJE+oJKCmhUfzhTA8ykADNkUVkLo4KRel7sFsLz -KuZi2irbWWIQJUoqgQtHB0MGcIfS+pMRKXpITeuUx3BNr2fVUbGAIAEBtHoIppB/ -TuDvB0GHr2qlXov7z1CymlSvw4m6WC31MJixNnI5fkkE/SmnTHnkBVfblLkWU41G -sx2VYVdWf6/wFlthWG82UBEL2KwrlRYaDh8IzTY0ZRBiZtWAXxQgXy0MoHgKaNYs -1+lvK9JKBZP8nm9rZ/+I8U6laUpSNwXqxhaN0sSZ0YIrO7o1dfdRUVjzyAfd5LQD -fwIDAQABo0IwQDAdBgNVHQ4EFgQU2XQ65DA9DfcS3H5aBZ8eNJr34RQwDwYDVR0T -AQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZIhvcNAQELBQADggIBADaN -l8xCFWQpN5smLNb7rhVpLGsaGvdftvkHTFnq88nIua7Mui563MD1sC3AO6+fcAUR -ap8lTwEpcOPlDOHqWnzcSbvBHiqB9RZLcpHIojG5qtr8nR/zXUACE/xOHAbKsxSQ -VBcZEhrxH9cMaVr2cXj0lH2RC47skFSOvG+hTKv8dGT9cZr4QQehzZHkPJrgmzI5 -c6sq1WnIeJEmMX3ixzDx/BR4dxIOE/TdFpS/S2d7cFOFyrC78zhNLJA5wA3CXWvp -4uXViI3WLL+rG761KIcSF3Ru/H38j9CHJrAb+7lsq+KePRXBOy5nAliRn+/4Qh8s -t2j1da3Ptfb/EX3C8CSlrdP6oDyp+l3cpaDvRKS+1ujl5BOWF3sGPjLtx7dCvHaj -2GU4Kzg1USEODm8uNBNA4StnDG1KQTAYI1oyVZnJF+A83vbsea0rWBmirSwiGpWO -vpaQXUJXxPkUAzUrHC1RVwinOt4/5Mi0A3PCwSaAuwtCH60NryZy2sy+s6ODWA2C -xR9GUeOcGMyNm43sSet1UNWMKFnKdDTajAshqx7qG+XH/RU+wBeq+yNuJkbL+vmx -cmtpzyKEC2IPrNkZAJSidjzULZrtBJ4tBmIQN1IchXIbJ+XMxjHsN+xjWZsLHXbM -fjKaiJUINlK73nZfdklJrX+9ZSCyycErdhh2n1ax ------END CERTIFICATE----- - -# Issuer: CN=Certigna Root CA O=Dhimyotis OU=0002 48146308100036 -# Subject: CN=Certigna Root CA O=Dhimyotis OU=0002 48146308100036 -# Label: "Certigna Root CA" -# Serial: 269714418870597844693661054334862075617 -# MD5 Fingerprint: 0e:5c:30:62:27:eb:5b:bc:d7:ae:62:ba:e9:d5:df:77 -# SHA1 Fingerprint: 2d:0d:52:14:ff:9e:ad:99:24:01:74:20:47:6e:6c:85:27:27:f5:43 -# SHA256 Fingerprint: d4:8d:3d:23:ee:db:50:a4:59:e5:51:97:60:1c:27:77:4b:9d:7b:18:c9:4d:5a:05:95:11:a1:02:50:b9:31:68 ------BEGIN CERTIFICATE----- -MIIGWzCCBEOgAwIBAgIRAMrpG4nxVQMNo+ZBbcTjpuEwDQYJKoZIhvcNAQELBQAw -WjELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCURoaW15b3RpczEcMBoGA1UECwwTMDAw -MiA0ODE0NjMwODEwMDAzNjEZMBcGA1UEAwwQQ2VydGlnbmEgUm9vdCBDQTAeFw0x -MzEwMDEwODMyMjdaFw0zMzEwMDEwODMyMjdaMFoxCzAJBgNVBAYTAkZSMRIwEAYD -VQQKDAlEaGlteW90aXMxHDAaBgNVBAsMEzAwMDIgNDgxNDYzMDgxMDAwMzYxGTAX -BgNVBAMMEENlcnRpZ25hIFJvb3QgQ0EwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAw -ggIKAoICAQDNGDllGlmx6mQWDoyUJJV8g9PFOSbcDO8WV43X2KyjQn+Cyu3NW9sO 
-ty3tRQgXstmzy9YXUnIo245Onoq2C/mehJpNdt4iKVzSs9IGPjA5qXSjklYcoW9M -CiBtnyN6tMbaLOQdLNyzKNAT8kxOAkmhVECe5uUFoC2EyP+YbNDrihqECB63aCPu -I9Vwzm1RaRDuoXrC0SIxwoKF0vJVdlB8JXrJhFwLrN1CTivngqIkicuQstDuI7pm -TLtipPlTWmR7fJj6o0ieD5Wupxj0auwuA0Wv8HT4Ks16XdG+RCYyKfHx9WzMfgIh -C59vpD++nVPiz32pLHxYGpfhPTc3GGYo0kDFUYqMwy3OU4gkWGQwFsWq4NYKpkDf -ePb1BHxpE4S80dGnBs8B92jAqFe7OmGtBIyT46388NtEbVncSVmurJqZNjBBe3Yz -IoejwpKGbvlw7q6Hh5UbxHq9MfPU0uWZ/75I7HX1eBYdpnDBfzwboZL7z8g81sWT -Co/1VTp2lc5ZmIoJlXcymoO6LAQ6l73UL77XbJuiyn1tJslV1c/DeVIICZkHJC1k -JWumIWmbat10TWuXekG9qxf5kBdIjzb5LdXF2+6qhUVB+s06RbFo5jZMm5BX7CO5 -hwjCxAnxl4YqKE3idMDaxIzb3+KhF1nOJFl0Mdp//TBt2dzhauH8XwIDAQABo4IB -GjCCARYwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE -FBiHVuBud+4kNTxOc5of1uHieX4rMB8GA1UdIwQYMBaAFBiHVuBud+4kNTxOc5of -1uHieX4rMEQGA1UdIAQ9MDswOQYEVR0gADAxMC8GCCsGAQUFBwIBFiNodHRwczov -L3d3d3cuY2VydGlnbmEuZnIvYXV0b3JpdGVzLzBtBgNVHR8EZjBkMC+gLaArhilo -dHRwOi8vY3JsLmNlcnRpZ25hLmZyL2NlcnRpZ25hcm9vdGNhLmNybDAxoC+gLYYr -aHR0cDovL2NybC5kaGlteW90aXMuY29tL2NlcnRpZ25hcm9vdGNhLmNybDANBgkq -hkiG9w0BAQsFAAOCAgEAlLieT/DjlQgi581oQfccVdV8AOItOoldaDgvUSILSo3L -6btdPrtcPbEo/uRTVRPPoZAbAh1fZkYJMyjhDSSXcNMQH+pkV5a7XdrnxIxPTGRG -HVyH41neQtGbqH6mid2PHMkwgu07nM3A6RngatgCdTer9zQoKJHyBApPNeNgJgH6 -0BGM+RFq7q89w1DTj18zeTyGqHNFkIwgtnJzFyO+B2XleJINugHA64wcZr+shncB -lA2c5uk5jR+mUYyZDDl34bSb+hxnV29qao6pK0xXeXpXIs/NX2NGjVxZOob4Mkdi -o2cNGJHc+6Zr9UhhcyNZjgKnvETq9Emd8VRY+WCv2hikLyhF3HqgiIZd8zvn/yk1 -gPxkQ5Tm4xxvvq0OKmOZK8l+hfZx6AYDlf7ej0gcWtSS6Cvu5zHbugRqh5jnxV/v -faci9wHYTfmJ0A6aBVmknpjZbyvKcL5kwlWj9Omvw5Ip3IgWJJk8jSaYtlu3zM63 -Nwf9JtmYhST/WSMDmu2dnajkXjjO11INb9I/bbEFa0nOipFGc/T2L/Coc3cOZayh -jWZSaX5LaAzHHjcng6WMxwLkFM1JAbBzs/3GkDpv0mztO+7skb6iQ12LAEpmJURw -3kAP+HwV96LOPNdeE4yBFxgX0b3xdxA61GU5wSesVywlVP+i2k+KYTlerj1KjL0= ------END CERTIFICATE----- - -# Issuer: CN=emSign Root CA - G1 O=eMudhra Technologies Limited OU=emSign PKI -# Subject: CN=emSign Root CA - G1 O=eMudhra Technologies Limited OU=emSign PKI -# Label: "emSign Root CA - G1" -# Serial: 235931866688319308814040 -# MD5 Fingerprint: 9c:42:84:57:dd:cb:0b:a7:2e:95:ad:b6:f3:da:bc:ac -# SHA1 Fingerprint: 8a:c7:ad:8f:73:ac:4e:c1:b5:75:4d:a5:40:f4:fc:cf:7c:b5:8e:8c -# SHA256 Fingerprint: 40:f6:af:03:46:a9:9a:a1:cd:1d:55:5a:4e:9c:ce:62:c7:f9:63:46:03:ee:40:66:15:83:3d:c8:c8:d0:03:67 ------BEGIN CERTIFICATE----- -MIIDlDCCAnygAwIBAgIKMfXkYgxsWO3W2DANBgkqhkiG9w0BAQsFADBnMQswCQYD -VQQGEwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBU -ZWNobm9sb2dpZXMgTGltaXRlZDEcMBoGA1UEAxMTZW1TaWduIFJvb3QgQ0EgLSBH -MTAeFw0xODAyMTgxODMwMDBaFw00MzAyMTgxODMwMDBaMGcxCzAJBgNVBAYTAklO -MRMwEQYDVQQLEwplbVNpZ24gUEtJMSUwIwYDVQQKExxlTXVkaHJhIFRlY2hub2xv -Z2llcyBMaW1pdGVkMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEcxMIIBIjAN -BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAk0u76WaK7p1b1TST0Bsew+eeuGQz -f2N4aLTNLnF115sgxk0pvLZoYIr3IZpWNVrzdr3YzZr/k1ZLpVkGoZM0Kd0WNHVO -8oG0x5ZOrRkVUkr+PHB1cM2vK6sVmjM8qrOLqs1D/fXqcP/tzxE7lM5OMhbTI0Aq -d7OvPAEsbO2ZLIvZTmmYsvePQbAyeGHWDV/D+qJAkh1cF+ZwPjXnorfCYuKrpDhM -tTk1b+oDafo6VGiFbdbyL0NVHpENDtjVaqSW0RM8LHhQ6DqS0hdW5TUaQBw+jSzt -Od9C4INBdN+jzcKGYEho42kLVACL5HZpIQ15TjQIXhTCzLG3rdd8cIrHhQIDAQAB -o0IwQDAdBgNVHQ4EFgQU++8Nhp6w492pufEhF38+/PB3KxowDgYDVR0PAQH/BAQD -AgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAFn/8oz1h31x -PaOfG1vR2vjTnGs2vZupYeveFix0PZ7mddrXuqe8QhfnPZHr5X3dPpzxz5KsbEjM -wiI/aTvFthUvozXGaCocV685743QNcMYDHsAVhzNixl03r4PEuDQqqE/AjSxcM6d -GNYIAwlG7mDgfrbESQRRfXBgvKqy/3lyeqYdPV8q+Mri/Tm3R7nrft8EI6/6nAYH -6ftjk4BAtcZsCjEozgyfz7MjNYBBjWzEN3uBL4ChQEKF6dk4jeihU80Bv2noWgby 
-RQuQ+q7hv53yrlc8pa6yVvSLZUDp/TGBLPQ5Cdjua6e0ph0VpZj3AYHYhX3zUVxx -iN66zB+Afko= ------END CERTIFICATE----- - -# Issuer: CN=emSign ECC Root CA - G3 O=eMudhra Technologies Limited OU=emSign PKI -# Subject: CN=emSign ECC Root CA - G3 O=eMudhra Technologies Limited OU=emSign PKI -# Label: "emSign ECC Root CA - G3" -# Serial: 287880440101571086945156 -# MD5 Fingerprint: ce:0b:72:d1:9f:88:8e:d0:50:03:e8:e3:b8:8b:67:40 -# SHA1 Fingerprint: 30:43:fa:4f:f2:57:dc:a0:c3:80:ee:2e:58:ea:78:b2:3f:e6:bb:c1 -# SHA256 Fingerprint: 86:a1:ec:ba:08:9c:4a:8d:3b:be:27:34:c6:12:ba:34:1d:81:3e:04:3c:f9:e8:a8:62:cd:5c:57:a3:6b:be:6b ------BEGIN CERTIFICATE----- -MIICTjCCAdOgAwIBAgIKPPYHqWhwDtqLhDAKBggqhkjOPQQDAzBrMQswCQYDVQQG -EwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNo -bm9sb2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0g -RzMwHhcNMTgwMjE4MTgzMDAwWhcNNDMwMjE4MTgzMDAwWjBrMQswCQYDVQQGEwJJ -TjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNobm9s -b2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0gRzMw -djAQBgcqhkjOPQIBBgUrgQQAIgNiAAQjpQy4LRL1KPOxst3iAhKAnjlfSU2fySU0 -WXTsuwYc58Byr+iuL+FBVIcUqEqy6HyC5ltqtdyzdc6LBtCGI79G1Y4PPwT01xyS -fvalY8L1X44uT6EYGQIrMgqCZH0Wk9GjQjBAMB0GA1UdDgQWBBR8XQKEE9TMipuB -zhccLikenEhjQjAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAKBggq -hkjOPQQDAwNpADBmAjEAvvNhzwIQHWSVB7gYboiFBS+DCBeQyh+KTOgNG3qxrdWB -CUfvO6wIBHxcmbHtRwfSAjEAnbpV/KlK6O3t5nYBQnvI+GDZjVGLVTv7jHvrZQnD -+JbNR6iC8hZVdyR+EhCVBCyj ------END CERTIFICATE----- - -# Issuer: CN=emSign Root CA - C1 O=eMudhra Inc OU=emSign PKI -# Subject: CN=emSign Root CA - C1 O=eMudhra Inc OU=emSign PKI -# Label: "emSign Root CA - C1" -# Serial: 825510296613316004955058 -# MD5 Fingerprint: d8:e3:5d:01:21:fa:78:5a:b0:df:ba:d2:ee:2a:5f:68 -# SHA1 Fingerprint: e7:2e:f1:df:fc:b2:09:28:cf:5d:d4:d5:67:37:b1:51:cb:86:4f:01 -# SHA256 Fingerprint: 12:56:09:aa:30:1d:a0:a2:49:b9:7a:82:39:cb:6a:34:21:6f:44:dc:ac:9f:39:54:b1:42:92:f2:e8:c8:60:8f ------BEGIN CERTIFICATE----- -MIIDczCCAlugAwIBAgILAK7PALrEzzL4Q7IwDQYJKoZIhvcNAQELBQAwVjELMAkG -A1UEBhMCVVMxEzARBgNVBAsTCmVtU2lnbiBQS0kxFDASBgNVBAoTC2VNdWRocmEg -SW5jMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEMxMB4XDTE4MDIxODE4MzAw -MFoXDTQzMDIxODE4MzAwMFowVjELMAkGA1UEBhMCVVMxEzARBgNVBAsTCmVtU2ln -biBQS0kxFDASBgNVBAoTC2VNdWRocmEgSW5jMRwwGgYDVQQDExNlbVNpZ24gUm9v -dCBDQSAtIEMxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAz+upufGZ -BczYKCFK83M0UYRWEPWgTywS4/oTmifQz/l5GnRfHXk5/Fv4cI7gklL35CX5VIPZ -HdPIWoU/Xse2B+4+wM6ar6xWQio5JXDWv7V7Nq2s9nPczdcdioOl+yuQFTdrHCZH -3DspVpNqs8FqOp099cGXOFgFixwR4+S0uF2FHYP+eF8LRWgYSKVGczQ7/g/IdrvH -GPMF0Ybzhe3nudkyrVWIzqa2kbBPrH4VI5b2P/AgNBbeCsbEBEV5f6f9vtKppa+c -xSMq9zwhbL2vj07FOrLzNBL834AaSaTUqZX3noleoomslMuoaJuvimUnzYnu3Yy1 -aylwQ6BpC+S5DwIDAQABo0IwQDAdBgNVHQ4EFgQU/qHgcB4qAzlSWkK+XJGFehiq -TbUwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL -BQADggEBAMJKVvoVIXsoounlHfv4LcQ5lkFMOycsxGwYFYDGrK9HWS8mC+M2sO87 -/kOXSTKZEhVb3xEp/6tT+LvBeA+snFOvV71ojD1pM/CjoCNjO2RnIkSt1XHLVip4 -kqNPEjE2NuLe/gDEo2APJ62gsIq1NnpSob0n9CAnYuhNlCQT5AoE6TyrLshDCUrG -YQTlSTR+08TI9Q/Aqum6VF7zYytPT1DU/rl7mYw9wC68AivTxEDkigcxHpvOJpkT -+xHqmiIMERnHXhuBUDDIlhJu58tBf5E7oke3VIAb3ADMmpDqw8NQBmIMMMAVSKeo -WXzhriKi4gp6D/piq1JM4fHfyr6DDUI= ------END CERTIFICATE----- - -# Issuer: CN=emSign ECC Root CA - C3 O=eMudhra Inc OU=emSign PKI -# Subject: CN=emSign ECC Root CA - C3 O=eMudhra Inc OU=emSign PKI -# Label: "emSign ECC Root CA - C3" -# Serial: 582948710642506000014504 -# MD5 Fingerprint: 3e:53:b3:a3:81:ee:d7:10:f8:d3:b0:1d:17:92:f5:d5 -# SHA1 Fingerprint: 
b6:af:43:c2:9b:81:53:7d:f6:ef:6b:c3:1f:1f:60:15:0c:ee:48:66 -# SHA256 Fingerprint: bc:4d:80:9b:15:18:9d:78:db:3e:1d:8c:f4:f9:72:6a:79:5d:a1:64:3c:a5:f1:35:8e:1d:db:0e:dc:0d:7e:b3 ------BEGIN CERTIFICATE----- -MIICKzCCAbGgAwIBAgIKe3G2gla4EnycqDAKBggqhkjOPQQDAzBaMQswCQYDVQQG -EwJVUzETMBEGA1UECxMKZW1TaWduIFBLSTEUMBIGA1UEChMLZU11ZGhyYSBJbmMx -IDAeBgNVBAMTF2VtU2lnbiBFQ0MgUm9vdCBDQSAtIEMzMB4XDTE4MDIxODE4MzAw -MFoXDTQzMDIxODE4MzAwMFowWjELMAkGA1UEBhMCVVMxEzARBgNVBAsTCmVtU2ln -biBQS0kxFDASBgNVBAoTC2VNdWRocmEgSW5jMSAwHgYDVQQDExdlbVNpZ24gRUND -IFJvb3QgQ0EgLSBDMzB2MBAGByqGSM49AgEGBSuBBAAiA2IABP2lYa57JhAd6bci -MK4G9IGzsUJxlTm801Ljr6/58pc1kjZGDoeVjbk5Wum739D+yAdBPLtVb4Ojavti -sIGJAnB9SMVK4+kiVCJNk7tCDK93nCOmfddhEc5lx/h//vXyqaNCMEAwHQYDVR0O -BBYEFPtaSNCAIEDyqOkAB2kZd6fmw/TPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMB -Af8EBTADAQH/MAoGCCqGSM49BAMDA2gAMGUCMQC02C8Cif22TGK6Q04ThHK1rt0c -3ta13FaPWEBaLd4gTCKDypOofu4SQMfWh0/434UCMBwUZOR8loMRnLDRWmFLpg9J -0wD8ofzkpf9/rdcw0Md3f76BB1UwUCAU9Vc4CqgxUQ== ------END CERTIFICATE----- - -# Issuer: CN=Hongkong Post Root CA 3 O=Hongkong Post -# Subject: CN=Hongkong Post Root CA 3 O=Hongkong Post -# Label: "Hongkong Post Root CA 3" -# Serial: 46170865288971385588281144162979347873371282084 -# MD5 Fingerprint: 11:fc:9f:bd:73:30:02:8a:fd:3f:f3:58:b9:cb:20:f0 -# SHA1 Fingerprint: 58:a2:d0:ec:20:52:81:5b:c1:f3:f8:64:02:24:4e:c2:8e:02:4b:02 -# SHA256 Fingerprint: 5a:2f:c0:3f:0c:83:b0:90:bb:fa:40:60:4b:09:88:44:6c:76:36:18:3d:f9:84:6e:17:10:1a:44:7f:b8:ef:d6 ------BEGIN CERTIFICATE----- -MIIFzzCCA7egAwIBAgIUCBZfikyl7ADJk0DfxMauI7gcWqQwDQYJKoZIhvcNAQEL -BQAwbzELMAkGA1UEBhMCSEsxEjAQBgNVBAgTCUhvbmcgS29uZzESMBAGA1UEBxMJ -SG9uZyBLb25nMRYwFAYDVQQKEw1Ib25na29uZyBQb3N0MSAwHgYDVQQDExdIb25n -a29uZyBQb3N0IFJvb3QgQ0EgMzAeFw0xNzA2MDMwMjI5NDZaFw00MjA2MDMwMjI5 -NDZaMG8xCzAJBgNVBAYTAkhLMRIwEAYDVQQIEwlIb25nIEtvbmcxEjAQBgNVBAcT -CUhvbmcgS29uZzEWMBQGA1UEChMNSG9uZ2tvbmcgUG9zdDEgMB4GA1UEAxMXSG9u -Z2tvbmcgUG9zdCBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK -AoICAQCziNfqzg8gTr7m1gNt7ln8wlffKWihgw4+aMdoWJwcYEuJQwy51BWy7sFO -dem1p+/l6TWZ5Mwc50tfjTMwIDNT2aa71T4Tjukfh0mtUC1Qyhi+AViiE3CWu4mI -VoBc+L0sPOFMV4i707mV78vH9toxdCim5lSJ9UExyuUmGs2C4HDaOym71QP1mbpV -9WTRYA6ziUm4ii8F0oRFKHyPaFASePwLtVPLwpgchKOesL4jpNrcyCse2m5FHomY -2vkALgbpDDtw1VAliJnLzXNg99X/NWfFobxeq81KuEXryGgeDQ0URhLj0mRiikKY -vLTGCAj4/ahMZJx2Ab0vqWwzD9g/KLg8aQFChn5pwckGyuV6RmXpwtZQQS4/t+Tt -bNe/JgERohYpSms0BpDsE9K2+2p20jzt8NYt3eEV7KObLyzJPivkaTv/ciWxNoZb -x39ri1UbSsUgYT2uy1DhCDq+sI9jQVMwCFk8mB13umOResoQUGC/8Ne8lYePl8X+ -l2oBlKN8W4UdKjk60FSh0Tlxnf0h+bV78OLgAo9uliQlLKAeLKjEiafv7ZkGL7YK -TE/bosw3Gq9HhS2KX8Q0NEwA/RiTZxPRN+ZItIsGxVd7GYYKecsAyVKvQv83j+Gj -Hno9UKtjBucVtT+2RTeUN7F+8kjDf8V1/peNRY8apxpyKBpADwIDAQABo2MwYTAP -BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAfBgNVHSMEGDAWgBQXnc0e -i9Y5K3DTXNSguB+wAPzFYTAdBgNVHQ4EFgQUF53NHovWOStw01zUoLgfsAD8xWEw -DQYJKoZIhvcNAQELBQADggIBAFbVe27mIgHSQpsY1Q7XZiNc4/6gx5LS6ZStS6LG -7BJ8dNVI0lkUmcDrudHr9EgwW62nV3OZqdPlt9EuWSRY3GguLmLYauRwCy0gUCCk -MpXRAJi70/33MvJJrsZ64Ee+bs7Lo3I6LWldy8joRTnU+kLBEUx3XZL7av9YROXr -gZ6voJmtvqkBZss4HTzfQx/0TW60uhdG/H39h4F5ag0zD/ov+BS5gLNdTaqX4fnk -GMX41TiMJjz98iji7lpJiCzfeT2OnpA8vUFKOt1b9pq0zj8lMH8yfaIDlNDceqFS -3m6TjRgm/VWsvY+b0s+v54Ysyx8Jb6NvqYTUc79NoXQbTiNg8swOqn+knEwlqLJm -Ozj/2ZQw9nKEvmhVEA/GcywWaZMH/rFF7buiVWqw2rVKAiUnhde3t4ZEFolsgCs+ -l6mc1X5VTMbeRRAc6uk7nwNT7u56AQIWeNTowr5GdogTPyK7SBIdUgC0An4hGh6c -JfTzPV4e0hz5sy229zdcxsshTrD3mUcYhcErulWuBurQB7Lcq9CClnXO0lD+mefP -L5/ndtFhKvshuzHQqp9HpLIiyhY6UFfEW0NnxWViA0kB60PZ2Pierc+xYw5F9KBa 
-LJstxabArahH9CdMOA0uG0k7UvToiIMrVCjU8jVStDKDYmlkDJGcn5fqdBb9HxEG -mpv0 ------END CERTIFICATE----- - -# Issuer: CN=Entrust Root Certification Authority - G4 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2015 Entrust, Inc. - for authorized use only -# Subject: CN=Entrust Root Certification Authority - G4 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2015 Entrust, Inc. - for authorized use only -# Label: "Entrust Root Certification Authority - G4" -# Serial: 289383649854506086828220374796556676440 -# MD5 Fingerprint: 89:53:f1:83:23:b7:7c:8e:05:f1:8c:71:38:4e:1f:88 -# SHA1 Fingerprint: 14:88:4e:86:26:37:b0:26:af:59:62:5c:40:77:ec:35:29:ba:96:01 -# SHA256 Fingerprint: db:35:17:d1:f6:73:2a:2d:5a:b9:7c:53:3e:c7:07:79:ee:32:70:a6:2f:b4:ac:42:38:37:24:60:e6:f0:1e:88 ------BEGIN CERTIFICATE----- -MIIGSzCCBDOgAwIBAgIRANm1Q3+vqTkPAAAAAFVlrVgwDQYJKoZIhvcNAQELBQAw -gb4xCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQL -Ex9TZWUgd3d3LmVudHJ1c3QubmV0L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykg -MjAxNSBFbnRydXN0LCBJbmMuIC0gZm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMjAw -BgNVBAMTKUVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEc0 -MB4XDTE1MDUyNzExMTExNloXDTM3MTIyNzExNDExNlowgb4xCzAJBgNVBAYTAlVT -MRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1 -c3QubmV0L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxNSBFbnRydXN0LCBJ -bmMuIC0gZm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMjAwBgNVBAMTKUVudHJ1c3Qg -Um9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEc0MIICIjANBgkqhkiG9w0B -AQEFAAOCAg8AMIICCgKCAgEAsewsQu7i0TD/pZJH4i3DumSXbcr3DbVZwbPLqGgZ -2K+EbTBwXX7zLtJTmeH+H17ZSK9dE43b/2MzTdMAArzE+NEGCJR5WIoV3imz/f3E -T+iq4qA7ec2/a0My3dl0ELn39GjUu9CH1apLiipvKgS1sqbHoHrmSKvS0VnM1n4j -5pds8ELl3FFLFUHtSUrJ3hCX1nbB76W1NhSXNdh4IjVS70O92yfbYVaCNNzLiGAM -C1rlLAHGVK/XqsEQe9IFWrhAnoanw5CGAlZSCXqc0ieCU0plUmr1POeo8pyvi73T -DtTUXm6Hnmo9RR3RXRv06QqsYJn7ibT/mCzPfB3pAqoEmh643IhuJbNsZvc8kPNX -wbMv9W3y+8qh+CmdRouzavbmZwe+LGcKKh9asj5XxNMhIWNlUpEbsZmOeX7m640A -2Vqq6nPopIICR5b+W45UYaPrL0swsIsjdXJ8ITzI9vF01Bx7owVV7rtNOzK+mndm -nqxpkCIHH2E6lr7lmk/MBTwoWdPBDFSoWWG9yHJM6Nyfh3+9nEg2XpWjDrk4JFX8 -dWbrAuMINClKxuMrLzOg2qOGpRKX/YAr2hRC45K9PvJdXmd0LhyIRyk0X+IyqJwl -N4y6mACXi0mWHv0liqzc2thddG5msP9E36EYxr5ILzeUePiVSj9/E15dWf10hkNj -c0kCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYD -VR0OBBYEFJ84xFYjwznooHFs6FRM5Og6sb9nMA0GCSqGSIb3DQEBCwUAA4ICAQAS -5UKme4sPDORGpbZgQIeMJX6tuGguW8ZAdjwD+MlZ9POrYs4QjbRaZIxowLByQzTS -Gwv2LFPSypBLhmb8qoMi9IsabyZIrHZ3CL/FmFz0Jomee8O5ZDIBf9PD3Vht7LGr -hFV0d4QEJ1JrhkzO3bll/9bGXp+aEJlLdWr+aumXIOTkdnrG0CSqkM0gkLpHZPt/ -B7NTeLUKYvJzQ85BK4FqLoUWlFPUa19yIqtRLULVAJyZv967lDtX/Zr1hstWO1uI -AeV8KEsD+UmDfLJ/fOPtjqF/YFOOVZ1QNBIPt5d7bIdKROf1beyAN/BYGW5KaHbw -H5Lk6rWS02FREAutp9lfx1/cH6NcjKF+m7ee01ZvZl4HliDtC3T7Zk6LERXpgUl+ -b7DUUH8i119lAg2m9IUe2K4GS0qn0jFmwvjO5QimpAKWRGhXxNUzzxkvFMSUHHuk -2fCfDrGA4tGeEWSpiBE6doLlYsKA2KSD7ZPvfC+QsDJMlhVoSFLUmQjAJOgc47Ol -IQ6SwJAfzyBfyjs4x7dtOvPmRLgOMWuIjnDrnBdSqEGULoe256YSxXXfW8AKbnuk -5F6G+TaU33fD6Q3AOfF5u0aOq0NZJ7cguyPpVkAh7DE9ZapD8j3fcEThuk0mEDuY -n/PIjhs4ViFqUZPTkcpG2om3PVODLAgfi49T3f+sHw== ------END CERTIFICATE----- - -# Issuer: CN=Microsoft ECC Root Certificate Authority 2017 O=Microsoft Corporation -# Subject: CN=Microsoft ECC Root Certificate Authority 2017 O=Microsoft Corporation -# Label: "Microsoft ECC Root Certificate Authority 2017" -# Serial: 136839042543790627607696632466672567020 -# MD5 Fingerprint: dd:a1:03:e6:4a:93:10:d1:bf:f0:19:42:cb:fe:ed:67 -# SHA1 Fingerprint: 99:9a:64:c3:7f:f4:7d:9f:ab:95:f1:47:69:89:14:60:ee:c4:c3:c5 -# SHA256 Fingerprint: 
35:8d:f3:9d:76:4a:f9:e1:b7:66:e9:c9:72:df:35:2e:e1:5c:fa:c2:27:af:6a:d1:d7:0e:8e:4a:6e:dc:ba:02 ------BEGIN CERTIFICATE----- -MIICWTCCAd+gAwIBAgIQZvI9r4fei7FK6gxXMQHC7DAKBggqhkjOPQQDAzBlMQsw -CQYDVQQGEwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYD -VQQDEy1NaWNyb3NvZnQgRUNDIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIw -MTcwHhcNMTkxMjE4MjMwNjQ1WhcNNDIwNzE4MjMxNjA0WjBlMQswCQYDVQQGEwJV -UzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYDVQQDEy1NaWNy -b3NvZnQgRUNDIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIwMTcwdjAQBgcq -hkjOPQIBBgUrgQQAIgNiAATUvD0CQnVBEyPNgASGAlEvaqiBYgtlzPbKnR5vSmZR -ogPZnZH6thaxjG7efM3beaYvzrvOcS/lpaso7GMEZpn4+vKTEAXhgShC48Zo9OYb -hGBKia/teQ87zvH2RPUBeMCjVDBSMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8E -BTADAQH/MB0GA1UdDgQWBBTIy5lycFIM+Oa+sgRXKSrPQhDtNTAQBgkrBgEEAYI3 -FQEEAwIBADAKBggqhkjOPQQDAwNoADBlAjBY8k3qDPlfXu5gKcs68tvWMoQZP3zV -L8KxzJOuULsJMsbG7X7JNpQS5GiFBqIb0C8CMQCZ6Ra0DvpWSNSkMBaReNtUjGUB -iudQZsIxtzm6uBoiB078a1QWIP8rtedMDE2mT3M= ------END CERTIFICATE----- - -# Issuer: CN=Microsoft RSA Root Certificate Authority 2017 O=Microsoft Corporation -# Subject: CN=Microsoft RSA Root Certificate Authority 2017 O=Microsoft Corporation -# Label: "Microsoft RSA Root Certificate Authority 2017" -# Serial: 40975477897264996090493496164228220339 -# MD5 Fingerprint: 10:ff:00:ff:cf:c9:f8:c7:7a:c0:ee:35:8e:c9:0f:47 -# SHA1 Fingerprint: 73:a5:e6:4a:3b:ff:83:16:ff:0e:dc:cc:61:8a:90:6e:4e:ae:4d:74 -# SHA256 Fingerprint: c7:41:f7:0f:4b:2a:8d:88:bf:2e:71:c1:41:22:ef:53:ef:10:eb:a0:cf:a5:e6:4c:fa:20:f4:18:85:30:73:e0 ------BEGIN CERTIFICATE----- -MIIFqDCCA5CgAwIBAgIQHtOXCV/YtLNHcB6qvn9FszANBgkqhkiG9w0BAQwFADBl -MQswCQYDVQQGEwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYw -NAYDVQQDEy1NaWNyb3NvZnQgUlNBIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5 -IDIwMTcwHhcNMTkxMjE4MjI1MTIyWhcNNDIwNzE4MjMwMDIzWjBlMQswCQYDVQQG -EwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYDVQQDEy1N -aWNyb3NvZnQgUlNBIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIwMTcwggIi -MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKW76UM4wplZEWCpW9R2LBifOZ -Nt9GkMml7Xhqb0eRaPgnZ1AzHaGm++DlQ6OEAlcBXZxIQIJTELy/xztokLaCLeX0 -ZdDMbRnMlfl7rEqUrQ7eS0MdhweSE5CAg2Q1OQT85elss7YfUJQ4ZVBcF0a5toW1 -HLUX6NZFndiyJrDKxHBKrmCk3bPZ7Pw71VdyvD/IybLeS2v4I2wDwAW9lcfNcztm -gGTjGqwu+UcF8ga2m3P1eDNbx6H7JyqhtJqRjJHTOoI+dkC0zVJhUXAoP8XFWvLJ -jEm7FFtNyP9nTUwSlq31/niol4fX/V4ggNyhSyL71Imtus5Hl0dVe49FyGcohJUc -aDDv70ngNXtk55iwlNpNhTs+VcQor1fznhPbRiefHqJeRIOkpcrVE7NLP8TjwuaG -YaRSMLl6IE9vDzhTyzMMEyuP1pq9KsgtsRx9S1HKR9FIJ3Jdh+vVReZIZZ2vUpC6 -W6IYZVcSn2i51BVrlMRpIpj0M+Dt+VGOQVDJNE92kKz8OMHY4Xu54+OU4UZpyw4K -UGsTuqwPN1q3ErWQgR5WrlcihtnJ0tHXUeOrO8ZV/R4O03QK0dqq6mm4lyiPSMQH -+FJDOvTKVTUssKZqwJz58oHhEmrARdlns87/I6KJClTUFLkqqNfs+avNJVgyeY+Q -W5g5xAgGwax/Dj0ApQIDAQABo1QwUjAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/ -BAUwAwEB/zAdBgNVHQ4EFgQUCctZf4aycI8awznjwNnpv7tNsiMwEAYJKwYBBAGC -NxUBBAMCAQAwDQYJKoZIhvcNAQEMBQADggIBAKyvPl3CEZaJjqPnktaXFbgToqZC -LgLNFgVZJ8og6Lq46BrsTaiXVq5lQ7GPAJtSzVXNUzltYkyLDVt8LkS/gxCP81OC -gMNPOsduET/m4xaRhPtthH80dK2Jp86519efhGSSvpWhrQlTM93uCupKUY5vVau6 -tZRGrox/2KJQJWVggEbbMwSubLWYdFQl3JPk+ONVFT24bcMKpBLBaYVu32TxU5nh -SnUgnZUP5NbcA/FZGOhHibJXWpS2qdgXKxdJ5XbLwVaZOjex/2kskZGT4d9Mozd2 -TaGf+G0eHdP67Pv0RR0Tbc/3WeUiJ3IrhvNXuzDtJE3cfVa7o7P4NHmJweDyAmH3 -pvwPuxwXC65B2Xy9J6P9LjrRk5Sxcx0ki69bIImtt2dmefU6xqaWM/5TkshGsRGR -xpl/j8nWZjEgQRCHLQzWwa80mMpkg/sTV9HB8Dx6jKXB/ZUhoHHBk2dxEuqPiApp -GWSZI1b7rCoucL5mxAyE7+WL85MB+GqQk2dLsmijtWKP6T+MejteD+eMuMZ87zf9 -dOLITzNy4ZQ5bb0Sr74MTnB8G2+NszKTc0QWbej09+CVgI+WXTik9KveCjCHk9hN -AHFiRSdLOkKEW39lt2c0Ui2cFmuqqNh7o0JMcccMyj6D5KbvtwEwXlGjefVwaaZB 
-RA+GsCyRxj3qrg+E ------END CERTIFICATE----- - -# Issuer: CN=e-Szigno Root CA 2017 O=Microsec Ltd. -# Subject: CN=e-Szigno Root CA 2017 O=Microsec Ltd. -# Label: "e-Szigno Root CA 2017" -# Serial: 411379200276854331539784714 -# MD5 Fingerprint: de:1f:f6:9e:84:ae:a7:b4:21:ce:1e:58:7d:d1:84:98 -# SHA1 Fingerprint: 89:d4:83:03:4f:9e:9a:48:80:5f:72:37:d4:a9:a6:ef:cb:7c:1f:d1 -# SHA256 Fingerprint: be:b0:0b:30:83:9b:9b:c3:2c:32:e4:44:79:05:95:06:41:f2:64:21:b1:5e:d0:89:19:8b:51:8a:e2:ea:1b:99 ------BEGIN CERTIFICATE----- -MIICQDCCAeWgAwIBAgIMAVRI7yH9l1kN9QQKMAoGCCqGSM49BAMCMHExCzAJBgNV -BAYTAkhVMREwDwYDVQQHDAhCdWRhcGVzdDEWMBQGA1UECgwNTWljcm9zZWMgTHRk -LjEXMBUGA1UEYQwOVkFUSFUtMjM1ODQ0OTcxHjAcBgNVBAMMFWUtU3ppZ25vIFJv -b3QgQ0EgMjAxNzAeFw0xNzA4MjIxMjA3MDZaFw00MjA4MjIxMjA3MDZaMHExCzAJ -BgNVBAYTAkhVMREwDwYDVQQHDAhCdWRhcGVzdDEWMBQGA1UECgwNTWljcm9zZWMg -THRkLjEXMBUGA1UEYQwOVkFUSFUtMjM1ODQ0OTcxHjAcBgNVBAMMFWUtU3ppZ25v -IFJvb3QgQ0EgMjAxNzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABJbcPYrYsHtv -xie+RJCxs1YVe45DJH0ahFnuY2iyxl6H0BVIHqiQrb1TotreOpCmYF9oMrWGQd+H -Wyx7xf58etqjYzBhMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G -A1UdDgQWBBSHERUI0arBeAyxr87GyZDvvzAEwDAfBgNVHSMEGDAWgBSHERUI0arB -eAyxr87GyZDvvzAEwDAKBggqhkjOPQQDAgNJADBGAiEAtVfd14pVCzbhhkT61Nlo -jbjcI4qKDdQvfepz7L9NbKgCIQDLpbQS+ue16M9+k/zzNY9vTlp8tLxOsvxyqltZ -+efcMQ== ------END CERTIFICATE----- - -# Issuer: O=CERTSIGN SA OU=certSIGN ROOT CA G2 -# Subject: O=CERTSIGN SA OU=certSIGN ROOT CA G2 -# Label: "certSIGN Root CA G2" -# Serial: 313609486401300475190 -# MD5 Fingerprint: 8c:f1:75:8a:c6:19:cf:94:b7:f7:65:20:87:c3:97:c7 -# SHA1 Fingerprint: 26:f9:93:b4:ed:3d:28:27:b0:b9:4b:a7:e9:15:1d:a3:8d:92:e5:32 -# SHA256 Fingerprint: 65:7c:fe:2f:a7:3f:aa:38:46:25:71:f3:32:a2:36:3a:46:fc:e7:02:09:51:71:07:02:cd:fb:b6:ee:da:33:05 ------BEGIN CERTIFICATE----- -MIIFRzCCAy+gAwIBAgIJEQA0tk7GNi02MA0GCSqGSIb3DQEBCwUAMEExCzAJBgNV -BAYTAlJPMRQwEgYDVQQKEwtDRVJUU0lHTiBTQTEcMBoGA1UECxMTY2VydFNJR04g -Uk9PVCBDQSBHMjAeFw0xNzAyMDYwOTI3MzVaFw00MjAyMDYwOTI3MzVaMEExCzAJ -BgNVBAYTAlJPMRQwEgYDVQQKEwtDRVJUU0lHTiBTQTEcMBoGA1UECxMTY2VydFNJ -R04gUk9PVCBDQSBHMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMDF -dRmRfUR0dIf+DjuW3NgBFszuY5HnC2/OOwppGnzC46+CjobXXo9X69MhWf05N0Iw -vlDqtg+piNguLWkh59E3GE59kdUWX2tbAMI5Qw02hVK5U2UPHULlj88F0+7cDBrZ -uIt4ImfkabBoxTzkbFpG583H+u/E7Eu9aqSs/cwoUe+StCmrqzWaTOTECMYmzPhp -n+Sc8CnTXPnGFiWeI8MgwT0PPzhAsP6CRDiqWhqKa2NYOLQV07YRaXseVO6MGiKs -cpc/I1mbySKEwQdPzH/iV8oScLumZfNpdWO9lfsbl83kqK/20U6o2YpxJM02PbyW -xPFsqa7lzw1uKA2wDrXKUXt4FMMgL3/7FFXhEZn91QqhngLjYl/rNUssuHLoPj1P -rCy7Lobio3aP5ZMqz6WryFyNSwb/EkaseMsUBzXgqd+L6a8VTxaJW732jcZZroiF -DsGJ6x9nxUWO/203Nit4ZoORUSs9/1F3dmKh7Gc+PoGD4FapUB8fepmrY7+EF3fx -DTvf95xhszWYijqy7DwaNz9+j5LP2RIUZNoQAhVB/0/E6xyjyfqZ90bp4RjZsbgy -LcsUDFDYg2WD7rlcz8sFWkz6GZdr1l0T08JcVLwyc6B49fFtHsufpaafItzRUZ6C -eWRgKRM+o/1Pcmqr4tTluCRVLERLiohEnMqE0yo7AgMBAAGjQjBAMA8GA1UdEwEB -/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBSCIS1mxteg4BXrzkwJ -d8RgnlRuAzANBgkqhkiG9w0BAQsFAAOCAgEAYN4auOfyYILVAzOBywaK8SJJ6ejq -kX/GM15oGQOGO0MBzwdw5AgeZYWR5hEit/UCI46uuR59H35s5r0l1ZUa8gWmr4UC -b6741jH/JclKyMeKqdmfS0mbEVeZkkMR3rYzpMzXjWR91M08KCy0mpbqTfXERMQl -qiCA2ClV9+BB/AYm/7k29UMUA2Z44RGx2iBfRgB4ACGlHgAoYXhvqAEBj500mv/0 -OJD7uNGzcgbJceaBxXntC6Z58hMLnPddDnskk7RI24Zf3lCGeOdA5jGokHZwYa+c -NywRtYK3qq4kNFtyDGkNzVmf9nGvnAvRCjj5BiKDUyUM/FHE5r7iOZULJK2v0ZXk -ltd0ZGtxTgI8qoXzIKNDOXZbbFD+mpwUHmUUihW9o4JFWklWatKcsWMy5WHgUyIO -pwpJ6st+H6jiYoD2EEVSmAYY3qXNL3+q1Ok+CHLsIwMCPKaq2LxndD0UF/tUSxfj -03k9bWtJySgOLnRQvwzZRjoQhsmnP+mg7H/rpXdYaXHmgwo38oZJar55CJD2AhZk 
-PuXaTH4MNMn5X7azKFGnpyuqSfqNZSlO42sTp5SjLVFteAxEy9/eCG/Oo2Sr05WE -1LlSVHJ7liXMvGnjSG4N0MedJ5qq+BOS3R7fY581qRY27Iy4g/Q9iY/NtBde17MX -QRBdJ3NghVdJIgc= ------END CERTIFICATE----- - -# Issuer: CN=Trustwave Global Certification Authority O=Trustwave Holdings, Inc. -# Subject: CN=Trustwave Global Certification Authority O=Trustwave Holdings, Inc. -# Label: "Trustwave Global Certification Authority" -# Serial: 1846098327275375458322922162 -# MD5 Fingerprint: f8:1c:18:2d:2f:ba:5f:6d:a1:6c:bc:c7:ab:91:c7:0e -# SHA1 Fingerprint: 2f:8f:36:4f:e1:58:97:44:21:59:87:a5:2a:9a:d0:69:95:26:7f:b5 -# SHA256 Fingerprint: 97:55:20:15:f5:dd:fc:3c:87:88:c0:06:94:45:55:40:88:94:45:00:84:f1:00:86:70:86:bc:1a:2b:b5:8d:c8 ------BEGIN CERTIFICATE----- -MIIF2jCCA8KgAwIBAgIMBfcOhtpJ80Y1LrqyMA0GCSqGSIb3DQEBCwUAMIGIMQsw -CQYDVQQGEwJVUzERMA8GA1UECAwISWxsaW5vaXMxEDAOBgNVBAcMB0NoaWNhZ28x -ITAfBgNVBAoMGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjExMC8GA1UEAwwoVHJ1 -c3R3YXZlIEdsb2JhbCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0xNzA4MjMx -OTM0MTJaFw00MjA4MjMxOTM0MTJaMIGIMQswCQYDVQQGEwJVUzERMA8GA1UECAwI -SWxsaW5vaXMxEDAOBgNVBAcMB0NoaWNhZ28xITAfBgNVBAoMGFRydXN0d2F2ZSBI -b2xkaW5ncywgSW5jLjExMC8GA1UEAwwoVHJ1c3R3YXZlIEdsb2JhbCBDZXJ0aWZp -Y2F0aW9uIEF1dGhvcml0eTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB -ALldUShLPDeS0YLOvR29zd24q88KPuFd5dyqCblXAj7mY2Hf8g+CY66j96xz0Xzn -swuvCAAJWX/NKSqIk4cXGIDtiLK0thAfLdZfVaITXdHG6wZWiYj+rDKd/VzDBcdu -7oaJuogDnXIhhpCujwOl3J+IKMujkkkP7NAP4m1ET4BqstTnoApTAbqOl5F2brz8 -1Ws25kCI1nsvXwXoLG0R8+eyvpJETNKXpP7ScoFDB5zpET71ixpZfR9oWN0EACyW -80OzfpgZdNmcc9kYvkHHNHnZ9GLCQ7mzJ7Aiy/k9UscwR7PJPrhq4ufogXBeQotP -JqX+OsIgbrv4Fo7NDKm0G2x2EOFYeUY+VM6AqFcJNykbmROPDMjWLBz7BegIlT1l -RtzuzWniTY+HKE40Cz7PFNm73bZQmq131BnW2hqIyE4bJ3XYsgjxroMwuREOzYfw -hI0Vcnyh78zyiGG69Gm7DIwLdVcEuE4qFC49DxweMqZiNu5m4iK4BUBjECLzMx10 -coos9TkpoNPnG4CELcU9402x/RpvumUHO1jsQkUm+9jaJXLE9gCxInm943xZYkqc -BW89zubWR2OZxiRvchLIrH+QtAuRcOi35hYQcRfO3gZPSEF9NUqjifLJS3tBEW1n -twiYTOURGa5CgNz7kAXU+FDKvuStx8KU1xad5hePrzb7AgMBAAGjQjBAMA8GA1Ud -EwEB/wQFMAMBAf8wHQYDVR0OBBYEFJngGWcNYtt2s9o9uFvo/ULSMQ6HMA4GA1Ud -DwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAmHNw4rDT7TnsTGDZqRKGFx6W -0OhUKDtkLSGm+J1WE2pIPU/HPinbbViDVD2HfSMF1OQc3Og4ZYbFdada2zUFvXfe -uyk3QAUHw5RSn8pk3fEbK9xGChACMf1KaA0HZJDmHvUqoai7PF35owgLEQzxPy0Q -lG/+4jSHg9bP5Rs1bdID4bANqKCqRieCNqcVtgimQlRXtpla4gt5kNdXElE1GYhB -aCXUNxeEFfsBctyV3lImIJgm4nb1J2/6ADtKYdkNy1GTKv0WBpanI5ojSP5RvbbE -sLFUzt5sQa0WZ37b/TjNuThOssFgy50X31ieemKyJo90lZvkWx3SD92YHJtZuSPT -MaCm/zjdzyBP6VhWOmfD0faZmZ26NraAL4hHT4a/RDqA5Dccprrql5gR0IRiR2Qe -qu5AvzSxnI9O4fKSTx+O856X3vOmeWqJcU9LJxdI/uz0UA9PSX3MReO9ekDFQdxh -VicGaeVyQYHTtgGJoC86cnn+OjC/QezHYj6RS8fZMXZC+fc8Y+wmjHMMfRod6qh8 -h6jCJ3zhM0EPz8/8AKAigJ5Kp28AsEFFtyLKaEjFQqKu3R3y4G5OBVixwJAWKqQ9 -EEC+j2Jjg6mcgn0tAumDMHzLJ8n9HmYAsC7TIS+OMxZsmO0QqAfWzJPP29FpHOTK -yeC2nOnOcXHebD8WpHk= ------END CERTIFICATE----- - -# Issuer: CN=Trustwave Global ECC P256 Certification Authority O=Trustwave Holdings, Inc. -# Subject: CN=Trustwave Global ECC P256 Certification Authority O=Trustwave Holdings, Inc. 
-# Label: "Trustwave Global ECC P256 Certification Authority" -# Serial: 4151900041497450638097112925 -# MD5 Fingerprint: 5b:44:e3:8d:5d:36:86:26:e8:0d:05:d2:59:a7:83:54 -# SHA1 Fingerprint: b4:90:82:dd:45:0c:be:8b:5b:b1:66:d3:e2:a4:08:26:cd:ed:42:cf -# SHA256 Fingerprint: 94:5b:bc:82:5e:a5:54:f4:89:d1:fd:51:a7:3d:df:2e:a6:24:ac:70:19:a0:52:05:22:5c:22:a7:8c:cf:a8:b4 ------BEGIN CERTIFICATE----- -MIICYDCCAgegAwIBAgIMDWpfCD8oXD5Rld9dMAoGCCqGSM49BAMCMIGRMQswCQYD -VQQGEwJVUzERMA8GA1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAf -BgNVBAoTGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3 -YXZlIEdsb2JhbCBFQ0MgUDI1NiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0x -NzA4MjMxOTM1MTBaFw00MjA4MjMxOTM1MTBaMIGRMQswCQYDVQQGEwJVUzERMA8G -A1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAfBgNVBAoTGFRydXN0 -d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3YXZlIEdsb2JhbCBF -Q0MgUDI1NiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTBZMBMGByqGSM49AgEGCCqG -SM49AwEHA0IABH77bOYj43MyCMpg5lOcunSNGLB4kFKA3TjASh3RqMyTpJcGOMoN -FWLGjgEqZZ2q3zSRLoHB5DOSMcT9CTqmP62jQzBBMA8GA1UdEwEB/wQFMAMBAf8w -DwYDVR0PAQH/BAUDAwcGADAdBgNVHQ4EFgQUo0EGrJBt0UrrdaVKEJmzsaGLSvcw -CgYIKoZIzj0EAwIDRwAwRAIgB+ZU2g6gWrKuEZ+Hxbb/ad4lvvigtwjzRM4q3wgh -DDcCIC0mA6AFvWvR9lz4ZcyGbbOcNEhjhAnFjXca4syc4XR7 ------END CERTIFICATE----- - -# Issuer: CN=Trustwave Global ECC P384 Certification Authority O=Trustwave Holdings, Inc. -# Subject: CN=Trustwave Global ECC P384 Certification Authority O=Trustwave Holdings, Inc. -# Label: "Trustwave Global ECC P384 Certification Authority" -# Serial: 2704997926503831671788816187 -# MD5 Fingerprint: ea:cf:60:c4:3b:b9:15:29:40:a1:97:ed:78:27:93:d6 -# SHA1 Fingerprint: e7:f3:a3:c8:cf:6f:c3:04:2e:6d:0e:67:32:c5:9e:68:95:0d:5e:d2 -# SHA256 Fingerprint: 55:90:38:59:c8:c0:c3:eb:b8:75:9e:ce:4e:25:57:22:5f:f5:75:8b:bd:38:eb:d4:82:76:60:1e:1b:d5:80:97 ------BEGIN CERTIFICATE----- -MIICnTCCAiSgAwIBAgIMCL2Fl2yZJ6SAaEc7MAoGCCqGSM49BAMDMIGRMQswCQYD -VQQGEwJVUzERMA8GA1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAf -BgNVBAoTGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3 -YXZlIEdsb2JhbCBFQ0MgUDM4NCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0x -NzA4MjMxOTM2NDNaFw00MjA4MjMxOTM2NDNaMIGRMQswCQYDVQQGEwJVUzERMA8G -A1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAfBgNVBAoTGFRydXN0 -d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3YXZlIEdsb2JhbCBF -Q0MgUDM4NCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTB2MBAGByqGSM49AgEGBSuB -BAAiA2IABGvaDXU1CDFHBa5FmVXxERMuSvgQMSOjfoPTfygIOiYaOs+Xgh+AtycJ -j9GOMMQKmw6sWASr9zZ9lCOkmwqKi6vr/TklZvFe/oyujUF5nQlgziip04pt89ZF -1PKYhDhloKNDMEEwDwYDVR0TAQH/BAUwAwEB/zAPBgNVHQ8BAf8EBQMDBwYAMB0G -A1UdDgQWBBRVqYSJ0sEyvRjLbKYHTsjnnb6CkDAKBggqhkjOPQQDAwNnADBkAjA3 -AZKXRRJ+oPM+rRk6ct30UJMDEr5E0k9BpIycnR+j9sKS50gU/k6bpZFXrsY3crsC -MGclCrEMXu6pY5Jv5ZAL/mYiykf9ijH3g/56vxC+GCsej/YpHpRZ744hN8tRmKVu -Sw== ------END CERTIFICATE----- - -# Issuer: CN=NAVER Global Root Certification Authority O=NAVER BUSINESS PLATFORM Corp. -# Subject: CN=NAVER Global Root Certification Authority O=NAVER BUSINESS PLATFORM Corp. 
-# Label: "NAVER Global Root Certification Authority" -# Serial: 9013692873798656336226253319739695165984492813 -# MD5 Fingerprint: c8:7e:41:f6:25:3b:f5:09:b3:17:e8:46:3d:bf:d0:9b -# SHA1 Fingerprint: 8f:6b:f2:a9:27:4a:da:14:a0:c4:f4:8e:61:27:f9:c0:1e:78:5d:d1 -# SHA256 Fingerprint: 88:f4:38:dc:f8:ff:d1:fa:8f:42:91:15:ff:e5:f8:2a:e1:e0:6e:0c:70:c3:75:fa:ad:71:7b:34:a4:9e:72:65 ------BEGIN CERTIFICATE----- -MIIFojCCA4qgAwIBAgIUAZQwHqIL3fXFMyqxQ0Rx+NZQTQ0wDQYJKoZIhvcNAQEM -BQAwaTELMAkGA1UEBhMCS1IxJjAkBgNVBAoMHU5BVkVSIEJVU0lORVNTIFBMQVRG -T1JNIENvcnAuMTIwMAYDVQQDDClOQVZFUiBHbG9iYWwgUm9vdCBDZXJ0aWZpY2F0 -aW9uIEF1dGhvcml0eTAeFw0xNzA4MTgwODU4NDJaFw0zNzA4MTgyMzU5NTlaMGkx -CzAJBgNVBAYTAktSMSYwJAYDVQQKDB1OQVZFUiBCVVNJTkVTUyBQTEFURk9STSBD -b3JwLjEyMDAGA1UEAwwpTkFWRVIgR2xvYmFsIFJvb3QgQ2VydGlmaWNhdGlvbiBB -dXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC21PGTXLVA -iQqrDZBbUGOukJR0F0Vy1ntlWilLp1agS7gvQnXp2XskWjFlqxcX0TM62RHcQDaH -38dq6SZeWYp34+hInDEW+j6RscrJo+KfziFTowI2MMtSAuXaMl3Dxeb57hHHi8lE -HoSTGEq0n+USZGnQJoViAbbJAh2+g1G7XNr4rRVqmfeSVPc0W+m/6imBEtRTkZaz -kVrd/pBzKPswRrXKCAfHcXLJZtM0l/aM9BhK4dA9WkW2aacp+yPOiNgSnABIqKYP -szuSjXEOdMWLyEz59JuOuDxp7W87UC9Y7cSw0BwbagzivESq2M0UXZR4Yb8Obtoq -vC8MC3GmsxY/nOb5zJ9TNeIDoKAYv7vxvvTWjIcNQvcGufFt7QSUqP620wbGQGHf -nZ3zVHbOUzoBppJB7ASjjw2i1QnK1sua8e9DXcCrpUHPXFNwcMmIpi3Ua2FzUCaG -YQ5fG8Ir4ozVu53BA0K6lNpfqbDKzE0K70dpAy8i+/Eozr9dUGWokG2zdLAIx6yo -0es+nPxdGoMuK8u180SdOqcXYZaicdNwlhVNt0xz7hlcxVs+Qf6sdWA7G2POAN3a -CJBitOUt7kinaxeZVL6HSuOpXgRM6xBtVNbv8ejyYhbLgGvtPe31HzClrkvJE+2K -AQHJuFFYwGY6sWZLxNUxAmLpdIQM201GLQIDAQABo0IwQDAdBgNVHQ4EFgQU0p+I -36HNLL3s9TsBAZMzJ7LrYEswDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMB -Af8wDQYJKoZIhvcNAQEMBQADggIBADLKgLOdPVQG3dLSLvCkASELZ0jKbY7gyKoN -qo0hV4/GPnrK21HUUrPUloSlWGB/5QuOH/XcChWB5Tu2tyIvCZwTFrFsDDUIbatj -cu3cvuzHV+YwIHHW1xDBE1UBjCpD5EHxzzp6U5LOogMFDTjfArsQLtk70pt6wKGm -+LUx5vR1yblTmXVHIloUFcd4G7ad6Qz4G3bxhYTeodoS76TiEJd6eN4MUZeoIUCL -hr0N8F5OSza7OyAfikJW4Qsav3vQIkMsRIz75Sq0bBwcupTgE34h5prCy8VCZLQe -lHsIJchxzIdFV4XTnyliIoNRlwAYl3dqmJLJfGBs32x9SuRwTMKeuB330DTHD8z7 -p/8Dvq1wkNoL3chtl1+afwkyQf3NosxabUzyqkn+Zvjp2DXrDige7kgvOtB5CTh8 -piKCk5XQA76+AqAF3SAi428diDRgxuYKuQl1C/AH6GmWNcf7I4GOODm4RStDeKLR -LBT/DShycpWbXgnbiUSYqqFJu3FS8r/2/yehNq+4tneI3TqkbZs0kNwUXTC/t+sX -5Ie3cdCh13cV1ELX8vMxmV2b3RZtP+oGI/hGoiLtk/bdmuYqh7GYVPEi92tF4+KO -dh2ajcQGjTa3FPOdVGm3jjzVpG2Tgbet9r1ke8LJaDmgkpzNNIaRkPpkUZ3+/uul -9XXeifdy ------END CERTIFICATE----- - -# Issuer: CN=AC RAIZ FNMT-RCM SERVIDORES SEGUROS O=FNMT-RCM OU=Ceres -# Subject: CN=AC RAIZ FNMT-RCM SERVIDORES SEGUROS O=FNMT-RCM OU=Ceres -# Label: "AC RAIZ FNMT-RCM SERVIDORES SEGUROS" -# Serial: 131542671362353147877283741781055151509 -# MD5 Fingerprint: 19:36:9c:52:03:2f:d2:d1:bb:23:cc:dd:1e:12:55:bb -# SHA1 Fingerprint: 62:ff:d9:9e:c0:65:0d:03:ce:75:93:d2:ed:3f:2d:32:c9:e3:e5:4a -# SHA256 Fingerprint: 55:41:53:b1:3d:2c:f9:dd:b7:53:bf:be:1a:4e:0a:e0:8d:0a:a4:18:70:58:fe:60:a2:b8:62:b2:e4:b8:7b:cb ------BEGIN CERTIFICATE----- -MIICbjCCAfOgAwIBAgIQYvYybOXE42hcG2LdnC6dlTAKBggqhkjOPQQDAzB4MQsw -CQYDVQQGEwJFUzERMA8GA1UECgwIRk5NVC1SQ00xDjAMBgNVBAsMBUNlcmVzMRgw -FgYDVQRhDA9WQVRFUy1RMjgyNjAwNEoxLDAqBgNVBAMMI0FDIFJBSVogRk5NVC1S -Q00gU0VSVklET1JFUyBTRUdVUk9TMB4XDTE4MTIyMDA5MzczM1oXDTQzMTIyMDA5 -MzczM1oweDELMAkGA1UEBhMCRVMxETAPBgNVBAoMCEZOTVQtUkNNMQ4wDAYDVQQL -DAVDZXJlczEYMBYGA1UEYQwPVkFURVMtUTI4MjYwMDRKMSwwKgYDVQQDDCNBQyBS -QUlaIEZOTVQtUkNNIFNFUlZJRE9SRVMgU0VHVVJPUzB2MBAGByqGSM49AgEGBSuB -BAAiA2IABPa6V1PIyqvfNkpSIeSX0oNnnvBlUdBeh8dHsVnyV0ebAAKTRBdp20LH 
-sbI6GA60XYyzZl2hNPk2LEnb80b8s0RpRBNm/dfF/a82Tc4DTQdxz69qBdKiQ1oK -Um8BA06Oi6NCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYD -VR0OBBYEFAG5L++/EYZg8k/QQW6rcx/n0m5JMAoGCCqGSM49BAMDA2kAMGYCMQCu -SuMrQMN0EfKVrRYj3k4MGuZdpSRea0R7/DjiT8ucRRcRTBQnJlU5dUoDzBOQn5IC -MQD6SmxgiHPz7riYYqnOK8LZiqZwMR2vsJRM60/G49HzYqc8/5MuB1xJAWdpEgJy -v+c= ------END CERTIFICATE----- - -# Issuer: CN=GlobalSign Root R46 O=GlobalSign nv-sa -# Subject: CN=GlobalSign Root R46 O=GlobalSign nv-sa -# Label: "GlobalSign Root R46" -# Serial: 1552617688466950547958867513931858518042577 -# MD5 Fingerprint: c4:14:30:e4:fa:66:43:94:2a:6a:1b:24:5f:19:d0:ef -# SHA1 Fingerprint: 53:a2:b0:4b:ca:6b:d6:45:e6:39:8a:8e:c4:0d:d2:bf:77:c3:a2:90 -# SHA256 Fingerprint: 4f:a3:12:6d:8d:3a:11:d1:c4:85:5a:4f:80:7c:ba:d6:cf:91:9d:3a:5a:88:b0:3b:ea:2c:63:72:d9:3c:40:c9 ------BEGIN CERTIFICATE----- -MIIFWjCCA0KgAwIBAgISEdK7udcjGJ5AXwqdLdDfJWfRMA0GCSqGSIb3DQEBDAUA -MEYxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9iYWxTaWduIG52LXNhMRwwGgYD -VQQDExNHbG9iYWxTaWduIFJvb3QgUjQ2MB4XDTE5MDMyMDAwMDAwMFoXDTQ2MDMy -MDAwMDAwMFowRjELMAkGA1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYt -c2ExHDAaBgNVBAMTE0dsb2JhbFNpZ24gUm9vdCBSNDYwggIiMA0GCSqGSIb3DQEB -AQUAA4ICDwAwggIKAoICAQCsrHQy6LNl5brtQyYdpokNRbopiLKkHWPd08EsCVeJ -OaFV6Wc0dwxu5FUdUiXSE2te4R2pt32JMl8Nnp8semNgQB+msLZ4j5lUlghYruQG -vGIFAha/r6gjA7aUD7xubMLL1aa7DOn2wQL7Id5m3RerdELv8HQvJfTqa1VbkNud -316HCkD7rRlr+/fKYIje2sGP1q7Vf9Q8g+7XFkyDRTNrJ9CG0Bwta/OrffGFqfUo -0q3v84RLHIf8E6M6cqJaESvWJ3En7YEtbWaBkoe0G1h6zD8K+kZPTXhc+CtI4wSE -y132tGqzZfxCnlEmIyDLPRT5ge1lFgBPGmSXZgjPjHvjK8Cd+RTyG/FWaha/LIWF -zXg4mutCagI0GIMXTpRW+LaCtfOW3T3zvn8gdz57GSNrLNRyc0NXfeD412lPFzYE -+cCQYDdF3uYM2HSNrpyibXRdQr4G9dlkbgIQrImwTDsHTUB+JMWKmIJ5jqSngiCN -I/onccnfxkF0oE32kRbcRoxfKWMxWXEM2G/CtjJ9++ZdU6Z+Ffy7dXxd7Pj2Fxzs -x2sZy/N78CsHpdlseVR2bJ0cpm4O6XkMqCNqo98bMDGfsVR7/mrLZqrcZdCinkqa -ByFrgY/bxFn63iLABJzjqls2k+g9vXqhnQt2sQvHnf3PmKgGwvgqo6GDoLclcqUC -4wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV -HQ4EFgQUA1yrc4GHqMywptWU4jaWSf8FmSwwDQYJKoZIhvcNAQEMBQADggIBAHx4 -7PYCLLtbfpIrXTncvtgdokIzTfnvpCo7RGkerNlFo048p9gkUbJUHJNOxO97k4Vg -JuoJSOD1u8fpaNK7ajFxzHmuEajwmf3lH7wvqMxX63bEIaZHU1VNaL8FpO7XJqti -2kM3S+LGteWygxk6x9PbTZ4IevPuzz5i+6zoYMzRx6Fcg0XERczzF2sUyQQCPtIk -pnnpHs6i58FZFZ8d4kuaPp92CC1r2LpXFNqD6v6MVenQTqnMdzGxRBF6XLE+0xRF -FRhiJBPSy03OXIPBNvIQtQ6IbbjhVp+J3pZmOUdkLG5NrmJ7v2B0GbhWrJKsFjLt -rWhV/pi60zTe9Mlhww6G9kuEYO4Ne7UyWHmRVSyBQ7N0H3qqJZ4d16GLuc1CLgSk -ZoNNiTW2bKg2SnkheCLQQrzRQDGQob4Ez8pn7fXwgNNgyYMqIgXQBztSvwyeqiv5 -u+YfjyW6hY0XHgL+XVAEV8/+LbzvXMAaq7afJMbfc2hIkCwU9D9SGuTSyxTDYWnP -4vkYxboznxSjBF25cfe1lNj2M8FawTSLfJvdkzrnE6JwYZ+vj+vYxXX4M2bUdGc6 -N3ec592kD3ZDZopD8p/7DEJ4Y9HiD2971KE9dJeFt0g5QdYg/NA6s/rob8SKunE3 -vouXsXgxT7PntgMTzlSdriVZzH81Xwj3QEUxeCp6 ------END CERTIFICATE----- - -# Issuer: CN=GlobalSign Root E46 O=GlobalSign nv-sa -# Subject: CN=GlobalSign Root E46 O=GlobalSign nv-sa -# Label: "GlobalSign Root E46" -# Serial: 1552617690338932563915843282459653771421763 -# MD5 Fingerprint: b5:b8:66:ed:de:08:83:e3:c9:e2:01:34:06:ac:51:6f -# SHA1 Fingerprint: 39:b4:6c:d5:fe:80:06:eb:e2:2f:4a:bb:08:33:a0:af:db:b9:dd:84 -# SHA256 Fingerprint: cb:b9:c4:4d:84:b8:04:3e:10:50:ea:31:a6:9f:51:49:55:d7:bf:d2:e2:c6:b4:93:01:01:9a:d6:1d:9f:50:58 ------BEGIN CERTIFICATE----- -MIICCzCCAZGgAwIBAgISEdK7ujNu1LzmJGjFDYQdmOhDMAoGCCqGSM49BAMDMEYx -CzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9iYWxTaWduIG52LXNhMRwwGgYDVQQD -ExNHbG9iYWxTaWduIFJvb3QgRTQ2MB4XDTE5MDMyMDAwMDAwMFoXDTQ2MDMyMDAw -MDAwMFowRjELMAkGA1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2Ex 
-HDAaBgNVBAMTE0dsb2JhbFNpZ24gUm9vdCBFNDYwdjAQBgcqhkjOPQIBBgUrgQQA -IgNiAAScDrHPt+ieUnd1NPqlRqetMhkytAepJ8qUuwzSChDH2omwlwxwEwkBjtjq -R+q+soArzfwoDdusvKSGN+1wCAB16pMLey5SnCNoIwZD7JIvU4Tb+0cUB+hflGdd -yXqBPCCjQjBAMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1Ud -DgQWBBQxCpCPtsad0kRLgLWi5h+xEk8blTAKBggqhkjOPQQDAwNoADBlAjEA31SQ -7Zvvi5QCkxeCmb6zniz2C5GMn0oUsfZkvLtoURMMA/cVi4RguYv/Uo7njLwcAjA8 -+RHUjE7AwWHCFUyqqx0LMV87HOIAl0Qx5v5zli/altP+CAezNIm8BZ/3Hobui3A= ------END CERTIFICATE----- - -# Issuer: CN=ANF Secure Server Root CA O=ANF Autoridad de Certificacion OU=ANF CA Raiz -# Subject: CN=ANF Secure Server Root CA O=ANF Autoridad de Certificacion OU=ANF CA Raiz -# Label: "ANF Secure Server Root CA" -# Serial: 996390341000653745 -# MD5 Fingerprint: 26:a6:44:5a:d9:af:4e:2f:b2:1d:b6:65:b0:4e:e8:96 -# SHA1 Fingerprint: 5b:6e:68:d0:cc:15:b6:a0:5f:1e:c1:5f:ae:02:fc:6b:2f:5d:6f:74 -# SHA256 Fingerprint: fb:8f:ec:75:91:69:b9:10:6b:1e:51:16:44:c6:18:c5:13:04:37:3f:6c:06:43:08:8d:8b:ef:fd:1b:99:75:99 ------BEGIN CERTIFICATE----- -MIIF7zCCA9egAwIBAgIIDdPjvGz5a7EwDQYJKoZIhvcNAQELBQAwgYQxEjAQBgNV -BAUTCUc2MzI4NzUxMDELMAkGA1UEBhMCRVMxJzAlBgNVBAoTHkFORiBBdXRvcmlk -YWQgZGUgQ2VydGlmaWNhY2lvbjEUMBIGA1UECxMLQU5GIENBIFJhaXoxIjAgBgNV -BAMTGUFORiBTZWN1cmUgU2VydmVyIFJvb3QgQ0EwHhcNMTkwOTA0MTAwMDM4WhcN -MzkwODMwMTAwMDM4WjCBhDESMBAGA1UEBRMJRzYzMjg3NTEwMQswCQYDVQQGEwJF -UzEnMCUGA1UEChMeQU5GIEF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uMRQwEgYD -VQQLEwtBTkYgQ0EgUmFpejEiMCAGA1UEAxMZQU5GIFNlY3VyZSBTZXJ2ZXIgUm9v -dCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANvrayvmZFSVgpCj -cqQZAZ2cC4Ffc0m6p6zzBE57lgvsEeBbphzOG9INgxwruJ4dfkUyYA8H6XdYfp9q -yGFOtibBTI3/TO80sh9l2Ll49a2pcbnvT1gdpd50IJeh7WhM3pIXS7yr/2WanvtH -2Vdy8wmhrnZEE26cLUQ5vPnHO6RYPUG9tMJJo8gN0pcvB2VSAKduyK9o7PQUlrZX -H1bDOZ8rbeTzPvY1ZNoMHKGESy9LS+IsJJ1tk0DrtSOOMspvRdOoiXsezx76W0OL -zc2oD2rKDF65nkeP8Nm2CgtYZRczuSPkdxl9y0oukntPLxB3sY0vaJxizOBQ+OyR -p1RMVwnVdmPF6GUe7m1qzwmd+nxPrWAI/VaZDxUse6mAq4xhj0oHdkLePfTdsiQz -W7i1o0TJrH93PB0j7IKppuLIBkwC/qxcmZkLLxCKpvR/1Yd0DVlJRfbwcVw5Kda/ -SiOL9V8BY9KHcyi1Swr1+KuCLH5zJTIdC2MKF4EA/7Z2Xue0sUDKIbvVgFHlSFJn -LNJhiQcND85Cd8BEc5xEUKDbEAotlRyBr+Qc5RQe8TZBAQIvfXOn3kLMTOmJDVb3 -n5HUA8ZsyY/b2BzgQJhdZpmYgG4t/wHFzstGH6wCxkPmrqKEPMVOHj1tyRRM4y5B -u8o5vzY8KhmqQYdOpc5LMnndkEl/AgMBAAGjYzBhMB8GA1UdIwQYMBaAFJxf0Gxj -o1+TypOYCK2Mh6UsXME3MB0GA1UdDgQWBBScX9BsY6Nfk8qTmAitjIelLFzBNzAO -BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQsFAAOC -AgEATh65isagmD9uw2nAalxJUqzLK114OMHVVISfk/CHGT0sZonrDUL8zPB1hT+L -9IBdeeUXZ701guLyPI59WzbLWoAAKfLOKyzxj6ptBZNscsdW699QIyjlRRA96Gej -rw5VD5AJYu9LWaL2U/HANeQvwSS9eS9OICI7/RogsKQOLHDtdD+4E5UGUcjohybK -pFtqFiGS3XNgnhAY3jyB6ugYw3yJ8otQPr0R4hUDqDZ9MwFsSBXXiJCZBMXM5gf0 -vPSQ7RPi6ovDj6MzD8EpTBNO2hVWcXNyglD2mjN8orGoGjR0ZVzO0eurU+AagNjq -OknkJjCb5RyKqKkVMoaZkgoQI1YS4PbOTOK7vtuNknMBZi9iPrJyJ0U27U1W45eZ -/zo1PqVUSlJZS2Db7v54EX9K3BR5YLZrZAPbFYPhor72I5dQ8AkzNqdxliXzuUJ9 -2zg/LFis6ELhDtjTO0wugumDLmsx2d1Hhk9tl5EuT+IocTUW0fJz/iUrB0ckYyfI -+PbZa/wSMVYIwFNCr5zQM378BvAxRAMU8Vjq8moNqRGyg77FGr8H6lnco4g175x2 -MjxNBiLOFeXdntiP2t7SxDnlF4HPOEfrf4htWRvfn0IUrn7PqLBmZdo3r5+qPeoo -tt7VMVgWglvquxl1AnMaykgaIZOQCo6ThKd9OyMYkomgjaw= ------END CERTIFICATE----- - -# Issuer: CN=Certum EC-384 CA O=Asseco Data Systems S.A. OU=Certum Certification Authority -# Subject: CN=Certum EC-384 CA O=Asseco Data Systems S.A. 
OU=Certum Certification Authority -# Label: "Certum EC-384 CA" -# Serial: 160250656287871593594747141429395092468 -# MD5 Fingerprint: b6:65:b3:96:60:97:12:a1:ec:4e:e1:3d:a3:c6:c9:f1 -# SHA1 Fingerprint: f3:3e:78:3c:ac:df:f4:a2:cc:ac:67:55:69:56:d7:e5:16:3c:e1:ed -# SHA256 Fingerprint: 6b:32:80:85:62:53:18:aa:50:d1:73:c9:8d:8b:da:09:d5:7e:27:41:3d:11:4c:f7:87:a0:f5:d0:6c:03:0c:f6 ------BEGIN CERTIFICATE----- -MIICZTCCAeugAwIBAgIQeI8nXIESUiClBNAt3bpz9DAKBggqhkjOPQQDAzB0MQsw -CQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBTLkEuMScw -JQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxGTAXBgNVBAMT -EENlcnR1bSBFQy0zODQgQ0EwHhcNMTgwMzI2MDcyNDU0WhcNNDMwMzI2MDcyNDU0 -WjB0MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBT -LkEuMScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxGTAX -BgNVBAMTEENlcnR1bSBFQy0zODQgQ0EwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAATE -KI6rGFtqvm5kN2PkzeyrOvfMobgOgknXhimfoZTy42B4mIF4Bk3y7JoOV2CDn7Tm -Fy8as10CW4kjPMIRBSqniBMY81CE1700LCeJVf/OTOffph8oxPBUw7l8t1Ot68Kj -QjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI0GZnQkdjrzife81r1HfS+8 -EF9LMA4GA1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNoADBlAjADVS2m5hjEfO/J -UG7BJw+ch69u1RsIGL2SKcHvlJF40jocVYli5RsJHrpka/F2tNQCMQC0QoSZ/6vn -nvuRlydd3LBbMHHOXjgaatkl5+r3YZJW+OraNsKHZZYuciUvf9/DE8k= ------END CERTIFICATE----- - -# Issuer: CN=Certum Trusted Root CA O=Asseco Data Systems S.A. OU=Certum Certification Authority -# Subject: CN=Certum Trusted Root CA O=Asseco Data Systems S.A. OU=Certum Certification Authority -# Label: "Certum Trusted Root CA" -# Serial: 40870380103424195783807378461123655149 -# MD5 Fingerprint: 51:e1:c2:e7:fe:4c:84:af:59:0e:2f:f4:54:6f:ea:29 -# SHA1 Fingerprint: c8:83:44:c0:18:ae:9f:cc:f1:87:b7:8f:22:d1:c5:d7:45:84:ba:e5 -# SHA256 Fingerprint: fe:76:96:57:38:55:77:3e:37:a9:5e:7a:d4:d9:cc:96:c3:01:57:c1:5d:31:76:5b:a9:b1:57:04:e1:ae:78:fd ------BEGIN CERTIFICATE----- -MIIFwDCCA6igAwIBAgIQHr9ZULjJgDdMBvfrVU+17TANBgkqhkiG9w0BAQ0FADB6 -MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBTLkEu -MScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxHzAdBgNV -BAMTFkNlcnR1bSBUcnVzdGVkIFJvb3QgQ0EwHhcNMTgwMzE2MTIxMDEzWhcNNDMw -MzE2MTIxMDEzWjB6MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEg -U3lzdGVtcyBTLkEuMScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRo -b3JpdHkxHzAdBgNVBAMTFkNlcnR1bSBUcnVzdGVkIFJvb3QgQ0EwggIiMA0GCSqG -SIb3DQEBAQUAA4ICDwAwggIKAoICAQDRLY67tzbqbTeRn06TpwXkKQMlzhyC93yZ -n0EGze2jusDbCSzBfN8pfktlL5On1AFrAygYo9idBcEq2EXxkd7fO9CAAozPOA/q -p1x4EaTByIVcJdPTsuclzxFUl6s1wB52HO8AU5853BSlLCIls3Jy/I2z5T4IHhQq -NwuIPMqw9MjCoa68wb4pZ1Xi/K1ZXP69VyywkI3C7Te2fJmItdUDmj0VDT06qKhF -8JVOJVkdzZhpu9PMMsmN74H+rX2Ju7pgE8pllWeg8xn2A1bUatMn4qGtg/BKEiJ3 -HAVz4hlxQsDsdUaakFjgao4rpUYwBI4Zshfjvqm6f1bxJAPXsiEodg42MEx51UGa -mqi4NboMOvJEGyCI98Ul1z3G4z5D3Yf+xOr1Uz5MZf87Sst4WmsXXw3Hw09Omiqi -7VdNIuJGmj8PkTQkfVXjjJU30xrwCSss0smNtA0Aq2cpKNgB9RkEth2+dv5yXMSF -ytKAQd8FqKPVhJBPC/PgP5sZ0jeJP/J7UhyM9uH3PAeXjA6iWYEMspA90+NZRu0P -qafegGtaqge2Gcu8V/OXIXoMsSt0Puvap2ctTMSYnjYJdmZm/Bo/6khUHL4wvYBQ -v3y1zgD2DGHZ5yQD4OMBgQ692IU0iL2yNqh7XAjlRICMb/gv1SHKHRzQ+8S1h9E6 -Tsd2tTVItQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBSM+xx1 -vALTn04uSNn5YFSqxLNP+jAOBgNVHQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQENBQAD -ggIBAEii1QALLtA/vBzVtVRJHlpr9OTy4EA34MwUe7nJ+jW1dReTagVphZzNTxl4 -WxmB82M+w85bj/UvXgF2Ez8sALnNllI5SW0ETsXpD4YN4fqzX4IS8TrOZgYkNCvo -zMrnadyHncI013nR03e4qllY/p0m+jiGPp2Kh2RX5Rc64vmNueMzeMGQ2Ljdt4NR -5MTMI9UGfOZR0800McD2RrsLrfw9EAUqO0qRJe6M1ISHgCq8CYyqOhNf6DR5UMEQ -GfnTKB7U0VEwKbOukGfWHwpjscWpxkIxYxeU72nLL/qMFH3EQxiJ2fAyQOaA4kZf 
-5ePBAFmo+eggvIksDkc0C+pXwlM2/KfUrzHN/gLldfq5Jwn58/U7yn2fqSLLiMmq -0Uc9NneoWWRrJ8/vJ8HjJLWG965+Mk2weWjROeiQWMODvA8s1pfrzgzhIMfatz7D -P78v3DSk+yshzWePS/Tj6tQ/50+6uaWTRRxmHyH6ZF5v4HaUMst19W7l9o/HuKTM -qJZ9ZPskWkoDbGs4xugDQ5r3V7mzKWmTOPQD8rv7gmsHINFSH5pkAnuYZttcTVoP -0ISVoDwUQwbKytu4QTbaakRnh6+v40URFWkIsr4WOZckbxJF0WddCajJFdr60qZf -E2Efv4WstK2tBZQIgx51F9NxO5NQI1mg7TyRVJ12AMXDuDjb ------END CERTIFICATE----- - -# Issuer: CN=TunTrust Root CA O=Agence Nationale de Certification Electronique -# Subject: CN=TunTrust Root CA O=Agence Nationale de Certification Electronique -# Label: "TunTrust Root CA" -# Serial: 108534058042236574382096126452369648152337120275 -# MD5 Fingerprint: 85:13:b9:90:5b:36:5c:b6:5e:b8:5a:f8:e0:31:57:b4 -# SHA1 Fingerprint: cf:e9:70:84:0f:e0:73:0f:9d:f6:0c:7f:2c:4b:ee:20:46:34:9c:bb -# SHA256 Fingerprint: 2e:44:10:2a:b5:8c:b8:54:19:45:1c:8e:19:d9:ac:f3:66:2c:af:bc:61:4b:6a:53:96:0a:30:f7:d0:e2:eb:41 ------BEGIN CERTIFICATE----- -MIIFszCCA5ugAwIBAgIUEwLV4kBMkkaGFmddtLu7sms+/BMwDQYJKoZIhvcNAQEL -BQAwYTELMAkGA1UEBhMCVE4xNzA1BgNVBAoMLkFnZW5jZSBOYXRpb25hbGUgZGUg -Q2VydGlmaWNhdGlvbiBFbGVjdHJvbmlxdWUxGTAXBgNVBAMMEFR1blRydXN0IFJv -b3QgQ0EwHhcNMTkwNDI2MDg1NzU2WhcNNDQwNDI2MDg1NzU2WjBhMQswCQYDVQQG -EwJUTjE3MDUGA1UECgwuQWdlbmNlIE5hdGlvbmFsZSBkZSBDZXJ0aWZpY2F0aW9u -IEVsZWN0cm9uaXF1ZTEZMBcGA1UEAwwQVHVuVHJ1c3QgUm9vdCBDQTCCAiIwDQYJ -KoZIhvcNAQEBBQADggIPADCCAgoCggIBAMPN0/y9BFPdDCA61YguBUtB9YOCfvdZ -n56eY+hz2vYGqU8ftPkLHzmMmiDQfgbU7DTZhrx1W4eI8NLZ1KMKsmwb60ksPqxd -2JQDoOw05TDENX37Jk0bbjBU2PWARZw5rZzJJQRNmpA+TkBuimvNKWfGzC3gdOgF -VwpIUPp6Q9p+7FuaDmJ2/uqdHYVy7BG7NegfJ7/Boce7SBbdVtfMTqDhuazb1YMZ -GoXRlJfXyqNlC/M4+QKu3fZnz8k/9YosRxqZbwUN/dAdgjH8KcwAWJeRTIAAHDOF -li/LQcKLEITDCSSJH7UP2dl3RxiSlGBcx5kDPP73lad9UKGAwqmDrViWVSHbhlnU -r8a83YFuB9tgYv7sEG7aaAH0gxupPqJbI9dkxt/con3YS7qC0lH4Zr8GRuR5KiY2 -eY8fTpkdso8MDhz/yV3A/ZAQprE38806JG60hZC/gLkMjNWb1sjxVj8agIl6qeIb -MlEsPvLfe/ZdeikZjuXIvTZxi11Mwh0/rViizz1wTaZQmCXcI/m4WEEIcb9PuISg -jwBUFfyRbVinljvrS5YnzWuioYasDXxU5mZMZl+QviGaAkYt5IPCgLnPSz7ofzwB -7I9ezX/SKEIBlYrilz0QIX32nRzFNKHsLA4KUiwSVXAkPcvCFDVDXSdOvsC9qnyW -5/yeYa1E0wCXAgMBAAGjYzBhMB0GA1UdDgQWBBQGmpsfU33x9aTI04Y+oXNZtPdE -ITAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFAaamx9TffH1pMjThj6hc1m0 -90QhMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAqgVutt0Vyb+z -xiD2BkewhpMl0425yAA/l/VSJ4hxyXT968pk21vvHl26v9Hr7lxpuhbI87mP0zYu -QEkHDVneixCwSQXi/5E/S7fdAo74gShczNxtr18UnH1YeA32gAm56Q6XKRm4t+v4 -FstVEuTGfbvE7Pi1HE4+Z7/FXxttbUcoqgRYYdZ2vyJ/0Adqp2RT8JeNnYA/u8EH -22Wv5psymsNUk8QcCMNE+3tjEUPRahphanltkE8pjkcFwRJpadbGNjHh/PqAulxP -xOu3Mqz4dWEX1xAZufHSCe96Qp1bWgvUxpVOKs7/B9dPfhgGiPEZtdmYu65xxBzn -dFlY7wyJz4sfdZMaBBSSSFCp61cpABbjNhzI+L/wM9VBD8TMPN3pM0MBkRArHtG5 -Xc0yGYuPjCB31yLEQtyEFpslbei0VXF/sHyz03FJuc9SpAQ/3D2gu68zngowYI7b -nV2UqL1g52KAdoGDDIzMMEZJ4gzSqK/rYXHv5yJiqfdcZGyfFoxnNidF9Ql7v/YQ -CvGwjVRDjAS6oz/v4jXH+XTgbzRB0L9zZVcg+ZtnemZoJE6AZb0QmQZZ8mWvuMZH -u/2QeItBcy6vVR/cO5JyboTT0GFMDcx2V+IthSIVNg3rAZ3r2OvEhJn7wAzMMujj -d9qDRIueVSjAi1jTkD5OGwDxFa2DK5o= ------END CERTIFICATE----- - -# Issuer: CN=HARICA TLS RSA Root CA 2021 O=Hellenic Academic and Research Institutions CA -# Subject: CN=HARICA TLS RSA Root CA 2021 O=Hellenic Academic and Research Institutions CA -# Label: "HARICA TLS RSA Root CA 2021" -# Serial: 76817823531813593706434026085292783742 -# MD5 Fingerprint: 65:47:9b:58:86:dd:2c:f0:fc:a2:84:1f:1e:96:c4:91 -# SHA1 Fingerprint: 02:2d:05:82:fa:88:ce:14:0c:06:79:de:7f:14:10:e9:45:d7:a5:6d -# SHA256 Fingerprint: d9:5d:0e:8e:da:79:52:5b:f9:be:b1:1b:14:d2:10:0d:32:94:98:5f:0c:62:d9:fa:bd:9c:d9:99:ec:cb:7b:1d ------BEGIN 
CERTIFICATE----- -MIIFpDCCA4ygAwIBAgIQOcqTHO9D88aOk8f0ZIk4fjANBgkqhkiG9w0BAQsFADBs -MQswCQYDVQQGEwJHUjE3MDUGA1UECgwuSGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl -c2VhcmNoIEluc3RpdHV0aW9ucyBDQTEkMCIGA1UEAwwbSEFSSUNBIFRMUyBSU0Eg -Um9vdCBDQSAyMDIxMB4XDTIxMDIxOTEwNTUzOFoXDTQ1MDIxMzEwNTUzN1owbDEL -MAkGA1UEBhMCR1IxNzA1BgNVBAoMLkhlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNl -YXJjaCBJbnN0aXR1dGlvbnMgQ0ExJDAiBgNVBAMMG0hBUklDQSBUTFMgUlNBIFJv -b3QgQ0EgMjAyMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAIvC569l -mwVnlskNJLnQDmT8zuIkGCyEf3dRywQRNrhe7Wlxp57kJQmXZ8FHws+RFjZiPTgE -4VGC/6zStGndLuwRo0Xua2s7TL+MjaQenRG56Tj5eg4MmOIjHdFOY9TnuEFE+2uv -a9of08WRiFukiZLRgeaMOVig1mlDqa2YUlhu2wr7a89o+uOkXjpFc5gH6l8Cct4M -pbOfrqkdtx2z/IpZ525yZa31MJQjB/OCFks1mJxTuy/K5FrZx40d/JiZ+yykgmvw -Kh+OC19xXFyuQnspiYHLA6OZyoieC0AJQTPb5lh6/a6ZcMBaD9YThnEvdmn8kN3b -LW7R8pv1GmuebxWMevBLKKAiOIAkbDakO/IwkfN4E8/BPzWr8R0RI7VDIp4BkrcY -AuUR0YLbFQDMYTfBKnya4dC6s1BG7oKsnTH4+yPiAwBIcKMJJnkVU2DzOFytOOqB -AGMUuTNe3QvboEUHGjMJ+E20pwKmafTCWQWIZYVWrkvL4N48fS0ayOn7H6NhStYq -E613TBoYm5EPWNgGVMWX+Ko/IIqmhaZ39qb8HOLubpQzKoNQhArlT4b4UEV4AIHr -W2jjJo3Me1xR9BQsQL4aYB16cmEdH2MtiKrOokWQCPxrvrNQKlr9qEgYRtaQQJKQ -CoReaDH46+0N0x3GfZkYVVYnZS6NRcUk7M7jAgMBAAGjQjBAMA8GA1UdEwEB/wQF -MAMBAf8wHQYDVR0OBBYEFApII6ZgpJIKM+qTW8VX6iVNvRLuMA4GA1UdDwEB/wQE -AwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAPpBIqm5iFSVmewzVjIuJndftTgfvnNAU -X15QvWiWkKQUEapobQk1OUAJ2vQJLDSle1mESSmXdMgHHkdt8s4cUCbjnj1AUz/3 -f5Z2EMVGpdAgS1D0NTsY9FVqQRtHBmg8uwkIYtlfVUKqrFOFrJVWNlar5AWMxaja -H6NpvVMPxP/cyuN+8kyIhkdGGvMA9YCRotxDQpSbIPDRzbLrLFPCU3hKTwSUQZqP -JzLB5UkZv/HywouoCjkxKLR9YjYsTewfM7Z+d21+UPCfDtcRj88YxeMn/ibvBZ3P -zzfF0HvaO7AWhAw6k9a+F9sPPg4ZeAnHqQJyIkv3N3a6dcSFA1pj1bF1BcK5vZSt -jBWZp5N99sXzqnTPBIWUmAD04vnKJGW/4GKvyMX6ssmeVkjaef2WdhW+o45WxLM0 -/L5H9MG0qPzVMIho7suuyWPEdr6sOBjhXlzPrjoiUevRi7PzKzMHVIf6tLITe7pT -BGIBnfHAT+7hOtSLIBD6Alfm78ELt5BGnBkpjNxvoEppaZS3JGWg/6w/zgH7IS79 -aPib8qXPMThcFarmlwDB31qlpzmq6YR/PFGoOtmUW4y/Twhx5duoXNTSpv4Ao8YW -xw/ogM4cKGR0GQjTQuPOAF1/sdwTsOEFy9EgqoZ0njnnkf3/W9b3raYvAwtt41dU -63ZTGI0RmLo= ------END CERTIFICATE----- - -# Issuer: CN=HARICA TLS ECC Root CA 2021 O=Hellenic Academic and Research Institutions CA -# Subject: CN=HARICA TLS ECC Root CA 2021 O=Hellenic Academic and Research Institutions CA -# Label: "HARICA TLS ECC Root CA 2021" -# Serial: 137515985548005187474074462014555733966 -# MD5 Fingerprint: ae:f7:4c:e5:66:35:d1:b7:9b:8c:22:93:74:d3:4b:b0 -# SHA1 Fingerprint: bc:b0:c1:9d:e9:98:92:70:19:38:57:e9:8d:a7:b4:5d:6e:ee:01:48 -# SHA256 Fingerprint: 3f:99:cc:47:4a:cf:ce:4d:fe:d5:87:94:66:5e:47:8d:15:47:73:9f:2e:78:0f:1b:b4:ca:9b:13:30:97:d4:01 ------BEGIN CERTIFICATE----- -MIICVDCCAdugAwIBAgIQZ3SdjXfYO2rbIvT/WeK/zjAKBggqhkjOPQQDAzBsMQsw -CQYDVQQGEwJHUjE3MDUGA1UECgwuSGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJlc2Vh -cmNoIEluc3RpdHV0aW9ucyBDQTEkMCIGA1UEAwwbSEFSSUNBIFRMUyBFQ0MgUm9v -dCBDQSAyMDIxMB4XDTIxMDIxOTExMDExMFoXDTQ1MDIxMzExMDEwOVowbDELMAkG -A1UEBhMCR1IxNzA1BgNVBAoMLkhlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJj -aCBJbnN0aXR1dGlvbnMgQ0ExJDAiBgNVBAMMG0hBUklDQSBUTFMgRUNDIFJvb3Qg -Q0EgMjAyMTB2MBAGByqGSM49AgEGBSuBBAAiA2IABDgI/rGgltJ6rK9JOtDA4MM7 -KKrxcm1lAEeIhPyaJmuqS7psBAqIXhfyVYf8MLA04jRYVxqEU+kw2anylnTDUR9Y -STHMmE5gEYd103KUkE+bECUqqHgtvpBBWJAVcqeht6NCMEAwDwYDVR0TAQH/BAUw -AwEB/zAdBgNVHQ4EFgQUyRtTgRL+BNUW0aq8mm+3oJUZbsowDgYDVR0PAQH/BAQD -AgGGMAoGCCqGSM49BAMDA2cAMGQCMBHervjcToiwqfAircJRQO9gcS3ujwLEXQNw -SaSS6sUUiHCm0w2wqsosQJz76YJumgIwK0eaB8bRwoF8yguWGEEbo/QwCZ61IygN -nxS2PFOiTAZpffpskcYqSUXm7LcT4Tps ------END CERTIFICATE----- - -# Issuer: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 -# Subject: 
CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 -# Label: "Autoridad de Certificacion Firmaprofesional CIF A62634068" -# Serial: 1977337328857672817 -# MD5 Fingerprint: 4e:6e:9b:54:4c:ca:b7:fa:48:e4:90:b1:15:4b:1c:a3 -# SHA1 Fingerprint: 0b:be:c2:27:22:49:cb:39:aa:db:35:5c:53:e3:8c:ae:78:ff:b6:fe -# SHA256 Fingerprint: 57:de:05:83:ef:d2:b2:6e:03:61:da:99:da:9d:f4:64:8d:ef:7e:e8:44:1c:3b:72:8a:fa:9b:cd:e0:f9:b2:6a ------BEGIN CERTIFICATE----- -MIIGFDCCA/ygAwIBAgIIG3Dp0v+ubHEwDQYJKoZIhvcNAQELBQAwUTELMAkGA1UE -BhMCRVMxQjBABgNVBAMMOUF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIEZpcm1h -cHJvZmVzaW9uYWwgQ0lGIEE2MjYzNDA2ODAeFw0xNDA5MjMxNTIyMDdaFw0zNjA1 -MDUxNTIyMDdaMFExCzAJBgNVBAYTAkVTMUIwQAYDVQQDDDlBdXRvcmlkYWQgZGUg -Q2VydGlmaWNhY2lvbiBGaXJtYXByb2Zlc2lvbmFsIENJRiBBNjI2MzQwNjgwggIi -MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKlmuO6vj78aI14H9M2uDDUtd9 -thDIAl6zQyrET2qyyhxdKJp4ERppWVevtSBC5IsP5t9bpgOSL/UR5GLXMnE42QQM -cas9UX4PB99jBVzpv5RvwSmCwLTaUbDBPLutN0pcyvFLNg4kq7/DhHf9qFD0sefG -L9ItWY16Ck6WaVICqjaY7Pz6FIMMNx/Jkjd/14Et5cS54D40/mf0PmbR0/RAz15i -NA9wBj4gGFrO93IbJWyTdBSTo3OxDqqHECNZXyAFGUftaI6SEspd/NYrspI8IM/h -X68gvqB2f3bl7BqGYTM+53u0P6APjqK5am+5hyZvQWyIplD9amML9ZMWGxmPsu2b -m8mQ9QEM3xk9Dz44I8kvjwzRAv4bVdZO0I08r0+k8/6vKtMFnXkIoctXMbScyJCy -Z/QYFpM6/EfY0XiWMR+6KwxfXZmtY4laJCB22N/9q06mIqqdXuYnin1oKaPnirja -EbsXLZmdEyRG98Xi2J+Of8ePdG1asuhy9azuJBCtLxTa/y2aRnFHvkLfuwHb9H/T -KI8xWVvTyQKmtFLKbpf7Q8UIJm+K9Lv9nyiqDdVF8xM6HdjAeI9BZzwelGSuewvF -6NkBiDkal4ZkQdU7hwxu+g/GvUgUvzlN1J5Bto+WHWOWk9mVBngxaJ43BjuAiUVh -OSPHG0SjFeUc+JIwuwIDAQABo4HvMIHsMB0GA1UdDgQWBBRlzeurNR4APn7VdMAc -tHNHDhpkLzASBgNVHRMBAf8ECDAGAQH/AgEBMIGmBgNVHSAEgZ4wgZswgZgGBFUd -IAAwgY8wLwYIKwYBBQUHAgEWI2h0dHA6Ly93d3cuZmlybWFwcm9mZXNpb25hbC5j -b20vY3BzMFwGCCsGAQUFBwICMFAeTgBQAGEAcwBlAG8AIABkAGUAIABsAGEAIABC -AG8AbgBhAG4AbwB2AGEAIAA0ADcAIABCAGEAcgBjAGUAbABvAG4AYQAgADAAOAAw -ADEANzAOBgNVHQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQELBQADggIBAHSHKAIrdx9m -iWTtj3QuRhy7qPj4Cx2Dtjqn6EWKB7fgPiDL4QjbEwj4KKE1soCzC1HA01aajTNF -Sa9J8OA9B3pFE1r/yJfY0xgsfZb43aJlQ3CTkBW6kN/oGbDbLIpgD7dvlAceHabJ -hfa9NPhAeGIQcDq+fUs5gakQ1JZBu/hfHAsdCPKxsIl68veg4MSPi3i1O1ilI45P -Vf42O+AMt8oqMEEgtIDNrvx2ZnOorm7hfNoD6JQg5iKj0B+QXSBTFCZX2lSX3xZE -EAEeiGaPcjiT3SC3NL7X8e5jjkd5KAb881lFJWAiMxujX6i6KtoaPc1A6ozuBRWV -1aUsIC+nmCjuRfzxuIgALI9C2lHVnOUTaHFFQ4ueCyE8S1wF3BqfmI7avSKecs2t -CsvMo2ebKHTEm9caPARYpoKdrcd7b/+Alun4jWq9GJAd/0kakFI3ky88Al2CdgtR -5xbHV/g4+afNmyJU72OwFW1TZQNKXkqgsqeOSQBZONXH9IBk9W6VULgRfhVwOEqw -f9DEMnDAGf/JOC0ULGb0QkTmVXYbgBVX/8Cnp6o5qtjTcNAuuuuUavpfNIbnYrX9 -ivAwhZTJryQCL2/W3Wf+47BVTwSYT6RBVuKT0Gro1vP7ZeDOdcQxWQzugsgMYDNK -GbqEZycPvEJdvSRUDewdcAZfpLz6IHxV ------END CERTIFICATE----- - -# Issuer: CN=vTrus ECC Root CA O=iTrusChina Co.,Ltd. -# Subject: CN=vTrus ECC Root CA O=iTrusChina Co.,Ltd. 
-# Label: "vTrus ECC Root CA" -# Serial: 630369271402956006249506845124680065938238527194 -# MD5 Fingerprint: de:4b:c1:f5:52:8c:9b:43:e1:3e:8f:55:54:17:8d:85 -# SHA1 Fingerprint: f6:9c:db:b0:fc:f6:02:13:b6:52:32:a6:a3:91:3f:16:70:da:c3:e1 -# SHA256 Fingerprint: 30:fb:ba:2c:32:23:8e:2a:98:54:7a:f9:79:31:e5:50:42:8b:9b:3f:1c:8e:eb:66:33:dc:fa:86:c5:b2:7d:d3 ------BEGIN CERTIFICATE----- -MIICDzCCAZWgAwIBAgIUbmq8WapTvpg5Z6LSa6Q75m0c1towCgYIKoZIzj0EAwMw -RzELMAkGA1UEBhMCQ04xHDAaBgNVBAoTE2lUcnVzQ2hpbmEgQ28uLEx0ZC4xGjAY -BgNVBAMTEXZUcnVzIEVDQyBSb290IENBMB4XDTE4MDczMTA3MjY0NFoXDTQzMDcz -MTA3MjY0NFowRzELMAkGA1UEBhMCQ04xHDAaBgNVBAoTE2lUcnVzQ2hpbmEgQ28u -LEx0ZC4xGjAYBgNVBAMTEXZUcnVzIEVDQyBSb290IENBMHYwEAYHKoZIzj0CAQYF -K4EEACIDYgAEZVBKrox5lkqqHAjDo6LN/llWQXf9JpRCux3NCNtzslt188+cToL0 -v/hhJoVs1oVbcnDS/dtitN9Ti72xRFhiQgnH+n9bEOf+QP3A2MMrMudwpremIFUd -e4BdS49nTPEQo0IwQDAdBgNVHQ4EFgQUmDnNvtiyjPeyq+GtJK97fKHbH88wDwYD -VR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwCgYIKoZIzj0EAwMDaAAwZQIw -V53dVvHH4+m4SVBrm2nDb+zDfSXkV5UTQJtS0zvzQBm8JsctBp61ezaf9SXUY2sA -AjEA6dPGnlaaKsyh2j/IZivTWJwghfqrkYpwcBE4YGQLYgmRWAD5Tfs0aNoJrSEG -GJTO ------END CERTIFICATE----- - -# Issuer: CN=vTrus Root CA O=iTrusChina Co.,Ltd. -# Subject: CN=vTrus Root CA O=iTrusChina Co.,Ltd. -# Label: "vTrus Root CA" -# Serial: 387574501246983434957692974888460947164905180485 -# MD5 Fingerprint: b8:c9:37:df:fa:6b:31:84:64:c5:ea:11:6a:1b:75:fc -# SHA1 Fingerprint: 84:1a:69:fb:f5:cd:1a:25:34:13:3d:e3:f8:fc:b8:99:d0:c9:14:b7 -# SHA256 Fingerprint: 8a:71:de:65:59:33:6f:42:6c:26:e5:38:80:d0:0d:88:a1:8d:a4:c6:a9:1f:0d:cb:61:94:e2:06:c5:c9:63:87 ------BEGIN CERTIFICATE----- -MIIFVjCCAz6gAwIBAgIUQ+NxE9izWRRdt86M/TX9b7wFjUUwDQYJKoZIhvcNAQEL -BQAwQzELMAkGA1UEBhMCQ04xHDAaBgNVBAoTE2lUcnVzQ2hpbmEgQ28uLEx0ZC4x -FjAUBgNVBAMTDXZUcnVzIFJvb3QgQ0EwHhcNMTgwNzMxMDcyNDA1WhcNNDMwNzMx -MDcyNDA1WjBDMQswCQYDVQQGEwJDTjEcMBoGA1UEChMTaVRydXNDaGluYSBDby4s -THRkLjEWMBQGA1UEAxMNdlRydXMgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEBBQAD -ggIPADCCAgoCggIBAL1VfGHTuB0EYgWgrmy3cLRB6ksDXhA/kFocizuwZotsSKYc -IrrVQJLuM7IjWcmOvFjai57QGfIvWcaMY1q6n6MLsLOaXLoRuBLpDLvPbmyAhykU -AyyNJJrIZIO1aqwTLDPxn9wsYTwaP3BVm60AUn/PBLn+NvqcwBauYv6WTEN+VRS+ -GrPSbcKvdmaVayqwlHeFXgQPYh1jdfdr58tbmnDsPmcF8P4HCIDPKNsFxhQnL4Z9 -8Cfe/+Z+M0jnCx5Y0ScrUw5XSmXX+6KAYPxMvDVTAWqXcoKv8R1w6Jz1717CbMdH -flqUhSZNO7rrTOiwCcJlwp2dCZtOtZcFrPUGoPc2BX70kLJrxLT5ZOrpGgrIDajt -J8nU57O5q4IikCc9Kuh8kO+8T/3iCiSn3mUkpF3qwHYw03dQ+A0Em5Q2AXPKBlim -0zvc+gRGE1WKyURHuFE5Gi7oNOJ5y1lKCn+8pu8fA2dqWSslYpPZUxlmPCdiKYZN -pGvu/9ROutW04o5IWgAZCfEF2c6Rsffr6TlP9m8EQ5pV9T4FFL2/s1m02I4zhKOQ -UqqzApVg+QxMaPnu1RcN+HFXtSXkKe5lXa/R7jwXC1pDxaWG6iSe4gUH3DRCEpHW -OXSuTEGC2/KmSNGzm/MzqvOmwMVO9fSddmPmAsYiS8GVP1BkLFTltvA8Kc9XAgMB -AAGjQjBAMB0GA1UdDgQWBBRUYnBj8XWEQ1iO0RYgscasGrz2iTAPBgNVHRMBAf8E -BTADAQH/MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAKbqSSaet -8PFww+SX8J+pJdVrnjT+5hpk9jprUrIQeBqfTNqK2uwcN1LgQkv7bHbKJAs5EhWd -nxEt/Hlk3ODg9d3gV8mlsnZwUKT+twpw1aA08XXXTUm6EdGz2OyC/+sOxL9kLX1j -bhd47F18iMjrjld22VkE+rxSH0Ws8HqA7Oxvdq6R2xCOBNyS36D25q5J08FsEhvM -Kar5CKXiNxTKsbhm7xqC5PD48acWabfbqWE8n/Uxy+QARsIvdLGx14HuqCaVvIiv -TDUHKgLKeBRtRytAVunLKmChZwOgzoy8sHJnxDHO2zTlJQNgJXtxmOTAGytfdELS -S8VZCAeHvsXDf+eW2eHcKJfWjwXj9ZtOyh1QRwVTsMo554WgicEFOwE30z9J4nfr -I8iIZjs9OXYhRvHsXyO466JmdXTBQPfYaJqT4i2pLr0cox7IdMakLXogqzu4sEb9 -b91fUlV1YvCXoHzXOP0l382gmxDPi7g4Xl7FtKYCNqEeXxzP4padKar9mK5S4fNB -UvupLnKWnyfjqnN9+BojZns7q2WwMgFLFT49ok8MKzWixtlnEjUwzXYuFrOZnk1P -Ti07NEPhmg4NpGaXutIcSkwsKouLgU9xGqndXHt7CMUADTdA43x7VF8vhV929ven -sBxXVsFy6K2ir40zSbofitzmdHxghm+Hl3s= ------END CERTIFICATE----- - -# 
Issuer: CN=ISRG Root X2 O=Internet Security Research Group -# Subject: CN=ISRG Root X2 O=Internet Security Research Group -# Label: "ISRG Root X2" -# Serial: 87493402998870891108772069816698636114 -# MD5 Fingerprint: d3:9e:c4:1e:23:3c:a6:df:cf:a3:7e:6d:e0:14:e6:e5 -# SHA1 Fingerprint: bd:b1:b9:3c:d5:97:8d:45:c6:26:14:55:f8:db:95:c7:5a:d1:53:af -# SHA256 Fingerprint: 69:72:9b:8e:15:a8:6e:fc:17:7a:57:af:b7:17:1d:fc:64:ad:d2:8c:2f:ca:8c:f1:50:7e:34:45:3c:cb:14:70 ------BEGIN CERTIFICATE----- -MIICGzCCAaGgAwIBAgIQQdKd0XLq7qeAwSxs6S+HUjAKBggqhkjOPQQDAzBPMQsw -CQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJuZXQgU2VjdXJpdHkgUmVzZWFyY2gg -R3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBYMjAeFw0yMDA5MDQwMDAwMDBaFw00 -MDA5MTcxNjAwMDBaME8xCzAJBgNVBAYTAlVTMSkwJwYDVQQKEyBJbnRlcm5ldCBT -ZWN1cml0eSBSZXNlYXJjaCBHcm91cDEVMBMGA1UEAxMMSVNSRyBSb290IFgyMHYw -EAYHKoZIzj0CAQYFK4EEACIDYgAEzZvVn4CDCuwJSvMWSj5cz3es3mcFDR0HttwW -+1qLFNvicWDEukWVEYmO6gbf9yoWHKS5xcUy4APgHoIYOIvXRdgKam7mAHf7AlF9 -ItgKbppbd9/w+kHsOdx1ymgHDB/qo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0T -AQH/BAUwAwEB/zAdBgNVHQ4EFgQUfEKWrt5LSDv6kviejM9ti6lyN5UwCgYIKoZI -zj0EAwMDaAAwZQIwe3lORlCEwkSHRhtFcP9Ymd70/aTSVaYgLXTWNLxBo1BfASdW -tL4ndQavEi51mI38AjEAi/V3bNTIZargCyzuFJ0nN6T5U6VR5CmD1/iQMVtCnwr1 -/q4AaOeMSQ+2b1tbFfLn ------END CERTIFICATE----- - -# Issuer: CN=HiPKI Root CA - G1 O=Chunghwa Telecom Co., Ltd. -# Subject: CN=HiPKI Root CA - G1 O=Chunghwa Telecom Co., Ltd. -# Label: "HiPKI Root CA - G1" -# Serial: 60966262342023497858655262305426234976 -# MD5 Fingerprint: 69:45:df:16:65:4b:e8:68:9a:8f:76:5f:ff:80:9e:d3 -# SHA1 Fingerprint: 6a:92:e4:a8:ee:1b:ec:96:45:37:e3:29:57:49:cd:96:e3:e5:d2:60 -# SHA256 Fingerprint: f0:15:ce:3c:c2:39:bf:ef:06:4b:e9:f1:d2:c4:17:e1:a0:26:4a:0a:94:be:1f:0c:8d:12:18:64:eb:69:49:cc ------BEGIN CERTIFICATE----- -MIIFajCCA1KgAwIBAgIQLd2szmKXlKFD6LDNdmpeYDANBgkqhkiG9w0BAQsFADBP -MQswCQYDVQQGEwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0 -ZC4xGzAZBgNVBAMMEkhpUEtJIFJvb3QgQ0EgLSBHMTAeFw0xOTAyMjIwOTQ2MDRa -Fw0zNzEyMzExNTU5NTlaME8xCzAJBgNVBAYTAlRXMSMwIQYDVQQKDBpDaHVuZ2h3 -YSBUZWxlY29tIENvLiwgTHRkLjEbMBkGA1UEAwwSSGlQS0kgUm9vdCBDQSAtIEcx -MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA9B5/UnMyDHPkvRN0o9Qw -qNCuS9i233VHZvR85zkEHmpwINJaR3JnVfSl6J3VHiGh8Ge6zCFovkRTv4354twv -Vcg3Px+kwJyz5HdcoEb+d/oaoDjq7Zpy3iu9lFc6uux55199QmQ5eiY29yTw1S+6 -lZgRZq2XNdZ1AYDgr/SEYYwNHl98h5ZeQa/rh+r4XfEuiAU+TCK72h8q3VJGZDnz -Qs7ZngyzsHeXZJzA9KMuH5UHsBffMNsAGJZMoYFL3QRtU6M9/Aes1MU3guvklQgZ -KILSQjqj2FPseYlgSGDIcpJQ3AOPgz+yQlda22rpEZfdhSi8MEyr48KxRURHH+CK -FgeW0iEPU8DtqX7UTuybCeyvQqww1r/REEXgphaypcXTT3OUM3ECoWqj1jOXTyFj -HluP2cFeRXF3D4FdXyGarYPM+l7WjSNfGz1BryB1ZlpK9p/7qxj3ccC2HTHsOyDr -y+K49a6SsvfhhEvyovKTmiKe0xRvNlS9H15ZFblzqMF8b3ti6RZsR1pl8w4Rm0bZ -/W3c1pzAtH2lsN0/Vm+h+fbkEkj9Bn8SV7apI09bA8PgcSojt/ewsTu8mL3WmKgM -a/aOEmem8rJY5AIJEzypuxC00jBF8ez3ABHfZfjcK0NVvxaXxA/VLGGEqnKG/uY6 -fsI/fe78LxQ+5oXdUG+3Se0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAdBgNV -HQ4EFgQU8ncX+l6o/vY9cdVouslGDDjYr7AwDgYDVR0PAQH/BAQDAgGGMA0GCSqG -SIb3DQEBCwUAA4ICAQBQUfB13HAE4/+qddRxosuej6ip0691x1TPOhwEmSKsxBHi -7zNKpiMdDg1H2DfHb680f0+BazVP6XKlMeJ45/dOlBhbQH3PayFUhuaVevvGyuqc -SE5XCV0vrPSltJczWNWseanMX/mF+lLFjfiRFOs6DRfQUsJ748JzjkZ4Bjgs6Fza -ZsT0pPBWGTMpWmWSBUdGSquEwx4noR8RkpkndZMPvDY7l1ePJlsMu5wP1G4wB9Tc -XzZoZjmDlicmisjEOf6aIW/Vcobpf2Lll07QJNBAsNB1CI69aO4I1258EHBGG3zg -iLKecoaZAeO/n0kZtCW+VmWuF2PlHt/o/0elv+EmBYTksMCv5wiZqAxeJoBF1Pho -L5aPruJKHJwWDBNvOIf2u8g0X5IDUXlwpt/L9ZlNec1OvFefQ05rLisY+GpzjLrF -Ne85akEez3GoorKGB1s6yeHvP2UEgEcyRHCVTjFnanRbEEV16rCf0OY1/k6fi8wr 
-kkVbbiVghUbN0aqwdmaTd5a+g744tiROJgvM7XpWGuDpWsZkrUx6AEhEL7lAuxM+ -vhV4nYWBSipX3tUZQ9rbyltHhoMLP7YNdnhzeSJesYAfz77RP1YQmCuVh6EfnWQU -YDksswBVLuT1sw5XxJFBAJw/6KXf6vb/yPCtbVKoF6ubYfwSUTXkJf2vqmqGOQ== ------END CERTIFICATE----- - -# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4 -# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4 -# Label: "GlobalSign ECC Root CA - R4" -# Serial: 159662223612894884239637590694 -# MD5 Fingerprint: 26:29:f8:6d:e1:88:bf:a2:65:7f:aa:c4:cd:0f:7f:fc -# SHA1 Fingerprint: 6b:a0:b0:98:e1:71:ef:5a:ad:fe:48:15:80:77:10:f4:bd:6f:0b:28 -# SHA256 Fingerprint: b0:85:d7:0b:96:4f:19:1a:73:e4:af:0d:54:ae:7a:0e:07:aa:fd:af:9b:71:dd:08:62:13:8a:b7:32:5a:24:a2 ------BEGIN CERTIFICATE----- -MIIB3DCCAYOgAwIBAgINAgPlfvU/k/2lCSGypjAKBggqhkjOPQQDAjBQMSQwIgYD -VQQLExtHbG9iYWxTaWduIEVDQyBSb290IENBIC0gUjQxEzARBgNVBAoTCkdsb2Jh -bFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMTIxMTEzMDAwMDAwWhcNMzgw -MTE5MDMxNDA3WjBQMSQwIgYDVQQLExtHbG9iYWxTaWduIEVDQyBSb290IENBIC0g -UjQxEzARBgNVBAoTCkdsb2JhbFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wWTAT -BgcqhkjOPQIBBggqhkjOPQMBBwNCAAS4xnnTj2wlDp8uORkcA6SumuU5BwkWymOx -uYb4ilfBV85C+nOh92VC/x7BALJucw7/xyHlGKSq2XE/qNS5zowdo0IwQDAOBgNV -HQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUVLB7rUW44kB/ -+wpu+74zyTyjhNUwCgYIKoZIzj0EAwIDRwAwRAIgIk90crlgr/HmnKAWBVBfw147 -bmF0774BxL4YSFlhgjICICadVGNA3jdgUM/I2O2dgq43mLyjj0xMqTQrbO/7lZsm ------END CERTIFICATE----- - -# Issuer: CN=GTS Root R1 O=Google Trust Services LLC -# Subject: CN=GTS Root R1 O=Google Trust Services LLC -# Label: "GTS Root R1" -# Serial: 159662320309726417404178440727 -# MD5 Fingerprint: 05:fe:d0:bf:71:a8:a3:76:63:da:01:e0:d8:52:dc:40 -# SHA1 Fingerprint: e5:8c:1c:c4:91:3b:38:63:4b:e9:10:6e:e3:ad:8e:6b:9d:d9:81:4a -# SHA256 Fingerprint: d9:47:43:2a:bd:e7:b7:fa:90:fc:2e:6b:59:10:1b:12:80:e0:e1:c7:e4:e4:0f:a3:c6:88:7f:ff:57:a7:f4:cf ------BEGIN CERTIFICATE----- -MIIFVzCCAz+gAwIBAgINAgPlk28xsBNJiGuiFzANBgkqhkiG9w0BAQwFADBHMQsw -CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU -MBIGA1UEAxMLR1RTIFJvb3QgUjEwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw -MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp -Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjEwggIiMA0GCSqGSIb3DQEBAQUA -A4ICDwAwggIKAoICAQC2EQKLHuOhd5s73L+UPreVp0A8of2C+X0yBoJx9vaMf/vo -27xqLpeXo4xL+Sv2sfnOhB2x+cWX3u+58qPpvBKJXqeqUqv4IyfLpLGcY9vXmX7w -Cl7raKb0xlpHDU0QM+NOsROjyBhsS+z8CZDfnWQpJSMHobTSPS5g4M/SCYe7zUjw -TcLCeoiKu7rPWRnWr4+wB7CeMfGCwcDfLqZtbBkOtdh+JhpFAz2weaSUKK0Pfybl -qAj+lug8aJRT7oM6iCsVlgmy4HqMLnXWnOunVmSPlk9orj2XwoSPwLxAwAtcvfaH -szVsrBhQf4TgTM2S0yDpM7xSma8ytSmzJSq0SPly4cpk9+aCEI3oncKKiPo4Zor8 -Y/kB+Xj9e1x3+naH+uzfsQ55lVe0vSbv1gHR6xYKu44LtcXFilWr06zqkUspzBmk -MiVOKvFlRNACzqrOSbTqn3yDsEB750Orp2yjj32JgfpMpf/VjsPOS+C12LOORc92 -wO1AK/1TD7Cn1TsNsYqiA94xrcx36m97PtbfkSIS5r762DL8EGMUUXLeXdYWk70p -aDPvOmbsB4om3xPXV2V4J95eSRQAogB/mqghtqmxlbCluQ0WEdrHbEg8QOB+DVrN -VjzRlwW5y0vtOUucxD/SVRNuJLDWcfr0wbrM7Rv1/oFB2ACYPTrIrnqYNxgFlQID -AQABo0IwQDAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4E -FgQU5K8rJnEaK0gnhS9SZizv8IkTcT4wDQYJKoZIhvcNAQEMBQADggIBAJ+qQibb -C5u+/x6Wki4+omVKapi6Ist9wTrYggoGxval3sBOh2Z5ofmmWJyq+bXmYOfg6LEe -QkEzCzc9zolwFcq1JKjPa7XSQCGYzyI0zzvFIoTgxQ6KfF2I5DUkzps+GlQebtuy -h6f88/qBVRRiClmpIgUxPoLW7ttXNLwzldMXG+gnoot7TiYaelpkttGsN/H9oPM4 -7HLwEXWdyzRSjeZ2axfG34arJ45JK3VmgRAhpuo+9K4l/3wV3s6MJT/KYnAK9y8J -ZgfIPxz88NtFMN9iiMG1D53Dn0reWVlHxYciNuaCp+0KueIHoI17eko8cdLiA6Ef -MgfdG+RCzgwARWGAtQsgWSl4vflVy2PFPEz0tv/bal8xa5meLMFrUKTX5hgUvYU/ 
-Z6tGn6D/Qqc6f1zLXbBwHSs09dR2CQzreExZBfMzQsNhFRAbd03OIozUhfJFfbdT -6u9AWpQKXCBfTkBdYiJ23//OYb2MI3jSNwLgjt7RETeJ9r/tSQdirpLsQBqvFAnZ -0E6yove+7u7Y/9waLd64NnHi/Hm3lCXRSHNboTXns5lndcEZOitHTtNCjv0xyBZm -2tIMPNuzjsmhDYAPexZ3FL//2wmUspO8IFgV6dtxQ/PeEMMA3KgqlbbC1j+Qa3bb -bP6MvPJwNQzcmRk13NfIRmPVNnGuV/u3gm3c ------END CERTIFICATE----- - -# Issuer: CN=GTS Root R2 O=Google Trust Services LLC -# Subject: CN=GTS Root R2 O=Google Trust Services LLC -# Label: "GTS Root R2" -# Serial: 159662449406622349769042896298 -# MD5 Fingerprint: 1e:39:c0:53:e6:1e:29:82:0b:ca:52:55:36:5d:57:dc -# SHA1 Fingerprint: 9a:44:49:76:32:db:de:fa:d0:bc:fb:5a:7b:17:bd:9e:56:09:24:94 -# SHA256 Fingerprint: 8d:25:cd:97:22:9d:bf:70:35:6b:da:4e:b3:cc:73:40:31:e2:4c:f0:0f:af:cf:d3:2d:c7:6e:b5:84:1c:7e:a8 ------BEGIN CERTIFICATE----- -MIIFVzCCAz+gAwIBAgINAgPlrsWNBCUaqxElqjANBgkqhkiG9w0BAQwFADBHMQsw -CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU -MBIGA1UEAxMLR1RTIFJvb3QgUjIwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw -MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp -Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjIwggIiMA0GCSqGSIb3DQEBAQUA -A4ICDwAwggIKAoICAQDO3v2m++zsFDQ8BwZabFn3GTXd98GdVarTzTukk3LvCvpt -nfbwhYBboUhSnznFt+4orO/LdmgUud+tAWyZH8QiHZ/+cnfgLFuv5AS/T3KgGjSY -6Dlo7JUle3ah5mm5hRm9iYz+re026nO8/4Piy33B0s5Ks40FnotJk9/BW9BuXvAu -MC6C/Pq8tBcKSOWIm8Wba96wyrQD8Nr0kLhlZPdcTK3ofmZemde4wj7I0BOdre7k -RXuJVfeKH2JShBKzwkCX44ofR5GmdFrS+LFjKBC4swm4VndAoiaYecb+3yXuPuWg -f9RhD1FLPD+M2uFwdNjCaKH5wQzpoeJ/u1U8dgbuak7MkogwTZq9TwtImoS1mKPV -+3PBV2HdKFZ1E66HjucMUQkQdYhMvI35ezzUIkgfKtzra7tEscszcTJGr61K8Yzo -dDqs5xoic4DSMPclQsciOzsSrZYuxsN2B6ogtzVJV+mSSeh2FnIxZyuWfoqjx5RW -Ir9qS34BIbIjMt/kmkRtWVtd9QCgHJvGeJeNkP+byKq0rxFROV7Z+2et1VsRnTKa -G73VululycslaVNVJ1zgyjbLiGH7HrfQy+4W+9OmTN6SpdTi3/UGVN4unUu0kzCq -gc7dGtxRcw1PcOnlthYhGXmy5okLdWTK1au8CcEYof/UVKGFPP0UJAOyh9OktwID -AQABo0IwQDAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4E -FgQUu//KjiOfT5nK2+JopqUVJxce2Q4wDQYJKoZIhvcNAQEMBQADggIBAB/Kzt3H -vqGf2SdMC9wXmBFqiN495nFWcrKeGk6c1SuYJF2ba3uwM4IJvd8lRuqYnrYb/oM8 -0mJhwQTtzuDFycgTE1XnqGOtjHsB/ncw4c5omwX4Eu55MaBBRTUoCnGkJE+M3DyC -B19m3H0Q/gxhswWV7uGugQ+o+MePTagjAiZrHYNSVc61LwDKgEDg4XSsYPWHgJ2u -NmSRXbBoGOqKYcl3qJfEycel/FVL8/B/uWU9J2jQzGv6U53hkRrJXRqWbTKH7QMg -yALOWr7Z6v2yTcQvG99fevX4i8buMTolUVVnjWQye+mew4K6Ki3pHrTgSAai/Gev -HyICc/sgCq+dVEuhzf9gR7A/Xe8bVr2XIZYtCtFenTgCR2y59PYjJbigapordwj6 -xLEokCZYCDzifqrXPW+6MYgKBesntaFJ7qBFVHvmJ2WZICGoo7z7GJa7Um8M7YNR -TOlZ4iBgxcJlkoKM8xAfDoqXvneCbT+PHV28SSe9zE8P4c52hgQjxcCMElv924Sg -JPFI/2R80L5cFtHvma3AH/vLrrw4IgYmZNralw4/KBVEqE8AyvCazM90arQ+POuV -7LXTWtiBmelDGDfrs7vRWGJB82bSj6p4lVQgw1oudCvV0b4YacCs1aTPObpRhANl -6WLAYv7YTVWW4tAR+kg0Eeye7QUd5MjWHYbL ------END CERTIFICATE----- - -# Issuer: CN=GTS Root R3 O=Google Trust Services LLC -# Subject: CN=GTS Root R3 O=Google Trust Services LLC -# Label: "GTS Root R3" -# Serial: 159662495401136852707857743206 -# MD5 Fingerprint: 3e:e7:9d:58:02:94:46:51:94:e5:e0:22:4a:8b:e7:73 -# SHA1 Fingerprint: ed:e5:71:80:2b:c8:92:b9:5b:83:3c:d2:32:68:3f:09:cd:a0:1e:46 -# SHA256 Fingerprint: 34:d8:a7:3e:e2:08:d9:bc:db:0d:95:65:20:93:4b:4e:40:e6:94:82:59:6e:8b:6f:73:c8:42:6b:01:0a:6f:48 ------BEGIN CERTIFICATE----- -MIICCTCCAY6gAwIBAgINAgPluILrIPglJ209ZjAKBggqhkjOPQQDAzBHMQswCQYD -VQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEUMBIG -A1UEAxMLR1RTIFJvb3QgUjMwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAwMDAw -WjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2Vz -IExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjMwdjAQBgcqhkjOPQIBBgUrgQQAIgNi 
-AAQfTzOHMymKoYTey8chWEGJ6ladK0uFxh1MJ7x/JlFyb+Kf1qPKzEUURout736G -jOyxfi//qXGdGIRFBEFVbivqJn+7kAHjSxm65FSWRQmx1WyRRK2EE46ajA2ADDL2 -4CejQjBAMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW -BBTB8Sa6oC2uhYHP0/EqEr24Cmf9vDAKBggqhkjOPQQDAwNpADBmAjEA9uEglRR7 -VKOQFhG/hMjqb2sXnh5GmCCbn9MN2azTL818+FsuVbu/3ZL3pAzcMeGiAjEA/Jdm -ZuVDFhOD3cffL74UOO0BzrEXGhF16b0DjyZ+hOXJYKaV11RZt+cRLInUue4X ------END CERTIFICATE----- - -# Issuer: CN=GTS Root R4 O=Google Trust Services LLC -# Subject: CN=GTS Root R4 O=Google Trust Services LLC -# Label: "GTS Root R4" -# Serial: 159662532700760215368942768210 -# MD5 Fingerprint: 43:96:83:77:19:4d:76:b3:9d:65:52:e4:1d:22:a5:e8 -# SHA1 Fingerprint: 77:d3:03:67:b5:e0:0c:15:f6:0c:38:61:df:7c:e1:3b:92:46:4d:47 -# SHA256 Fingerprint: 34:9d:fa:40:58:c5:e2:63:12:3b:39:8a:e7:95:57:3c:4e:13:13:c8:3f:e6:8f:93:55:6c:d5:e8:03:1b:3c:7d ------BEGIN CERTIFICATE----- -MIICCTCCAY6gAwIBAgINAgPlwGjvYxqccpBQUjAKBggqhkjOPQQDAzBHMQswCQYD -VQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEUMBIG -A1UEAxMLR1RTIFJvb3QgUjQwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAwMDAw -WjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2Vz -IExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjQwdjAQBgcqhkjOPQIBBgUrgQQAIgNi -AATzdHOnaItgrkO4NcWBMHtLSZ37wWHO5t5GvWvVYRg1rkDdc/eJkTBa6zzuhXyi -QHY7qca4R9gq55KRanPpsXI5nymfopjTX15YhmUPoYRlBtHci8nHc8iMai/lxKvR -HYqjQjBAMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW -BBSATNbrdP9JNqPV2Py1PsVq8JQdjDAKBggqhkjOPQQDAwNpADBmAjEA6ED/g94D -9J+uHXqnLrmvT/aDHQ4thQEd0dlq7A/Cr8deVl5c1RxYIigL9zC2L7F8AjEA8GE8 -p/SgguMh1YQdc4acLa/KNJvxn7kjNuK8YAOdgLOaVsjh4rsUecrNIdSUtUlD ------END CERTIFICATE----- - -# Issuer: CN=Telia Root CA v2 O=Telia Finland Oyj -# Subject: CN=Telia Root CA v2 O=Telia Finland Oyj -# Label: "Telia Root CA v2" -# Serial: 7288924052977061235122729490515358 -# MD5 Fingerprint: 0e:8f:ac:aa:82:df:85:b1:f4:dc:10:1c:fc:99:d9:48 -# SHA1 Fingerprint: b9:99:cd:d1:73:50:8a:c4:47:05:08:9c:8c:88:fb:be:a0:2b:40:cd -# SHA256 Fingerprint: 24:2b:69:74:2f:cb:1e:5b:2a:bf:98:89:8b:94:57:21:87:54:4e:5b:4d:99:11:78:65:73:62:1f:6a:74:b8:2c ------BEGIN CERTIFICATE----- -MIIFdDCCA1ygAwIBAgIPAWdfJ9b+euPkrL4JWwWeMA0GCSqGSIb3DQEBCwUAMEQx -CzAJBgNVBAYTAkZJMRowGAYDVQQKDBFUZWxpYSBGaW5sYW5kIE95ajEZMBcGA1UE -AwwQVGVsaWEgUm9vdCBDQSB2MjAeFw0xODExMjkxMTU1NTRaFw00MzExMjkxMTU1 -NTRaMEQxCzAJBgNVBAYTAkZJMRowGAYDVQQKDBFUZWxpYSBGaW5sYW5kIE95ajEZ -MBcGA1UEAwwQVGVsaWEgUm9vdCBDQSB2MjCCAiIwDQYJKoZIhvcNAQEBBQADggIP -ADCCAgoCggIBALLQPwe84nvQa5n44ndp586dpAO8gm2h/oFlH0wnrI4AuhZ76zBq -AMCzdGh+sq/H1WKzej9Qyow2RCRj0jbpDIX2Q3bVTKFgcmfiKDOlyzG4OiIjNLh9 -vVYiQJ3q9HsDrWj8soFPmNB06o3lfc1jw6P23pLCWBnglrvFxKk9pXSW/q/5iaq9 -lRdU2HhE8Qx3FZLgmEKnpNaqIJLNwaCzlrI6hEKNfdWV5Nbb6WLEWLN5xYzTNTOD -n3WhUidhOPFZPY5Q4L15POdslv5e2QJltI5c0BE0312/UqeBAMN/mUWZFdUXyApT -7GPzmX3MaRKGwhfwAZ6/hLzRUssbkmbOpFPlob/E2wnW5olWK8jjfN7j/4nlNW4o -6GwLI1GpJQXrSPjdscr6bAhR77cYbETKJuFzxokGgeWKrLDiKca5JLNrRBH0pUPC -TEPlcDaMtjNXepUugqD0XBCzYYP2AgWGLnwtbNwDRm41k9V6lS/eINhbfpSQBGq6 -WT0EBXWdN6IOLj3rwaRSg/7Qa9RmjtzG6RJOHSpXqhC8fF6CfaamyfItufUXJ63R -DolUK5X6wK0dmBR4M0KGCqlztft0DbcbMBnEWg4cJ7faGND/isgFuvGqHKI3t+ZI -pEYslOqodmJHixBTB0hXbOKSTbauBcvcwUpej6w9GU7C7WB1K9vBykLVAgMBAAGj -YzBhMB8GA1UdIwQYMBaAFHKs5DN5qkWH9v2sHZ7Wxy+G2CQ5MB0GA1UdDgQWBBRy -rOQzeapFh/b9rB2e1scvhtgkOTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUw -AwEB/zANBgkqhkiG9w0BAQsFAAOCAgEAoDtZpwmUPjaE0n4vOaWWl/oRrfxn83EJ -8rKJhGdEr7nv7ZbsnGTbMjBvZ5qsfl+yqwE2foH65IRe0qw24GtixX1LDoJt0nZi -0f6X+J8wfBj5tFJ3gh1229MdqfDBmgC9bXXYfef6xzijnHDoRnkDry5023X4blMM 
-A8iZGok1GTzTyVR8qPAs5m4HeW9q4ebqkYJpCh3DflminmtGFZhb069GHWLIzoBS -SRE/yQQSwxN8PzuKlts8oB4KtItUsiRnDe+Cy748fdHif64W1lZYudogsYMVoe+K -TTJvQS8TUoKU1xrBeKJR3Stwbbca+few4GeXVtt8YVMJAygCQMez2P2ccGrGKMOF -6eLtGpOg3kuYooQ+BXcBlj37tCAPnHICehIv1aO6UXivKitEZU61/Qrowc15h2Er -3oBXRb9n8ZuRXqWk7FlIEA04x7D6w0RtBPV4UBySllva9bguulvP5fBqnUsvWHMt -Ty3EHD70sz+rFQ47GUGKpMFXEmZxTPpT41frYpUJnlTd0cI8Vzy9OK2YZLe4A5pT -VmBds9hCG1xLEooc6+t9xnppxyd/pPiL8uSUZodL6ZQHCRJ5irLrdATczvREWeAW -ysUsWNc8e89ihmpQfTU2Zqf7N+cox9jQraVplI/owd8k+BsHMYeB2F326CjYSlKA -rBPuUBQemMc= ------END CERTIFICATE----- - -# Issuer: CN=D-TRUST BR Root CA 1 2020 O=D-Trust GmbH -# Subject: CN=D-TRUST BR Root CA 1 2020 O=D-Trust GmbH -# Label: "D-TRUST BR Root CA 1 2020" -# Serial: 165870826978392376648679885835942448534 -# MD5 Fingerprint: b5:aa:4b:d5:ed:f7:e3:55:2e:8f:72:0a:f3:75:b8:ed -# SHA1 Fingerprint: 1f:5b:98:f0:e3:b5:f7:74:3c:ed:e6:b0:36:7d:32:cd:f4:09:41:67 -# SHA256 Fingerprint: e5:9a:aa:81:60:09:c2:2b:ff:5b:25:ba:d3:7d:f3:06:f0:49:79:7c:1f:81:d8:5a:b0:89:e6:57:bd:8f:00:44 ------BEGIN CERTIFICATE----- -MIIC2zCCAmCgAwIBAgIQfMmPK4TX3+oPyWWa00tNljAKBggqhkjOPQQDAzBIMQsw -CQYDVQQGEwJERTEVMBMGA1UEChMMRC1UcnVzdCBHbWJIMSIwIAYDVQQDExlELVRS -VVNUIEJSIFJvb3QgQ0EgMSAyMDIwMB4XDTIwMDIxMTA5NDUwMFoXDTM1MDIxMTA5 -NDQ1OVowSDELMAkGA1UEBhMCREUxFTATBgNVBAoTDEQtVHJ1c3QgR21iSDEiMCAG -A1UEAxMZRC1UUlVTVCBCUiBSb290IENBIDEgMjAyMDB2MBAGByqGSM49AgEGBSuB -BAAiA2IABMbLxyjR+4T1mu9CFCDhQ2tuda38KwOE1HaTJddZO0Flax7mNCq7dPYS -zuht56vkPE4/RAiLzRZxy7+SmfSk1zxQVFKQhYN4lGdnoxwJGT11NIXe7WB9xwy0 -QVK5buXuQqOCAQ0wggEJMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFHOREKv/ -VbNafAkl1bK6CKBrqx9tMA4GA1UdDwEB/wQEAwIBBjCBxgYDVR0fBIG+MIG7MD6g -PKA6hjhodHRwOi8vY3JsLmQtdHJ1c3QubmV0L2NybC9kLXRydXN0X2JyX3Jvb3Rf -Y2FfMV8yMDIwLmNybDB5oHegdYZzbGRhcDovL2RpcmVjdG9yeS5kLXRydXN0Lm5l -dC9DTj1ELVRSVVNUJTIwQlIlMjBSb290JTIwQ0ElMjAxJTIwMjAyMCxPPUQtVHJ1 -c3QlMjBHbWJILEM9REU/Y2VydGlmaWNhdGVyZXZvY2F0aW9ubGlzdDAKBggqhkjO -PQQDAwNpADBmAjEAlJAtE/rhY/hhY+ithXhUkZy4kzg+GkHaQBZTQgjKL47xPoFW -wKrY7RjEsK70PvomAjEA8yjixtsrmfu3Ubgko6SUeho/5jbiA1czijDLgsfWFBHV -dWNbFJWcHwHP2NVypw87 ------END CERTIFICATE----- - -# Issuer: CN=D-TRUST EV Root CA 1 2020 O=D-Trust GmbH -# Subject: CN=D-TRUST EV Root CA 1 2020 O=D-Trust GmbH -# Label: "D-TRUST EV Root CA 1 2020" -# Serial: 126288379621884218666039612629459926992 -# MD5 Fingerprint: 8c:2d:9d:70:9f:48:99:11:06:11:fb:e9:cb:30:c0:6e -# SHA1 Fingerprint: 61:db:8c:21:59:69:03:90:d8:7c:9c:12:86:54:cf:9d:3d:f4:dd:07 -# SHA256 Fingerprint: 08:17:0d:1a:a3:64:53:90:1a:2f:95:92:45:e3:47:db:0c:8d:37:ab:aa:bc:56:b8:1a:a1:00:dc:95:89:70:db ------BEGIN CERTIFICATE----- -MIIC2zCCAmCgAwIBAgIQXwJB13qHfEwDo6yWjfv/0DAKBggqhkjOPQQDAzBIMQsw -CQYDVQQGEwJERTEVMBMGA1UEChMMRC1UcnVzdCBHbWJIMSIwIAYDVQQDExlELVRS -VVNUIEVWIFJvb3QgQ0EgMSAyMDIwMB4XDTIwMDIxMTEwMDAwMFoXDTM1MDIxMTA5 -NTk1OVowSDELMAkGA1UEBhMCREUxFTATBgNVBAoTDEQtVHJ1c3QgR21iSDEiMCAG -A1UEAxMZRC1UUlVTVCBFViBSb290IENBIDEgMjAyMDB2MBAGByqGSM49AgEGBSuB -BAAiA2IABPEL3YZDIBnfl4XoIkqbz52Yv7QFJsnL46bSj8WeeHsxiamJrSc8ZRCC -/N/DnU7wMyPE0jL1HLDfMxddxfCxivnvubcUyilKwg+pf3VlSSowZ/Rk99Yad9rD -wpdhQntJraOCAQ0wggEJMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFH8QARY3 -OqQo5FD4pPfsazK2/umLMA4GA1UdDwEB/wQEAwIBBjCBxgYDVR0fBIG+MIG7MD6g -PKA6hjhodHRwOi8vY3JsLmQtdHJ1c3QubmV0L2NybC9kLXRydXN0X2V2X3Jvb3Rf -Y2FfMV8yMDIwLmNybDB5oHegdYZzbGRhcDovL2RpcmVjdG9yeS5kLXRydXN0Lm5l -dC9DTj1ELVRSVVNUJTIwRVYlMjBSb290JTIwQ0ElMjAxJTIwMjAyMCxPPUQtVHJ1 -c3QlMjBHbWJILEM9REU/Y2VydGlmaWNhdGVyZXZvY2F0aW9ubGlzdDAKBggqhkjO -PQQDAwNpADBmAjEAyjzGKnXCXnViOTYAYFqLwZOZzNnbQTs7h5kXO9XMT8oi96CA 
-y/m0sRtW9XLS/BnRAjEAkfcwkz8QRitxpNA7RJvAKQIFskF3UfN5Wp6OFKBOQtJb -gfM0agPnIjhQW+0ZT0MW ------END CERTIFICATE----- - -# Issuer: CN=DigiCert TLS ECC P384 Root G5 O=DigiCert, Inc. -# Subject: CN=DigiCert TLS ECC P384 Root G5 O=DigiCert, Inc. -# Label: "DigiCert TLS ECC P384 Root G5" -# Serial: 13129116028163249804115411775095713523 -# MD5 Fingerprint: d3:71:04:6a:43:1c:db:a6:59:e1:a8:a3:aa:c5:71:ed -# SHA1 Fingerprint: 17:f3:de:5e:9f:0f:19:e9:8e:f6:1f:32:26:6e:20:c4:07:ae:30:ee -# SHA256 Fingerprint: 01:8e:13:f0:77:25:32:cf:80:9b:d1:b1:72:81:86:72:83:fc:48:c6:e1:3b:e9:c6:98:12:85:4a:49:0c:1b:05 ------BEGIN CERTIFICATE----- -MIICGTCCAZ+gAwIBAgIQCeCTZaz32ci5PhwLBCou8zAKBggqhkjOPQQDAzBOMQsw -CQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQsIEluYy4xJjAkBgNVBAMTHURp -Z2lDZXJ0IFRMUyBFQ0MgUDM4NCBSb290IEc1MB4XDTIxMDExNTAwMDAwMFoXDTQ2 -MDExNDIzNTk1OVowTjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDkRpZ2lDZXJ0LCBJ -bmMuMSYwJAYDVQQDEx1EaWdpQ2VydCBUTFMgRUNDIFAzODQgUm9vdCBHNTB2MBAG -ByqGSM49AgEGBSuBBAAiA2IABMFEoc8Rl1Ca3iOCNQfN0MsYndLxf3c1TzvdlHJS -7cI7+Oz6e2tYIOyZrsn8aLN1udsJ7MgT9U7GCh1mMEy7H0cKPGEQQil8pQgO4CLp -0zVozptjn4S1mU1YoI71VOeVyaNCMEAwHQYDVR0OBBYEFMFRRVBZqz7nLFr6ICIS -B4CIfBFqMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MAoGCCqGSM49 -BAMDA2gAMGUCMQCJao1H5+z8blUD2WdsJk6Dxv3J+ysTvLd6jLRl0mlpYxNjOyZQ -LgGheQaRnUi/wr4CMEfDFXuxoJGZSZOoPHzoRgaLLPIxAJSdYsiJvRmEFOml+wG4 -DXZDjC5Ty3zfDBeWUA== ------END CERTIFICATE----- - -# Issuer: CN=DigiCert TLS RSA4096 Root G5 O=DigiCert, Inc. -# Subject: CN=DigiCert TLS RSA4096 Root G5 O=DigiCert, Inc. -# Label: "DigiCert TLS RSA4096 Root G5" -# Serial: 11930366277458970227240571539258396554 -# MD5 Fingerprint: ac:fe:f7:34:96:a9:f2:b3:b4:12:4b:e4:27:41:6f:e1 -# SHA1 Fingerprint: a7:88:49:dc:5d:7c:75:8c:8c:de:39:98:56:b3:aa:d0:b2:a5:71:35 -# SHA256 Fingerprint: 37:1a:00:dc:05:33:b3:72:1a:7e:eb:40:e8:41:9e:70:79:9d:2b:0a:0f:2c:1d:80:69:31:65:f7:ce:c4:ad:75 ------BEGIN CERTIFICATE----- -MIIFZjCCA06gAwIBAgIQCPm0eKj6ftpqMzeJ3nzPijANBgkqhkiG9w0BAQwFADBN -MQswCQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQsIEluYy4xJTAjBgNVBAMT -HERpZ2lDZXJ0IFRMUyBSU0E0MDk2IFJvb3QgRzUwHhcNMjEwMTE1MDAwMDAwWhcN -NDYwMTE0MjM1OTU5WjBNMQswCQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQs -IEluYy4xJTAjBgNVBAMTHERpZ2lDZXJ0IFRMUyBSU0E0MDk2IFJvb3QgRzUwggIi -MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCz0PTJeRGd/fxmgefM1eS87IE+ -ajWOLrfn3q/5B03PMJ3qCQuZvWxX2hhKuHisOjmopkisLnLlvevxGs3npAOpPxG0 -2C+JFvuUAT27L/gTBaF4HI4o4EXgg/RZG5Wzrn4DReW+wkL+7vI8toUTmDKdFqgp -wgscONyfMXdcvyej/Cestyu9dJsXLfKB2l2w4SMXPohKEiPQ6s+d3gMXsUJKoBZM -pG2T6T867jp8nVid9E6P/DsjyG244gXazOvswzH016cpVIDPRFtMbzCe88zdH5RD -nU1/cHAN1DrRN/BsnZvAFJNY781BOHW8EwOVfH/jXOnVDdXifBBiqmvwPXbzP6Po -sMH976pXTayGpxi0KcEsDr9kvimM2AItzVwv8n/vFfQMFawKsPHTDU9qTXeXAaDx -Zre3zu/O7Oyldcqs4+Fj97ihBMi8ez9dLRYiVu1ISf6nL3kwJZu6ay0/nTvEF+cd -Lvvyz6b84xQslpghjLSR6Rlgg/IwKwZzUNWYOwbpx4oMYIwo+FKbbuH2TbsGJJvX -KyY//SovcfXWJL5/MZ4PbeiPT02jP/816t9JXkGPhvnxd3lLG7SjXi/7RgLQZhNe -XoVPzthwiHvOAbWWl9fNff2C+MIkwcoBOU+NosEUQB+cZtUMCUbW8tDRSHZWOkPL -tgoRObqME2wGtZ7P6wIDAQABo0IwQDAdBgNVHQ4EFgQUUTMc7TZArxfTJc1paPKv -TiM+s0EwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcN -AQEMBQADggIBAGCmr1tfV9qJ20tQqcQjNSH/0GEwhJG3PxDPJY7Jv0Y02cEhJhxw -GXIeo8mH/qlDZJY6yFMECrZBu8RHANmfGBg7sg7zNOok992vIGCukihfNudd5N7H -PNtQOa27PShNlnx2xlv0wdsUpasZYgcYQF+Xkdycx6u1UQ3maVNVzDl92sURVXLF -O4uJ+DQtpBflF+aZfTCIITfNMBc9uPK8qHWgQ9w+iUuQrm0D4ByjoJYJu32jtyoQ -REtGBzRj7TG5BO6jm5qu5jF49OokYTurWGT/u4cnYiWB39yhL/btp/96j1EuMPik -AdKFOV8BmZZvWltwGUb+hmA+rYAQCd05JS9Yf7vSdPD3Rh9GOUrYU9DzLjtxpdRv 
-/PNn5AeP3SYZ4Y1b+qOTEZvpyDrDVWiakuFSdjjo4bq9+0/V77PnSIMx8IIh47a+ -p6tv75/fTM8BuGJqIz3nCU2AG3swpMPdB380vqQmsvZB6Akd4yCYqjdP//fx4ilw -MUc/dNAUFvohigLVigmUdy7yWSiLfFCSCmZ4OIN1xLVaqBHG5cGdZlXPU8Sv13WF -qUITVuwhd4GTWgzqltlJyqEI8pc7bZsEGCREjnwB8twl2F6GmrE52/WRMmrRpnCK -ovfepEWFJqgejF0pW8hL2JpqA15w8oVPbEtoL8pU9ozaMv7Da4M/OMZ+ ------END CERTIFICATE----- - -# Issuer: CN=Certainly Root R1 O=Certainly -# Subject: CN=Certainly Root R1 O=Certainly -# Label: "Certainly Root R1" -# Serial: 188833316161142517227353805653483829216 -# MD5 Fingerprint: 07:70:d4:3e:82:87:a0:fa:33:36:13:f4:fa:33:e7:12 -# SHA1 Fingerprint: a0:50:ee:0f:28:71:f4:27:b2:12:6d:6f:50:96:25:ba:cc:86:42:af -# SHA256 Fingerprint: 77:b8:2c:d8:64:4c:43:05:f7:ac:c5:cb:15:6b:45:67:50:04:03:3d:51:c6:0c:62:02:a8:e0:c3:34:67:d3:a0 ------BEGIN CERTIFICATE----- -MIIFRzCCAy+gAwIBAgIRAI4P+UuQcWhlM1T01EQ5t+AwDQYJKoZIhvcNAQELBQAw -PTELMAkGA1UEBhMCVVMxEjAQBgNVBAoTCUNlcnRhaW5seTEaMBgGA1UEAxMRQ2Vy -dGFpbmx5IFJvb3QgUjEwHhcNMjEwNDAxMDAwMDAwWhcNNDYwNDAxMDAwMDAwWjA9 -MQswCQYDVQQGEwJVUzESMBAGA1UEChMJQ2VydGFpbmx5MRowGAYDVQQDExFDZXJ0 -YWlubHkgUm9vdCBSMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANA2 -1B/q3avk0bbm+yLA3RMNansiExyXPGhjZjKcA7WNpIGD2ngwEc/csiu+kr+O5MQT -vqRoTNoCaBZ0vrLdBORrKt03H2As2/X3oXyVtwxwhi7xOu9S98zTm/mLvg7fMbed -aFySpvXl8wo0tf97ouSHocavFwDvA5HtqRxOcT3Si2yJ9HiG5mpJoM610rCrm/b0 -1C7jcvk2xusVtyWMOvwlDbMicyF0yEqWYZL1LwsYpfSt4u5BvQF5+paMjRcCMLT5 -r3gajLQ2EBAHBXDQ9DGQilHFhiZ5shGIXsXwClTNSaa/ApzSRKft43jvRl5tcdF5 -cBxGX1HpyTfcX35pe0HfNEXgO4T0oYoKNp43zGJS4YkNKPl6I7ENPT2a/Z2B7yyQ -wHtETrtJ4A5KVpK8y7XdeReJkd5hiXSSqOMyhb5OhaRLWcsrxXiOcVTQAjeZjOVJ -6uBUcqQRBi8LjMFbvrWhsFNunLhgkR9Za/kt9JQKl7XsxXYDVBtlUrpMklZRNaBA -2CnbrlJ2Oy0wQJuK0EJWtLeIAaSHO1OWzaMWj/Nmqhexx2DgwUMFDO6bW2BvBlyH -Wyf5QBGenDPBt+U1VwV/J84XIIwc/PH72jEpSe31C4SnT8H2TsIonPru4K8H+zMR -eiFPCyEQtkA6qyI6BJyLm4SGcprSp6XEtHWRqSsjAgMBAAGjQjBAMA4GA1UdDwEB -/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTgqj8ljZ9EXME66C6u -d0yEPmcM9DANBgkqhkiG9w0BAQsFAAOCAgEAuVevuBLaV4OPaAszHQNTVfSVcOQr -PbA56/qJYv331hgELyE03fFo8NWWWt7CgKPBjcZq91l3rhVkz1t5BXdm6ozTaw3d -8VkswTOlMIAVRQdFGjEitpIAq5lNOo93r6kiyi9jyhXWx8bwPWz8HA2YEGGeEaIi -1wrykXprOQ4vMMM2SZ/g6Q8CRFA3lFV96p/2O7qUpUzpvD5RtOjKkjZUbVwlKNrd -rRT90+7iIgXr0PK3aBLXWopBGsaSpVo7Y0VPv+E6dyIvXL9G+VoDhRNCX8reU9di -taY1BMJH/5n9hN9czulegChB8n3nHpDYT3Y+gjwN/KUD+nsa2UUeYNrEjvn8K8l7 -lcUq/6qJ34IxD3L/DCfXCh5WAFAeDJDBlrXYFIW7pw0WwfgHJBu6haEaBQmAupVj -yTrsJZ9/nbqkRxWbRHDxakvWOF5D8xh+UG7pWijmZeZ3Gzr9Hb4DJqPb1OG7fpYn -Kx3upPvaJVQTA945xsMfTZDsjxtK0hzthZU4UHlG1sGQUDGpXJpuHfUzVounmdLy -yCwzk5Iwx06MZTMQZBf9JBeW0Y3COmor6xOLRPIh80oat3df1+2IpHLlOR+Vnb5n -wXARPbv0+Em34yaXOp/SX3z7wJl8OSngex2/DaeP0ik0biQVy96QXr8axGbqwua6 -OV+KmalBWQewLK8= ------END CERTIFICATE----- - -# Issuer: CN=Certainly Root E1 O=Certainly -# Subject: CN=Certainly Root E1 O=Certainly -# Label: "Certainly Root E1" -# Serial: 8168531406727139161245376702891150584 -# MD5 Fingerprint: 0a:9e:ca:cd:3e:52:50:c6:36:f3:4b:a3:ed:a7:53:e9 -# SHA1 Fingerprint: f9:e1:6d:dc:01:89:cf:d5:82:45:63:3e:c5:37:7d:c2:eb:93:6f:2b -# SHA256 Fingerprint: b4:58:5f:22:e4:ac:75:6a:4e:86:12:a1:36:1c:5d:9d:03:1a:93:fd:84:fe:bb:77:8f:a3:06:8b:0f:c4:2d:c2 ------BEGIN CERTIFICATE----- -MIIB9zCCAX2gAwIBAgIQBiUzsUcDMydc+Y2aub/M+DAKBggqhkjOPQQDAzA9MQsw -CQYDVQQGEwJVUzESMBAGA1UEChMJQ2VydGFpbmx5MRowGAYDVQQDExFDZXJ0YWlu -bHkgUm9vdCBFMTAeFw0yMTA0MDEwMDAwMDBaFw00NjA0MDEwMDAwMDBaMD0xCzAJ -BgNVBAYTAlVTMRIwEAYDVQQKEwlDZXJ0YWlubHkxGjAYBgNVBAMTEUNlcnRhaW5s -eSBSb290IEUxMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAE3m/4fxzf7flHh4axpMCK 
-+IKXgOqPyEpeKn2IaKcBYhSRJHpcnqMXfYqGITQYUBsQ3tA3SybHGWCA6TS9YBk2 -QNYphwk8kXr2vBMj3VlOBF7PyAIcGFPBMdjaIOlEjeR2o0IwQDAOBgNVHQ8BAf8E -BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU8ygYy2R17ikq6+2uI1g4 -hevIIgcwCgYIKoZIzj0EAwMDaAAwZQIxALGOWiDDshliTd6wT99u0nCK8Z9+aozm -ut6Dacpps6kFtZaSF4fC0urQe87YQVt8rgIwRt7qy12a7DLCZRawTDBcMPPaTnOG -BtjOiQRINzf43TNRnXCve1XYAS59BWQOhriR ------END CERTIFICATE----- - -# Issuer: CN=Security Communication RootCA3 O=SECOM Trust Systems CO.,LTD. -# Subject: CN=Security Communication RootCA3 O=SECOM Trust Systems CO.,LTD. -# Label: "Security Communication RootCA3" -# Serial: 16247922307909811815 -# MD5 Fingerprint: 1c:9a:16:ff:9e:5c:e0:4d:8a:14:01:f4:35:5d:29:26 -# SHA1 Fingerprint: c3:03:c8:22:74:92:e5:61:a2:9c:5f:79:91:2b:1e:44:13:91:30:3a -# SHA256 Fingerprint: 24:a5:5c:2a:b0:51:44:2d:06:17:76:65:41:23:9a:4a:d0:32:d7:c5:51:75:aa:34:ff:de:2f:bc:4f:5c:52:94 ------BEGIN CERTIFICATE----- -MIIFfzCCA2egAwIBAgIJAOF8N0D9G/5nMA0GCSqGSIb3DQEBDAUAMF0xCzAJBgNV -BAYTAkpQMSUwIwYDVQQKExxTRUNPTSBUcnVzdCBTeXN0ZW1zIENPLixMVEQuMScw -JQYDVQQDEx5TZWN1cml0eSBDb21tdW5pY2F0aW9uIFJvb3RDQTMwHhcNMTYwNjE2 -MDYxNzE2WhcNMzgwMTE4MDYxNzE2WjBdMQswCQYDVQQGEwJKUDElMCMGA1UEChMc -U0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEnMCUGA1UEAxMeU2VjdXJpdHkg -Q29tbXVuaWNhdGlvbiBSb290Q0EzMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC -CgKCAgEA48lySfcw3gl8qUCBWNO0Ot26YQ+TUG5pPDXC7ltzkBtnTCHsXzW7OT4r -CmDvu20rhvtxosis5FaU+cmvsXLUIKx00rgVrVH+hXShuRD+BYD5UpOzQD11EKzA -lrenfna84xtSGc4RHwsENPXY9Wk8d/Nk9A2qhd7gCVAEF5aEt8iKvE1y/By7z/MG -TfmfZPd+pmaGNXHIEYBMwXFAWB6+oHP2/D5Q4eAvJj1+XCO1eXDe+uDRpdYMQXF7 -9+qMHIjH7Iv10S9VlkZ8WjtYO/u62C21Jdp6Ts9EriGmnpjKIG58u4iFW/vAEGK7 -8vknR+/RiTlDxN/e4UG/VHMgly1s2vPUB6PmudhvrvyMGS7TZ2crldtYXLVqAvO4 -g160a75BflcJdURQVc1aEWEhCmHCqYj9E7wtiS/NYeCVvsq1e+F7NGcLH7YMx3we -GVPKp7FKFSBWFHA9K4IsD50VHUeAR/94mQ4xr28+j+2GaR57GIgUssL8gjMunEst -+3A7caoreyYn8xrC3PsXuKHqy6C0rtOUfnrQq8PsOC0RLoi/1D+tEjtCrI8Cbn3M -0V9hvqG8OmpI6iZVIhZdXw3/JzOfGAN0iltSIEdrRU0id4xVJ/CvHozJgyJUt5rQ -T9nO/NkuHJYosQLTA70lUhw0Zk8jq/R3gpYd0VcwCBEF/VfR2ccCAwEAAaNCMEAw -HQYDVR0OBBYEFGQUfPxYchamCik0FW8qy7z8r6irMA4GA1UdDwEB/wQEAwIBBjAP -BgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3DQEBDAUAA4ICAQDcAiMI4u8hOscNtybS -YpOnpSNyByCCYN8Y11StaSWSntkUz5m5UoHPrmyKO1o5yGwBQ8IibQLwYs1OY0PA -FNr0Y/Dq9HHuTofjcan0yVflLl8cebsjqodEV+m9NU1Bu0soo5iyG9kLFwfl9+qd -9XbXv8S2gVj/yP9kaWJ5rW4OH3/uHWnlt3Jxs/6lATWUVCvAUm2PVcTJ0rjLyjQI -UYWg9by0F1jqClx6vWPGOi//lkkZhOpn2ASxYfQAW0q3nHE3GYV5v4GwxxMOdnE+ -OoAGrgYWp421wsTL/0ClXI2lyTrtcoHKXJg80jQDdwj98ClZXSEIx2C/pHF7uNke -gr4Jr2VvKKu/S7XuPghHJ6APbw+LP6yVGPO5DtxnVW5inkYO0QR4ynKudtml+LLf -iAlhi+8kTtFZP1rUPcmTPCtk9YENFpb3ksP+MW/oKjJ0DvRMmEoYDjBU1cXrvMUV -nuiZIesnKwkK2/HmcBhWuwzkvvnoEKQTkrgc4NtnHVMDpCKn3F2SEDzq//wbEBrD -2NCcnWXL0CsnMQMeNuE9dnUM/0Umud1RvCPHX9jYhxBAEg09ODfnRDwYwFMJZI// -1ZqmfHAuc1Uh6N//g7kdPjIe1qZ9LPFm6Vwdp6POXiUyK+OVrCoHzrQoeIY8Laad -TdJ0MN1kURXbg4NR16/9M51NZg== ------END CERTIFICATE----- - -# Issuer: CN=Security Communication ECC RootCA1 O=SECOM Trust Systems CO.,LTD. -# Subject: CN=Security Communication ECC RootCA1 O=SECOM Trust Systems CO.,LTD. 
-# Label: "Security Communication ECC RootCA1" -# Serial: 15446673492073852651 -# MD5 Fingerprint: 7e:43:b0:92:68:ec:05:43:4c:98:ab:5d:35:2e:7e:86 -# SHA1 Fingerprint: b8:0e:26:a9:bf:d2:b2:3b:c0:ef:46:c9:ba:c7:bb:f6:1d:0d:41:41 -# SHA256 Fingerprint: e7:4f:bd:a5:5b:d5:64:c4:73:a3:6b:44:1a:a7:99:c8:a6:8e:07:74:40:e8:28:8b:9f:a1:e5:0e:4b:ba:ca:11 ------BEGIN CERTIFICATE----- -MIICODCCAb6gAwIBAgIJANZdm7N4gS7rMAoGCCqGSM49BAMDMGExCzAJBgNVBAYT -AkpQMSUwIwYDVQQKExxTRUNPTSBUcnVzdCBTeXN0ZW1zIENPLixMVEQuMSswKQYD -VQQDEyJTZWN1cml0eSBDb21tdW5pY2F0aW9uIEVDQyBSb290Q0ExMB4XDTE2MDYx -NjA1MTUyOFoXDTM4MDExODA1MTUyOFowYTELMAkGA1UEBhMCSlAxJTAjBgNVBAoT -HFNFQ09NIFRydXN0IFN5c3RlbXMgQ08uLExURC4xKzApBgNVBAMTIlNlY3VyaXR5 -IENvbW11bmljYXRpb24gRUNDIFJvb3RDQTEwdjAQBgcqhkjOPQIBBgUrgQQAIgNi -AASkpW9gAwPDvTH00xecK4R1rOX9PVdu12O/5gSJko6BnOPpR27KkBLIE+Cnnfdl -dB9sELLo5OnvbYUymUSxXv3MdhDYW72ixvnWQuRXdtyQwjWpS4g8EkdtXP9JTxpK -ULGjQjBAMB0GA1UdDgQWBBSGHOf+LaVKiwj+KBH6vqNm+GBZLzAOBgNVHQ8BAf8E -BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjAVXUI9/Lbu -9zuxNuie9sRGKEkz0FhDKmMpzE2xtHqiuQ04pV1IKv3LsnNdo4gIxwwCMQDAqy0O -be0YottT6SXbVQjgUMzfRGEWgqtJsLKB7HOHeLRMsmIbEvoWTSVLY70eN9k= ------END CERTIFICATE----- - -# Issuer: CN=BJCA Global Root CA1 O=BEIJING CERTIFICATE AUTHORITY -# Subject: CN=BJCA Global Root CA1 O=BEIJING CERTIFICATE AUTHORITY -# Label: "BJCA Global Root CA1" -# Serial: 113562791157148395269083148143378328608 -# MD5 Fingerprint: 42:32:99:76:43:33:36:24:35:07:82:9b:28:f9:d0:90 -# SHA1 Fingerprint: d5:ec:8d:7b:4c:ba:79:f4:e7:e8:cb:9d:6b:ae:77:83:10:03:21:6a -# SHA256 Fingerprint: f3:89:6f:88:fe:7c:0a:88:27:66:a7:fa:6a:d2:74:9f:b5:7a:7f:3e:98:fb:76:9c:1f:a7:b0:9c:2c:44:d5:ae ------BEGIN CERTIFICATE----- -MIIFdDCCA1ygAwIBAgIQVW9l47TZkGobCdFsPsBsIDANBgkqhkiG9w0BAQsFADBU -MQswCQYDVQQGEwJDTjEmMCQGA1UECgwdQkVJSklORyBDRVJUSUZJQ0FURSBBVVRI -T1JJVFkxHTAbBgNVBAMMFEJKQ0EgR2xvYmFsIFJvb3QgQ0ExMB4XDTE5MTIxOTAz -MTYxN1oXDTQ0MTIxMjAzMTYxN1owVDELMAkGA1UEBhMCQ04xJjAkBgNVBAoMHUJF -SUpJTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZMR0wGwYDVQQDDBRCSkNBIEdsb2Jh -bCBSb290IENBMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAPFmCL3Z -xRVhy4QEQaVpN3cdwbB7+sN3SJATcmTRuHyQNZ0YeYjjlwE8R4HyDqKYDZ4/N+AZ -spDyRhySsTphzvq3Rp4Dhtczbu33RYx2N95ulpH3134rhxfVizXuhJFyV9xgw8O5 -58dnJCNPYwpj9mZ9S1WnP3hkSWkSl+BMDdMJoDIwOvqfwPKcxRIqLhy1BDPapDgR -at7GGPZHOiJBhyL8xIkoVNiMpTAK+BcWyqw3/XmnkRd4OJmtWO2y3syJfQOcs4ll -5+M7sSKGjwZteAf9kRJ/sGsciQ35uMt0WwfCyPQ10WRjeulumijWML3mG90Vr4Tq -nMfK9Q7q8l0ph49pczm+LiRvRSGsxdRpJQaDrXpIhRMsDQa4bHlW/KNnMoH1V6XK -V0Jp6VwkYe/iMBhORJhVb3rCk9gZtt58R4oRTklH2yiUAguUSiz5EtBP6DF+bHq/ -pj+bOT0CFqMYs2esWz8sgytnOYFcuX6U1WTdno9uruh8W7TXakdI136z1C2OVnZO -z2nxbkRs1CTqjSShGL+9V/6pmTW12xB3uD1IutbB5/EjPtffhZ0nPNRAvQoMvfXn -jSXWgXSHRtQpdaJCbPdzied9v3pKH9MiyRVVz99vfFXQpIsHETdfg6YmV6YBW37+ -WGgHqel62bno/1Afq8K0wM7o6v0PvY1NuLxxAgMBAAGjQjBAMB0GA1UdDgQWBBTF -7+3M2I0hxkjk49cULqcWk+WYATAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQE -AwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAUoKsITQfI/Ki2Pm4rzc2IInRNwPWaZ+4 -YRC6ojGYWUfo0Q0lHhVBDOAqVdVXUsv45Mdpox1NcQJeXyFFYEhcCY5JEMEE3Kli -awLwQ8hOnThJdMkycFRtwUf8jrQ2ntScvd0g1lPJGKm1Vrl2i5VnZu69mP6u775u -+2D2/VnGKhs/I0qUJDAnyIm860Qkmss9vk/Ves6OF8tiwdneHg56/0OGNFK8YT88 -X7vZdrRTvJez/opMEi4r89fO4aL/3Xtw+zuhTaRjAv04l5U/BXCga99igUOLtFkN -SoxUnMW7gZ/NfaXvCyUeOiDbHPwfmGcCCtRzRBPbUYQaVQNW4AB+dAb/OMRyHdOo -P2gxXdMJxy6MW2Pg6Nwe0uxhHvLe5e/2mXZgLR6UcnHGCyoyx5JO1UbXHfmpGQrI -+pXObSOYqgs4rZpWDW+N8TEAiMEXnM0ZNjX+VVOg4DwzX5Ze4jLp3zO7Bkqp2IRz -znfSxqxx4VyjHQy7Ct9f4qNx2No3WqB4K/TUfet27fJhcKVlmtOJNBir+3I+17Q9 
-eVzYH6Eze9mCUAyTF6ps3MKCuwJXNq+YJyo5UOGwifUll35HaBC07HPKs5fRJNz2 -YqAo07WjuGS3iGJCz51TzZm+ZGiPTx4SSPfSKcOYKMryMguTjClPPGAyzQWWYezy -r/6zcCwupvI= ------END CERTIFICATE----- - -# Issuer: CN=BJCA Global Root CA2 O=BEIJING CERTIFICATE AUTHORITY -# Subject: CN=BJCA Global Root CA2 O=BEIJING CERTIFICATE AUTHORITY -# Label: "BJCA Global Root CA2" -# Serial: 58605626836079930195615843123109055211 -# MD5 Fingerprint: 5e:0a:f6:47:5f:a6:14:e8:11:01:95:3f:4d:01:eb:3c -# SHA1 Fingerprint: f4:27:86:eb:6e:b8:6d:88:31:67:02:fb:ba:66:a4:53:00:aa:7a:a6 -# SHA256 Fingerprint: 57:4d:f6:93:1e:27:80:39:66:7b:72:0a:fd:c1:60:0f:c2:7e:b6:6d:d3:09:29:79:fb:73:85:64:87:21:28:82 ------BEGIN CERTIFICATE----- -MIICJTCCAaugAwIBAgIQLBcIfWQqwP6FGFkGz7RK6zAKBggqhkjOPQQDAzBUMQsw -CQYDVQQGEwJDTjEmMCQGA1UECgwdQkVJSklORyBDRVJUSUZJQ0FURSBBVVRIT1JJ -VFkxHTAbBgNVBAMMFEJKQ0EgR2xvYmFsIFJvb3QgQ0EyMB4XDTE5MTIxOTAzMTgy -MVoXDTQ0MTIxMjAzMTgyMVowVDELMAkGA1UEBhMCQ04xJjAkBgNVBAoMHUJFSUpJ -TkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZMR0wGwYDVQQDDBRCSkNBIEdsb2JhbCBS -b290IENBMjB2MBAGByqGSM49AgEGBSuBBAAiA2IABJ3LgJGNU2e1uVCxA/jlSR9B -IgmwUVJY1is0j8USRhTFiy8shP8sbqjV8QnjAyEUxEM9fMEsxEtqSs3ph+B99iK+ -+kpRuDCK/eHeGBIK9ke35xe/J4rUQUyWPGCWwf0VHKNCMEAwHQYDVR0OBBYEFNJK -sVF/BvDRgh9Obl+rg/xI1LCRMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD -AgEGMAoGCCqGSM49BAMDA2gAMGUCMBq8W9f+qdJUDkpd0m2xQNz0Q9XSSpkZElaA -94M04TVOSG0ED1cxMDAtsaqdAzjbBgIxAMvMh1PLet8gUXOQwKhbYdDFUDn9hf7B -43j4ptZLvZuHjw/l1lOWqzzIQNph91Oj9w== ------END CERTIFICATE----- - -# Issuer: CN=Sectigo Public Server Authentication Root E46 O=Sectigo Limited -# Subject: CN=Sectigo Public Server Authentication Root E46 O=Sectigo Limited -# Label: "Sectigo Public Server Authentication Root E46" -# Serial: 88989738453351742415770396670917916916 -# MD5 Fingerprint: 28:23:f8:b2:98:5c:37:16:3b:3e:46:13:4e:b0:b3:01 -# SHA1 Fingerprint: ec:8a:39:6c:40:f0:2e:bc:42:75:d4:9f:ab:1c:1a:5b:67:be:d2:9a -# SHA256 Fingerprint: c9:0f:26:f0:fb:1b:40:18:b2:22:27:51:9b:5c:a2:b5:3e:2c:a5:b3:be:5c:f1:8e:fe:1b:ef:47:38:0c:53:83 ------BEGIN CERTIFICATE----- -MIICOjCCAcGgAwIBAgIQQvLM2htpN0RfFf51KBC49DAKBggqhkjOPQQDAzBfMQsw -CQYDVQQGEwJHQjEYMBYGA1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQDEy1T -ZWN0aWdvIFB1YmxpYyBTZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBFNDYwHhcN -MjEwMzIyMDAwMDAwWhcNNDYwMzIxMjM1OTU5WjBfMQswCQYDVQQGEwJHQjEYMBYG -A1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQDEy1TZWN0aWdvIFB1YmxpYyBT -ZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBFNDYwdjAQBgcqhkjOPQIBBgUrgQQA -IgNiAAR2+pmpbiDt+dd34wc7qNs9Xzjoq1WmVk/WSOrsfy2qw7LFeeyZYX8QeccC -WvkEN/U0NSt3zn8gj1KjAIns1aeibVvjS5KToID1AZTc8GgHHs3u/iVStSBDHBv+ -6xnOQ6OjQjBAMB0GA1UdDgQWBBTRItpMWfFLXyY4qp3W7usNw/upYTAOBgNVHQ8B -Af8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNnADBkAjAn7qRa -qCG76UeXlImldCBteU/IvZNeWBj7LRoAasm4PdCkT0RHlAFWovgzJQxC36oCMB3q -4S6ILuH5px0CMk7yn2xVdOOurvulGu7t0vzCAxHrRVxgED1cf5kDW21USAGKcw== ------END CERTIFICATE----- - -# Issuer: CN=Sectigo Public Server Authentication Root R46 O=Sectigo Limited -# Subject: CN=Sectigo Public Server Authentication Root R46 O=Sectigo Limited -# Label: "Sectigo Public Server Authentication Root R46" -# Serial: 156256931880233212765902055439220583700 -# MD5 Fingerprint: 32:10:09:52:00:d5:7e:6c:43:df:15:c0:b1:16:93:e5 -# SHA1 Fingerprint: ad:98:f9:f3:e4:7d:75:3b:65:d4:82:b3:a4:52:17:bb:6e:f5:e4:38 -# SHA256 Fingerprint: 7b:b6:47:a6:2a:ee:ac:88:bf:25:7a:a5:22:d0:1f:fe:a3:95:e0:ab:45:c7:3f:93:f6:56:54:ec:38:f2:5a:06 ------BEGIN CERTIFICATE----- -MIIFijCCA3KgAwIBAgIQdY39i658BwD6qSWn4cetFDANBgkqhkiG9w0BAQwFADBf 
-MQswCQYDVQQGEwJHQjEYMBYGA1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQD -Ey1TZWN0aWdvIFB1YmxpYyBTZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBSNDYw -HhcNMjEwMzIyMDAwMDAwWhcNNDYwMzIxMjM1OTU5WjBfMQswCQYDVQQGEwJHQjEY -MBYGA1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQDEy1TZWN0aWdvIFB1Ymxp -YyBTZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBSNDYwggIiMA0GCSqGSIb3DQEB -AQUAA4ICDwAwggIKAoICAQCTvtU2UnXYASOgHEdCSe5jtrch/cSV1UgrJnwUUxDa -ef0rty2k1Cz66jLdScK5vQ9IPXtamFSvnl0xdE8H/FAh3aTPaE8bEmNtJZlMKpnz -SDBh+oF8HqcIStw+KxwfGExxqjWMrfhu6DtK2eWUAtaJhBOqbchPM8xQljeSM9xf -iOefVNlI8JhD1mb9nxc4Q8UBUQvX4yMPFF1bFOdLvt30yNoDN9HWOaEhUTCDsG3X -ME6WW5HwcCSrv0WBZEMNvSE6Lzzpng3LILVCJ8zab5vuZDCQOc2TZYEhMbUjUDM3 -IuM47fgxMMxF/mL50V0yeUKH32rMVhlATc6qu/m1dkmU8Sf4kaWD5QazYw6A3OAS -VYCmO2a0OYctyPDQ0RTp5A1NDvZdV3LFOxxHVp3i1fuBYYzMTYCQNFu31xR13NgE -SJ/AwSiItOkcyqex8Va3e0lMWeUgFaiEAin6OJRpmkkGj80feRQXEgyDet4fsZfu -+Zd4KKTIRJLpfSYFplhym3kT2BFfrsU4YjRosoYwjviQYZ4ybPUHNs2iTG7sijbt -8uaZFURww3y8nDnAtOFr94MlI1fZEoDlSfB1D++N6xybVCi0ITz8fAr/73trdf+L -HaAZBav6+CuBQug4urv7qv094PPK306Xlynt8xhW6aWWrL3DkJiy4Pmi1KZHQ3xt -zwIDAQABo0IwQDAdBgNVHQ4EFgQUVnNYZJX5khqwEioEYnmhQBWIIUkwDgYDVR0P -AQH/BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAC9c -mTz8Bl6MlC5w6tIyMY208FHVvArzZJ8HXtXBc2hkeqK5Duj5XYUtqDdFqij0lgVQ -YKlJfp/imTYpE0RHap1VIDzYm/EDMrraQKFz6oOht0SmDpkBm+S8f74TlH7Kph52 -gDY9hAaLMyZlbcp+nv4fjFg4exqDsQ+8FxG75gbMY/qB8oFM2gsQa6H61SilzwZA -Fv97fRheORKkU55+MkIQpiGRqRxOF3yEvJ+M0ejf5lG5Nkc/kLnHvALcWxxPDkjB -JYOcCj+esQMzEhonrPcibCTRAUH4WAP+JWgiH5paPHxsnnVI84HxZmduTILA7rpX -DhjvLpr3Etiga+kFpaHpaPi8TD8SHkXoUsCjvxInebnMMTzD9joiFgOgyY9mpFui -TdaBJQbpdqQACj7LzTWb4OE4y2BThihCQRxEV+ioratF4yUQvNs+ZUH7G6aXD+u5 -dHn5HrwdVw1Hr8Mvn4dGp+smWg9WY7ViYG4A++MnESLn/pmPNPW56MORcr3Ywx65 -LvKRRFHQV80MNNVIIb/bE/FmJUNS0nAiNs2fxBx1IK1jcmMGDw4nztJqDby1ORrp -0XZ60Vzk50lJLVU3aPAaOpg+VBeHVOmmJ1CJeyAvP/+/oYtKR5j/K3tJPsMpRmAY -QqszKbrAKbkTidOIijlBO8n9pu0f9GBj39ItVQGL ------END CERTIFICATE----- - -# Issuer: CN=SSL.com TLS RSA Root CA 2022 O=SSL Corporation -# Subject: CN=SSL.com TLS RSA Root CA 2022 O=SSL Corporation -# Label: "SSL.com TLS RSA Root CA 2022" -# Serial: 148535279242832292258835760425842727825 -# MD5 Fingerprint: d8:4e:c6:59:30:d8:fe:a0:d6:7a:5a:2c:2c:69:78:da -# SHA1 Fingerprint: ec:2c:83:40:72:af:26:95:10:ff:0e:f2:03:ee:31:70:f6:78:9d:ca -# SHA256 Fingerprint: 8f:af:7d:2e:2c:b4:70:9b:b8:e0:b3:36:66:bf:75:a5:dd:45:b5:de:48:0f:8e:a8:d4:bf:e6:be:bc:17:f2:ed ------BEGIN CERTIFICATE----- -MIIFiTCCA3GgAwIBAgIQb77arXO9CEDii02+1PdbkTANBgkqhkiG9w0BAQsFADBO -MQswCQYDVQQGEwJVUzEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMSUwIwYDVQQD -DBxTU0wuY29tIFRMUyBSU0EgUm9vdCBDQSAyMDIyMB4XDTIyMDgyNTE2MzQyMloX -DTQ2MDgxOTE2MzQyMVowTjELMAkGA1UEBhMCVVMxGDAWBgNVBAoMD1NTTCBDb3Jw -b3JhdGlvbjElMCMGA1UEAwwcU1NMLmNvbSBUTFMgUlNBIFJvb3QgQ0EgMjAyMjCC -AiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANCkCXJPQIgSYT41I57u9nTP -L3tYPc48DRAokC+X94xI2KDYJbFMsBFMF3NQ0CJKY7uB0ylu1bUJPiYYf7ISf5OY -t6/wNr/y7hienDtSxUcZXXTzZGbVXcdotL8bHAajvI9AI7YexoS9UcQbOcGV0ins -S657Lb85/bRi3pZ7QcacoOAGcvvwB5cJOYF0r/c0WRFXCsJbwST0MXMwgsadugL3 -PnxEX4MN8/HdIGkWCVDi1FW24IBydm5MR7d1VVm0U3TZlMZBrViKMWYPHqIbKUBO -L9975hYsLfy/7PO0+r4Y9ptJ1O4Fbtk085zx7AGL0SDGD6C1vBdOSHtRwvzpXGk3 -R2azaPgVKPC506QVzFpPulJwoxJF3ca6TvvC0PeoUidtbnm1jPx7jMEWTO6Af77w -dr5BUxIzrlo4QqvXDz5BjXYHMtWrifZOZ9mxQnUjbvPNQrL8VfVThxc7wDNY8VLS -+YCk8OjwO4s4zKTGkH8PnP2L0aPP2oOnaclQNtVcBdIKQXTbYxE3waWglksejBYS -d66UNHsef8JmAOSqg+qKkK3ONkRN0VHpvB/zagX9wHQfJRlAUW7qglFA35u5CCoG -AtUjHBPW6dvbxrB6y3snm/vg1UYk7RBLY0ulBY+6uB0rpvqR4pJSvezrZ5dtmi2f -gTIFZzL7SAg/2SW4BCUvAgMBAAGjYzBhMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0j 
-BBgwFoAU+y437uOEeicuzRk1sTN8/9REQrkwHQYDVR0OBBYEFPsuN+7jhHonLs0Z -NbEzfP/UREK5MA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAjYlt -hEUY8U+zoO9opMAdrDC8Z2awms22qyIZZtM7QbUQnRC6cm4pJCAcAZli05bg4vsM -QtfhWsSWTVTNj8pDU/0quOr4ZcoBwq1gaAafORpR2eCNJvkLTqVTJXojpBzOCBvf -R4iyrT7gJ4eLSYwfqUdYe5byiB0YrrPRpgqU+tvT5TgKa3kSM/tKWTcWQA673vWJ -DPFs0/dRa1419dvAJuoSc06pkZCmF8NsLzjUo3KUQyxi4U5cMj29TH0ZR6LDSeeW -P4+a0zvkEdiLA9z2tmBVGKaBUfPhqBVq6+AL8BQx1rmMRTqoENjwuSfr98t67wVy -lrXEj5ZzxOhWc5y8aVFjvO9nHEMaX3cZHxj4HCUp+UmZKbaSPaKDN7EgkaibMOlq -bLQjk2UEqxHzDh1TJElTHaE/nUiSEeJ9DU/1172iWD54nR4fK/4huxoTtrEoZP2w -AgDHbICivRZQIA9ygV/MlP+7mea6kMvq+cYMwq7FGc4zoWtcu358NFcXrfA/rs3q -r5nsLFR+jM4uElZI7xc7P0peYNLcdDa8pUNjyw9bowJWCZ4kLOGGgYz+qxcs+sji -Mho6/4UIyYOf8kpIEFR3N+2ivEC+5BB09+Rbu7nzifmPQdjH5FCQNYA+HLhNkNPU -98OwoX6EyneSMSy4kLGCenROmxMmtNVQZlR4rmA= ------END CERTIFICATE----- - -# Issuer: CN=SSL.com TLS ECC Root CA 2022 O=SSL Corporation -# Subject: CN=SSL.com TLS ECC Root CA 2022 O=SSL Corporation -# Label: "SSL.com TLS ECC Root CA 2022" -# Serial: 26605119622390491762507526719404364228 -# MD5 Fingerprint: 99:d7:5c:f1:51:36:cc:e9:ce:d9:19:2e:77:71:56:c5 -# SHA1 Fingerprint: 9f:5f:d9:1a:54:6d:f5:0c:71:f0:ee:7a:bd:17:49:98:84:73:e2:39 -# SHA256 Fingerprint: c3:2f:fd:9f:46:f9:36:d1:6c:36:73:99:09:59:43:4b:9a:d6:0a:af:bb:9e:7c:f3:36:54:f1:44:cc:1b:a1:43 ------BEGIN CERTIFICATE----- -MIICOjCCAcCgAwIBAgIQFAP1q/s3ixdAW+JDsqXRxDAKBggqhkjOPQQDAzBOMQsw -CQYDVQQGEwJVUzEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMSUwIwYDVQQDDBxT -U0wuY29tIFRMUyBFQ0MgUm9vdCBDQSAyMDIyMB4XDTIyMDgyNTE2MzM0OFoXDTQ2 -MDgxOTE2MzM0N1owTjELMAkGA1UEBhMCVVMxGDAWBgNVBAoMD1NTTCBDb3Jwb3Jh -dGlvbjElMCMGA1UEAwwcU1NMLmNvbSBUTFMgRUNDIFJvb3QgQ0EgMjAyMjB2MBAG -ByqGSM49AgEGBSuBBAAiA2IABEUpNXP6wrgjzhR9qLFNoFs27iosU8NgCTWyJGYm -acCzldZdkkAZDsalE3D07xJRKF3nzL35PIXBz5SQySvOkkJYWWf9lCcQZIxPBLFN -SeR7T5v15wj4A4j3p8OSSxlUgaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAfBgNVHSME -GDAWgBSJjy+j6CugFFR781a4Jl9nOAuc0DAdBgNVHQ4EFgQUiY8vo+groBRUe/NW -uCZfZzgLnNAwDgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMDA2gAMGUCMFXjIlbp -15IkWE8elDIPDAI2wv2sdDJO4fscgIijzPvX6yv/N33w7deedWo1dlJF4AIxAMeN -b0Igj762TVntd00pxCAgRWSGOlDGxK0tk/UYfXLtqc/ErFc2KAhl3zx5Zn6g6g== ------END CERTIFICATE----- - -# Issuer: CN=Atos TrustedRoot Root CA ECC TLS 2021 O=Atos -# Subject: CN=Atos TrustedRoot Root CA ECC TLS 2021 O=Atos -# Label: "Atos TrustedRoot Root CA ECC TLS 2021" -# Serial: 81873346711060652204712539181482831616 -# MD5 Fingerprint: 16:9f:ad:f1:70:ad:79:d6:ed:29:b4:d1:c5:79:70:a8 -# SHA1 Fingerprint: 9e:bc:75:10:42:b3:02:f3:81:f4:f7:30:62:d4:8f:c3:a7:51:b2:dd -# SHA256 Fingerprint: b2:fa:e5:3e:14:cc:d7:ab:92:12:06:47:01:ae:27:9c:1d:89:88:fa:cb:77:5f:a8:a0:08:91:4e:66:39:88:a8 ------BEGIN CERTIFICATE----- -MIICFTCCAZugAwIBAgIQPZg7pmY9kGP3fiZXOATvADAKBggqhkjOPQQDAzBMMS4w -LAYDVQQDDCVBdG9zIFRydXN0ZWRSb290IFJvb3QgQ0EgRUNDIFRMUyAyMDIxMQ0w -CwYDVQQKDARBdG9zMQswCQYDVQQGEwJERTAeFw0yMTA0MjIwOTI2MjNaFw00MTA0 -MTcwOTI2MjJaMEwxLjAsBgNVBAMMJUF0b3MgVHJ1c3RlZFJvb3QgUm9vdCBDQSBF -Q0MgVExTIDIwMjExDTALBgNVBAoMBEF0b3MxCzAJBgNVBAYTAkRFMHYwEAYHKoZI -zj0CAQYFK4EEACIDYgAEloZYKDcKZ9Cg3iQZGeHkBQcfl+3oZIK59sRxUM6KDP/X -tXa7oWyTbIOiaG6l2b4siJVBzV3dscqDY4PMwL502eCdpO5KTlbgmClBk1IQ1SQ4 -AjJn8ZQSb+/Xxd4u/RmAo0IwQDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBR2 -KCXWfeBmmnoJsmo7jjPXNtNPojAOBgNVHQ8BAf8EBAMCAYYwCgYIKoZIzj0EAwMD -aAAwZQIwW5kp85wxtolrbNa9d+F851F+uDrNozZffPc8dz7kUK2o59JZDCaOMDtu -CCrCp1rIAjEAmeMM56PDr9NJLkaCI2ZdyQAUEv049OGYa3cpetskz2VAv9LcjBHo -9H1/IISpQuQo ------END CERTIFICATE----- - -# Issuer: CN=Atos TrustedRoot Root CA RSA TLS 2021 O=Atos -# Subject: 
CN=Atos TrustedRoot Root CA RSA TLS 2021 O=Atos -# Label: "Atos TrustedRoot Root CA RSA TLS 2021" -# Serial: 111436099570196163832749341232207667876 -# MD5 Fingerprint: d4:d3:46:b8:9a:c0:9c:76:5d:9e:3a:c3:b9:99:31:d2 -# SHA1 Fingerprint: 18:52:3b:0d:06:37:e4:d6:3a:df:23:e4:98:fb:5b:16:fb:86:74:48 -# SHA256 Fingerprint: 81:a9:08:8e:a5:9f:b3:64:c5:48:a6:f8:55:59:09:9b:6f:04:05:ef:bf:18:e5:32:4e:c9:f4:57:ba:00:11:2f ------BEGIN CERTIFICATE----- -MIIFZDCCA0ygAwIBAgIQU9XP5hmTC/srBRLYwiqipDANBgkqhkiG9w0BAQwFADBM -MS4wLAYDVQQDDCVBdG9zIFRydXN0ZWRSb290IFJvb3QgQ0EgUlNBIFRMUyAyMDIx -MQ0wCwYDVQQKDARBdG9zMQswCQYDVQQGEwJERTAeFw0yMTA0MjIwOTIxMTBaFw00 -MTA0MTcwOTIxMDlaMEwxLjAsBgNVBAMMJUF0b3MgVHJ1c3RlZFJvb3QgUm9vdCBD -QSBSU0EgVExTIDIwMjExDTALBgNVBAoMBEF0b3MxCzAJBgNVBAYTAkRFMIICIjAN -BgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAtoAOxHm9BYx9sKOdTSJNy/BBl01Z -4NH+VoyX8te9j2y3I49f1cTYQcvyAh5x5en2XssIKl4w8i1mx4QbZFc4nXUtVsYv -Ye+W/CBGvevUez8/fEc4BKkbqlLfEzfTFRVOvV98r61jx3ncCHvVoOX3W3WsgFWZ -kmGbzSoXfduP9LVq6hdKZChmFSlsAvFr1bqjM9xaZ6cF4r9lthawEO3NUDPJcFDs -GY6wx/J0W2tExn2WuZgIWWbeKQGb9Cpt0xU6kGpn8bRrZtkh68rZYnxGEFzedUln -nkL5/nWpo63/dgpnQOPF943HhZpZnmKaau1Fh5hnstVKPNe0OwANwI8f4UDErmwh -3El+fsqyjW22v5MvoVw+j8rtgI5Y4dtXz4U2OLJxpAmMkokIiEjxQGMYsluMWuPD -0xeqqxmjLBvk1cbiZnrXghmmOxYsL3GHX0WelXOTwkKBIROW1527k2gV+p2kHYzy -geBYBr3JtuP2iV2J+axEoctr+hbxx1A9JNr3w+SH1VbxT5Aw+kUJWdo0zuATHAR8 -ANSbhqRAvNncTFd+rrcztl524WWLZt+NyteYr842mIycg5kDcPOvdO3GDjbnvezB -c6eUWsuSZIKmAMFwoW4sKeFYV+xafJlrJaSQOoD0IJ2azsct+bJLKZWD6TWNp0lI -pw9MGZHQ9b8Q4HECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU -dEmZ0f+0emhFdcN+tNzMzjkz2ggwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEB -DAUAA4ICAQAjQ1MkYlxt/T7Cz1UAbMVWiLkO3TriJQ2VSpfKgInuKs1l+NsW4AmS -4BjHeJi78+xCUvuppILXTdiK/ORO/auQxDh1MoSf/7OwKwIzNsAQkG8dnK/haZPs -o0UvFJ/1TCplQ3IM98P4lYsU84UgYt1UU90s3BiVaU+DR3BAM1h3Egyi61IxHkzJ -qM7F78PRreBrAwA0JrRUITWXAdxfG/F851X6LWh3e9NpzNMOa7pNdkTWwhWaJuyw -xfW70Xp0wmzNxbVe9kzmWy2B27O3Opee7c9GslA9hGCZcbUztVdF5kJHdWoOsAgM -rr3e97sPWD2PAzHoPYJQyi9eDF20l74gNAf0xBLh7tew2VktafcxBPTy+av5EzH4 -AXcOPUIjJsyacmdRIXrMPIWo6iFqO9taPKU0nprALN+AnCng33eU0aKAQv9qTFsR -0PXNor6uzFFcw9VUewyu1rkGd4Di7wcaaMxZUa1+XGdrudviB0JbuAEFWDlN5LuY -o7Ey7Nmj1m+UI/87tyll5gfp77YZ6ufCOB0yiJA8EytuzO+rdwY0d4RPcuSBhPm5 -dDTedk+SKlOxJTnbPP/lPqYO5Wue/9vsL3SD3460s6neFE3/MaNFcyT6lSnMEpcE -oji2jbDwN/zIIX8/syQbPYtuzE2wFg2WHYMfRsCbvUOZ58SWLs5fyQ== ------END CERTIFICATE----- - -# Issuer: CN=TrustAsia Global Root CA G3 O=TrustAsia Technologies, Inc. -# Subject: CN=TrustAsia Global Root CA G3 O=TrustAsia Technologies, Inc. 
-# Label: "TrustAsia Global Root CA G3" -# Serial: 576386314500428537169965010905813481816650257167 -# MD5 Fingerprint: 30:42:1b:b7:bb:81:75:35:e4:16:4f:53:d2:94:de:04 -# SHA1 Fingerprint: 63:cf:b6:c1:27:2b:56:e4:88:8e:1c:23:9a:b6:2e:81:47:24:c3:c7 -# SHA256 Fingerprint: e0:d3:22:6a:eb:11:63:c2:e4:8f:f9:be:3b:50:b4:c6:43:1b:e7:bb:1e:ac:c5:c3:6b:5d:5e:c5:09:03:9a:08 ------BEGIN CERTIFICATE----- -MIIFpTCCA42gAwIBAgIUZPYOZXdhaqs7tOqFhLuxibhxkw8wDQYJKoZIhvcNAQEM -BQAwWjELMAkGA1UEBhMCQ04xJTAjBgNVBAoMHFRydXN0QXNpYSBUZWNobm9sb2dp -ZXMsIEluYy4xJDAiBgNVBAMMG1RydXN0QXNpYSBHbG9iYWwgUm9vdCBDQSBHMzAe -Fw0yMTA1MjAwMjEwMTlaFw00NjA1MTkwMjEwMTlaMFoxCzAJBgNVBAYTAkNOMSUw -IwYDVQQKDBxUcnVzdEFzaWEgVGVjaG5vbG9naWVzLCBJbmMuMSQwIgYDVQQDDBtU -cnVzdEFzaWEgR2xvYmFsIFJvb3QgQ0EgRzMwggIiMA0GCSqGSIb3DQEBAQUAA4IC -DwAwggIKAoICAQDAMYJhkuSUGwoqZdC+BqmHO1ES6nBBruL7dOoKjbmzTNyPtxNS -T1QY4SxzlZHFZjtqz6xjbYdT8PfxObegQ2OwxANdV6nnRM7EoYNl9lA+sX4WuDqK -AtCWHwDNBSHvBm3dIZwZQ0WhxeiAysKtQGIXBsaqvPPW5vxQfmZCHzyLpnl5hkA1 -nyDvP+uLRx+PjsXUjrYsyUQE49RDdT/VP68czH5GX6zfZBCK70bwkPAPLfSIC7Ep -qq+FqklYqL9joDiR5rPmd2jE+SoZhLsO4fWvieylL1AgdB4SQXMeJNnKziyhWTXA -yB1GJ2Faj/lN03J5Zh6fFZAhLf3ti1ZwA0pJPn9pMRJpxx5cynoTi+jm9WAPzJMs -hH/x/Gr8m0ed262IPfN2dTPXS6TIi/n1Q1hPy8gDVI+lhXgEGvNz8teHHUGf59gX -zhqcD0r83ERoVGjiQTz+LISGNzzNPy+i2+f3VANfWdP3kXjHi3dqFuVJhZBFcnAv -kV34PmVACxmZySYgWmjBNb9Pp1Hx2BErW+Canig7CjoKH8GB5S7wprlppYiU5msT -f9FkPz2ccEblooV7WIQn3MSAPmeamseaMQ4w7OYXQJXZRe0Blqq/DPNL0WP3E1jA -uPP6Z92bfW1K/zJMtSU7/xxnD4UiWQWRkUF3gdCFTIcQcf+eQxuulXUtgQIDAQAB -o2MwYTAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFEDk5PIj7zjKsK5Xf/Ih -MBY027ySMB0GA1UdDgQWBBRA5OTyI+84yrCuV3/yITAWNNu8kjAOBgNVHQ8BAf8E -BAMCAQYwDQYJKoZIhvcNAQEMBQADggIBACY7UeFNOPMyGLS0XuFlXsSUT9SnYaP4 -wM8zAQLpw6o1D/GUE3d3NZ4tVlFEbuHGLige/9rsR82XRBf34EzC4Xx8MnpmyFq2 -XFNFV1pF1AWZLy4jVe5jaN/TG3inEpQGAHUNcoTpLrxaatXeL1nHo+zSh2bbt1S1 -JKv0Q3jbSwTEb93mPmY+KfJLaHEih6D4sTNjduMNhXJEIlU/HHzp/LgV6FL6qj6j -ITk1dImmasI5+njPtqzn59ZW/yOSLlALqbUHM/Q4X6RJpstlcHboCoWASzY9M/eV -VHUl2qzEc4Jl6VL1XP04lQJqaTDFHApXB64ipCz5xUG3uOyfT0gA+QEEVcys+TIx -xHWVBqB/0Y0n3bOppHKH/lmLmnp0Ft0WpWIp6zqW3IunaFnT63eROfjXy9mPX1on -AX1daBli2MjN9LdyR75bl87yraKZk62Uy5P2EgmVtqvXO9A/EcswFi55gORngS1d -7XB4tmBZrOFdRWOPyN9yaFvqHbgB8X7754qz41SgOAngPN5C8sLtLpvzHzW2Ntjj -gKGLzZlkD8Kqq7HK9W+eQ42EVJmzbsASZthwEPEGNTNDqJwuuhQxzhB/HIbjj9LV -+Hfsm6vxL2PZQl/gZ4FkkfGXL/xuJvYz+NO1+MRiqzFRJQJ6+N1rZdVtTTDIZbpo -FGWsJwt0ivKH ------END CERTIFICATE----- - -# Issuer: CN=TrustAsia Global Root CA G4 O=TrustAsia Technologies, Inc. -# Subject: CN=TrustAsia Global Root CA G4 O=TrustAsia Technologies, Inc. 
-# Label: "TrustAsia Global Root CA G4" -# Serial: 451799571007117016466790293371524403291602933463 -# MD5 Fingerprint: 54:dd:b2:d7:5f:d8:3e:ed:7c:e0:0b:2e:cc:ed:eb:eb -# SHA1 Fingerprint: 57:73:a5:61:5d:80:b2:e6:ac:38:82:fc:68:07:31:ac:9f:b5:92:5a -# SHA256 Fingerprint: be:4b:56:cb:50:56:c0:13:6a:52:6d:f4:44:50:8d:aa:36:a0:b5:4f:42:e4:ac:38:f7:2a:f4:70:e4:79:65:4c ------BEGIN CERTIFICATE----- -MIICVTCCAdygAwIBAgIUTyNkuI6XY57GU4HBdk7LKnQV1tcwCgYIKoZIzj0EAwMw -WjELMAkGA1UEBhMCQ04xJTAjBgNVBAoMHFRydXN0QXNpYSBUZWNobm9sb2dpZXMs -IEluYy4xJDAiBgNVBAMMG1RydXN0QXNpYSBHbG9iYWwgUm9vdCBDQSBHNDAeFw0y -MTA1MjAwMjEwMjJaFw00NjA1MTkwMjEwMjJaMFoxCzAJBgNVBAYTAkNOMSUwIwYD -VQQKDBxUcnVzdEFzaWEgVGVjaG5vbG9naWVzLCBJbmMuMSQwIgYDVQQDDBtUcnVz -dEFzaWEgR2xvYmFsIFJvb3QgQ0EgRzQwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAATx -s8045CVD5d4ZCbuBeaIVXxVjAd7Cq92zphtnS4CDr5nLrBfbK5bKfFJV4hrhPVbw -LxYI+hW8m7tH5j/uqOFMjPXTNvk4XatwmkcN4oFBButJ+bAp3TPsUKV/eSm4IJij -YzBhMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUpbtKl86zK3+kMd6Xg1mD -pm9xy94wHQYDVR0OBBYEFKW7SpfOsyt/pDHel4NZg6ZvccveMA4GA1UdDwEB/wQE -AwIBBjAKBggqhkjOPQQDAwNnADBkAjBe8usGzEkxn0AAbbd+NvBNEU/zy4k6LHiR -UKNbwMp1JvK/kF0LgoxgKJ/GcJpo5PECMFxYDlZ2z1jD1xCMuo6u47xkdUfFVZDj -/bpV6wfEU6s3qe4hsiFbYI89MvHVI5TWWA== ------END CERTIFICATE----- - -# Issuer: CN=CommScope Public Trust ECC Root-01 O=CommScope -# Subject: CN=CommScope Public Trust ECC Root-01 O=CommScope -# Label: "CommScope Public Trust ECC Root-01" -# Serial: 385011430473757362783587124273108818652468453534 -# MD5 Fingerprint: 3a:40:a7:fc:03:8c:9c:38:79:2f:3a:a2:6c:b6:0a:16 -# SHA1 Fingerprint: 07:86:c0:d8:dd:8e:c0:80:98:06:98:d0:58:7a:ef:de:a6:cc:a2:5d -# SHA256 Fingerprint: 11:43:7c:da:7b:b4:5e:41:36:5f:45:b3:9a:38:98:6b:0d:e0:0d:ef:34:8e:0c:7b:b0:87:36:33:80:0b:c3:8b ------BEGIN CERTIFICATE----- -MIICHTCCAaOgAwIBAgIUQ3CCd89NXTTxyq4yLzf39H91oJ4wCgYIKoZIzj0EAwMw -TjELMAkGA1UEBhMCVVMxEjAQBgNVBAoMCUNvbW1TY29wZTErMCkGA1UEAwwiQ29t -bVNjb3BlIFB1YmxpYyBUcnVzdCBFQ0MgUm9vdC0wMTAeFw0yMTA0MjgxNzM1NDNa -Fw00NjA0MjgxNzM1NDJaME4xCzAJBgNVBAYTAlVTMRIwEAYDVQQKDAlDb21tU2Nv -cGUxKzApBgNVBAMMIkNvbW1TY29wZSBQdWJsaWMgVHJ1c3QgRUNDIFJvb3QtMDEw -djAQBgcqhkjOPQIBBgUrgQQAIgNiAARLNumuV16ocNfQj3Rid8NeeqrltqLxeP0C -flfdkXmcbLlSiFS8LwS+uM32ENEp7LXQoMPwiXAZu1FlxUOcw5tjnSCDPgYLpkJE -hRGnSjot6dZoL0hOUysHP029uax3OVejQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYD -VR0PAQH/BAQDAgEGMB0GA1UdDgQWBBSOB2LAUN3GGQYARnQE9/OufXVNMDAKBggq -hkjOPQQDAwNoADBlAjEAnDPfQeMjqEI2Jpc1XHvr20v4qotzVRVcrHgpD7oh2MSg -2NED3W3ROT3Ek2DS43KyAjB8xX6I01D1HiXo+k515liWpDVfG2XqYZpwI7UNo5uS -Um9poIyNStDuiw7LR47QjRE= ------END CERTIFICATE----- - -# Issuer: CN=CommScope Public Trust ECC Root-02 O=CommScope -# Subject: CN=CommScope Public Trust ECC Root-02 O=CommScope -# Label: "CommScope Public Trust ECC Root-02" -# Serial: 234015080301808452132356021271193974922492992893 -# MD5 Fingerprint: 59:b0:44:d5:65:4d:b8:5c:55:19:92:02:b6:d1:94:b2 -# SHA1 Fingerprint: 3c:3f:ef:57:0f:fe:65:93:86:9e:a0:fe:b0:f6:ed:8e:d1:13:c7:e5 -# SHA256 Fingerprint: 2f:fb:7f:81:3b:bb:b3:c8:9a:b4:e8:16:2d:0f:16:d7:15:09:a8:30:cc:9d:73:c2:62:e5:14:08:75:d1:ad:4a ------BEGIN CERTIFICATE----- -MIICHDCCAaOgAwIBAgIUKP2ZYEFHpgE6yhR7H+/5aAiDXX0wCgYIKoZIzj0EAwMw -TjELMAkGA1UEBhMCVVMxEjAQBgNVBAoMCUNvbW1TY29wZTErMCkGA1UEAwwiQ29t -bVNjb3BlIFB1YmxpYyBUcnVzdCBFQ0MgUm9vdC0wMjAeFw0yMTA0MjgxNzQ0NTRa -Fw00NjA0MjgxNzQ0NTNaME4xCzAJBgNVBAYTAlVTMRIwEAYDVQQKDAlDb21tU2Nv -cGUxKzApBgNVBAMMIkNvbW1TY29wZSBQdWJsaWMgVHJ1c3QgRUNDIFJvb3QtMDIw -djAQBgcqhkjOPQIBBgUrgQQAIgNiAAR4MIHoYx7l63FRD/cHB8o5mXxO1Q/MMDAL -j2aTPs+9xYa9+bG3tD60B8jzljHz7aRP+KNOjSkVWLjVb3/ubCK1sK9IRQq9qEmU 
-v4RDsNuESgMjGWdqb8FuvAY5N9GIIvejQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYD -VR0PAQH/BAQDAgEGMB0GA1UdDgQWBBTmGHX/72DehKT1RsfeSlXjMjZ59TAKBggq -hkjOPQQDAwNnADBkAjAmc0l6tqvmSfR9Uj/UQQSugEODZXW5hYA4O9Zv5JOGq4/n -ich/m35rChJVYaoR4HkCMHfoMXGsPHED1oQmHhS48zs73u1Z/GtMMH9ZzkXpc2AV -mkzw5l4lIhVtwodZ0LKOag== ------END CERTIFICATE----- - -# Issuer: CN=CommScope Public Trust RSA Root-01 O=CommScope -# Subject: CN=CommScope Public Trust RSA Root-01 O=CommScope -# Label: "CommScope Public Trust RSA Root-01" -# Serial: 354030733275608256394402989253558293562031411421 -# MD5 Fingerprint: 0e:b4:15:bc:87:63:5d:5d:02:73:d4:26:38:68:73:d8 -# SHA1 Fingerprint: 6d:0a:5f:f7:b4:23:06:b4:85:b3:b7:97:64:fc:ac:75:f5:33:f2:93 -# SHA256 Fingerprint: 02:bd:f9:6e:2a:45:dd:9b:f1:8f:c7:e1:db:df:21:a0:37:9b:a3:c9:c2:61:03:44:cf:d8:d6:06:fe:c1:ed:81 ------BEGIN CERTIFICATE----- -MIIFbDCCA1SgAwIBAgIUPgNJgXUWdDGOTKvVxZAplsU5EN0wDQYJKoZIhvcNAQEL -BQAwTjELMAkGA1UEBhMCVVMxEjAQBgNVBAoMCUNvbW1TY29wZTErMCkGA1UEAwwi -Q29tbVNjb3BlIFB1YmxpYyBUcnVzdCBSU0EgUm9vdC0wMTAeFw0yMTA0MjgxNjQ1 -NTRaFw00NjA0MjgxNjQ1NTNaME4xCzAJBgNVBAYTAlVTMRIwEAYDVQQKDAlDb21t -U2NvcGUxKzApBgNVBAMMIkNvbW1TY29wZSBQdWJsaWMgVHJ1c3QgUlNBIFJvb3Qt -MDEwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCwSGWjDR1C45FtnYSk -YZYSwu3D2iM0GXb26v1VWvZVAVMP8syMl0+5UMuzAURWlv2bKOx7dAvnQmtVzslh -suitQDy6uUEKBU8bJoWPQ7VAtYXR1HHcg0Hz9kXHgKKEUJdGzqAMxGBWBB0HW0al -DrJLpA6lfO741GIDuZNqihS4cPgugkY4Iw50x2tBt9Apo52AsH53k2NC+zSDO3Oj -WiE260f6GBfZumbCk6SP/F2krfxQapWsvCQz0b2If4b19bJzKo98rwjyGpg/qYFl -P8GMicWWMJoKz/TUyDTtnS+8jTiGU+6Xn6myY5QXjQ/cZip8UlF1y5mO6D1cv547 -KI2DAg+pn3LiLCuz3GaXAEDQpFSOm117RTYm1nJD68/A6g3czhLmfTifBSeolz7p -UcZsBSjBAg/pGG3svZwG1KdJ9FQFa2ww8esD1eo9anbCyxooSU1/ZOD6K9pzg4H/ -kQO9lLvkuI6cMmPNn7togbGEW682v3fuHX/3SZtS7NJ3Wn2RnU3COS3kuoL4b/JO -Hg9O5j9ZpSPcPYeoKFgo0fEbNttPxP/hjFtyjMcmAyejOQoBqsCyMWCDIqFPEgkB -Ea801M/XrmLTBQe0MXXgDW1XT2mH+VepuhX2yFJtocucH+X8eKg1mp9BFM6ltM6U -CBwJrVbl2rZJmkrqYxhTnCwuwwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4G -A1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUN12mmnQywsL5x6YVEFm45P3luG0wDQYJ -KoZIhvcNAQELBQADggIBAK+nz97/4L1CjU3lIpbfaOp9TSp90K09FlxD533Ahuh6 -NWPxzIHIxgvoLlI1pKZJkGNRrDSsBTtXAOnTYtPZKdVUvhwQkZyybf5Z/Xn36lbQ -nmhUQo8mUuJM3y+Xpi/SB5io82BdS5pYV4jvguX6r2yBS5KPQJqTRlnLX3gWsWc+ -QgvfKNmwrZggvkN80V4aCRckjXtdlemrwWCrWxhkgPut4AZ9HcpZuPN4KWfGVh2v -trV0KnahP/t1MJ+UXjulYPPLXAziDslg+MkfFoom3ecnf+slpoq9uC02EJqxWE2a -aE9gVOX2RhOOiKy8IUISrcZKiX2bwdgt6ZYD9KJ0DLwAHb/WNyVntHKLr4W96ioD -j8z7PEQkguIBpQtZtjSNMgsSDesnwv1B10A8ckYpwIzqug/xBpMu95yo9GA+o/E4 -Xo4TwbM6l4c/ksp4qRyv0LAbJh6+cOx69TOY6lz/KwsETkPdY34Op054A5U+1C0w -lREQKC6/oAI+/15Z0wUOlV9TRe9rh9VIzRamloPh37MG88EU26fsHItdkJANclHn -YfkUyq+Dj7+vsQpZXdxc1+SWrVtgHdqul7I52Qb1dgAT+GhMIbA1xNxVssnBQVoc -icCMb3SgazNNtQEo/a2tiRc7ppqEvOuM6sRxJKi6KfkIsidWNTJf6jn7MZrVGczw ------END CERTIFICATE----- - -# Issuer: CN=CommScope Public Trust RSA Root-02 O=CommScope -# Subject: CN=CommScope Public Trust RSA Root-02 O=CommScope -# Label: "CommScope Public Trust RSA Root-02" -# Serial: 480062499834624527752716769107743131258796508494 -# MD5 Fingerprint: e1:29:f9:62:7b:76:e2:96:6d:f3:d4:d7:0f:ae:1f:aa -# SHA1 Fingerprint: ea:b0:e2:52:1b:89:93:4c:11:68:f2:d8:9a:ac:22:4c:a3:8a:57:ae -# SHA256 Fingerprint: ff:e9:43:d7:93:42:4b:4f:7c:44:0c:1c:3d:64:8d:53:63:f3:4b:82:dc:87:aa:7a:9f:11:8f:c5:de:e1:01:f1 ------BEGIN CERTIFICATE----- -MIIFbDCCA1SgAwIBAgIUVBa/O345lXGN0aoApYYNK496BU4wDQYJKoZIhvcNAQEL -BQAwTjELMAkGA1UEBhMCVVMxEjAQBgNVBAoMCUNvbW1TY29wZTErMCkGA1UEAwwi -Q29tbVNjb3BlIFB1YmxpYyBUcnVzdCBSU0EgUm9vdC0wMjAeFw0yMTA0MjgxNzE2 
-NDNaFw00NjA0MjgxNzE2NDJaME4xCzAJBgNVBAYTAlVTMRIwEAYDVQQKDAlDb21t -U2NvcGUxKzApBgNVBAMMIkNvbW1TY29wZSBQdWJsaWMgVHJ1c3QgUlNBIFJvb3Qt -MDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDh+g77aAASyE3VrCLE -NQE7xVTlWXZjpX/rwcRqmL0yjReA61260WI9JSMZNRTpf4mnG2I81lDnNJUDMrG0 -kyI9p+Kx7eZ7Ti6Hmw0zdQreqjXnfuU2mKKuJZ6VszKWpCtYHu8//mI0SFHRtI1C -rWDaSWqVcN3SAOLMV2MCe5bdSZdbkk6V0/nLKR8YSvgBKtJjCW4k6YnS5cciTNxz -hkcAqg2Ijq6FfUrpuzNPDlJwnZXjfG2WWy09X6GDRl224yW4fKcZgBzqZUPckXk2 -LHR88mcGyYnJ27/aaL8j7dxrrSiDeS/sOKUNNwFnJ5rpM9kzXzehxfCrPfp4sOcs -n/Y+n2Dg70jpkEUeBVF4GiwSLFworA2iI540jwXmojPOEXcT1A6kHkIfhs1w/tku -FT0du7jyU1fbzMZ0KZwYszZ1OC4PVKH4kh+Jlk+71O6d6Ts2QrUKOyrUZHk2EOH5 -kQMreyBUzQ0ZGshBMjTRsJnhkB4BQDa1t/qp5Xd1pCKBXbCL5CcSD1SIxtuFdOa3 -wNemKfrb3vOTlycEVS8KbzfFPROvCgCpLIscgSjX74Yxqa7ybrjKaixUR9gqiC6v -wQcQeKwRoi9C8DfF8rhW3Q5iLc4tVn5V8qdE9isy9COoR+jUKgF4z2rDN6ieZdIs -5fq6M8EGRPbmz6UNp2YINIos8wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4G -A1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUR9DnsSL/nSz12Vdgs7GxcJXvYXowDQYJ -KoZIhvcNAQELBQADggIBAIZpsU0v6Z9PIpNojuQhmaPORVMbc0RTAIFhzTHjCLqB -KCh6krm2qMhDnscTJk3C2OVVnJJdUNjCK9v+5qiXz1I6JMNlZFxHMaNlNRPDk7n3 -+VGXu6TwYofF1gbTl4MgqX67tiHCpQ2EAOHyJxCDut0DgdXdaMNmEMjRdrSzbyme -APnCKfWxkxlSaRosTKCL4BWaMS/TiJVZbuXEs1DIFAhKm4sTg7GkcrI7djNB3Nyq -pgdvHSQSn8h2vS/ZjvQs7rfSOBAkNlEv41xdgSGn2rtO/+YHqP65DSdsu3BaVXoT -6fEqSWnHX4dXTEN5bTpl6TBcQe7rd6VzEojov32u5cSoHw2OHG1QAk8mGEPej1WF -sQs3BWDJVTkSBKEqz3EWnzZRSb9wO55nnPt7eck5HHisd5FUmrh1CoFSl+NmYWvt -PjgelmFV4ZFUjO2MJB+ByRCac5krFk5yAD9UG/iNuovnFNa2RU9g7Jauwy8CTl2d -lklyALKrdVwPaFsdZcJfMw8eD/A7hvWwTruc9+olBdytoptLFwG+Qt81IR2tq670 -v64fG9PiO/yzcnMcmyiQiRM9HcEARwmWmjgb3bHPDcK0RPOWlc4yOo80nOAXx17O -rg3bhzjlP1v9mxnhMUF6cKojawHhRUzNlM47ni3niAIi9G7oyOzWPPO5std3eqx7 ------END CERTIFICATE----- - -# Issuer: CN=Telekom Security TLS ECC Root 2020 O=Deutsche Telekom Security GmbH -# Subject: CN=Telekom Security TLS ECC Root 2020 O=Deutsche Telekom Security GmbH -# Label: "Telekom Security TLS ECC Root 2020" -# Serial: 72082518505882327255703894282316633856 -# MD5 Fingerprint: c1:ab:fe:6a:10:2c:03:8d:bc:1c:22:32:c0:85:a7:fd -# SHA1 Fingerprint: c0:f8:96:c5:a9:3b:01:06:21:07:da:18:42:48:bc:e9:9d:88:d5:ec -# SHA256 Fingerprint: 57:8a:f4:de:d0:85:3f:4e:59:98:db:4a:ea:f9:cb:ea:8d:94:5f:60:b6:20:a3:8d:1a:3c:13:b2:bc:7b:a8:e1 ------BEGIN CERTIFICATE----- -MIICQjCCAcmgAwIBAgIQNjqWjMlcsljN0AFdxeVXADAKBggqhkjOPQQDAzBjMQsw -CQYDVQQGEwJERTEnMCUGA1UECgweRGV1dHNjaGUgVGVsZWtvbSBTZWN1cml0eSBH -bWJIMSswKQYDVQQDDCJUZWxla29tIFNlY3VyaXR5IFRMUyBFQ0MgUm9vdCAyMDIw -MB4XDTIwMDgyNTA3NDgyMFoXDTQ1MDgyNTIzNTk1OVowYzELMAkGA1UEBhMCREUx -JzAlBgNVBAoMHkRldXRzY2hlIFRlbGVrb20gU2VjdXJpdHkgR21iSDErMCkGA1UE -AwwiVGVsZWtvbSBTZWN1cml0eSBUTFMgRUNDIFJvb3QgMjAyMDB2MBAGByqGSM49 -AgEGBSuBBAAiA2IABM6//leov9Wq9xCazbzREaK9Z0LMkOsVGJDZos0MKiXrPk/O -tdKPD/M12kOLAoC+b1EkHQ9rK8qfwm9QMuU3ILYg/4gND21Ju9sGpIeQkpT0CdDP -f8iAC8GXs7s1J8nCG6NCMEAwHQYDVR0OBBYEFONyzG6VmUex5rNhTNHLq+O6zd6f -MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMAoGCCqGSM49BAMDA2cA -MGQCMHVSi7ekEE+uShCLsoRbQuHmKjYC2qBuGT8lv9pZMo7k+5Dck2TOrbRBR2Di -z6fLHgIwN0GMZt9Ba9aDAEH9L1r3ULRn0SyocddDypwnJJGDSA3PzfdUga/sf+Rn -27iQ7t0l ------END CERTIFICATE----- - -# Issuer: CN=Telekom Security TLS RSA Root 2023 O=Deutsche Telekom Security GmbH -# Subject: CN=Telekom Security TLS RSA Root 2023 O=Deutsche Telekom Security GmbH -# Label: "Telekom Security TLS RSA Root 2023" -# Serial: 44676229530606711399881795178081572759 -# MD5 Fingerprint: bf:5b:eb:54:40:cd:48:71:c4:20:8d:7d:de:0a:42:f2 -# SHA1 Fingerprint: 54:d3:ac:b3:bd:57:56:f6:85:9d:ce:e5:c3:21:e2:d4:ad:83:d0:93 -# 
SHA256 Fingerprint: ef:c6:5c:ad:bb:59:ad:b6:ef:e8:4d:a2:23:11:b3:56:24:b7:1b:3b:1e:a0:da:8b:66:55:17:4e:c8:97:86:46 ------BEGIN CERTIFICATE----- -MIIFszCCA5ugAwIBAgIQIZxULej27HF3+k7ow3BXlzANBgkqhkiG9w0BAQwFADBj -MQswCQYDVQQGEwJERTEnMCUGA1UECgweRGV1dHNjaGUgVGVsZWtvbSBTZWN1cml0 -eSBHbWJIMSswKQYDVQQDDCJUZWxla29tIFNlY3VyaXR5IFRMUyBSU0EgUm9vdCAy -MDIzMB4XDTIzMDMyODEyMTY0NVoXDTQ4MDMyNzIzNTk1OVowYzELMAkGA1UEBhMC -REUxJzAlBgNVBAoMHkRldXRzY2hlIFRlbGVrb20gU2VjdXJpdHkgR21iSDErMCkG -A1UEAwwiVGVsZWtvbSBTZWN1cml0eSBUTFMgUlNBIFJvb3QgMjAyMzCCAiIwDQYJ -KoZIhvcNAQEBBQADggIPADCCAgoCggIBAO01oYGA88tKaVvC+1GDrib94W7zgRJ9 -cUD/h3VCKSHtgVIs3xLBGYSJwb3FKNXVS2xE1kzbB5ZKVXrKNoIENqil/Cf2SfHV -cp6R+SPWcHu79ZvB7JPPGeplfohwoHP89v+1VmLhc2o0mD6CuKyVU/QBoCcHcqMA -U6DksquDOFczJZSfvkgdmOGjup5czQRxUX11eKvzWarE4GC+j4NSuHUaQTXtvPM6 -Y+mpFEXX5lLRbtLevOP1Czvm4MS9Q2QTps70mDdsipWol8hHD/BeEIvnHRz+sTug -BTNoBUGCwQMrAcjnj02r6LX2zWtEtefdi+zqJbQAIldNsLGyMcEWzv/9FIS3R/qy -8XDe24tsNlikfLMR0cN3f1+2JeANxdKz+bi4d9s3cXFH42AYTyS2dTd4uaNir73J -co4vzLuu2+QVUhkHM/tqty1LkCiCc/4YizWN26cEar7qwU02OxY2kTLvtkCJkUPg -8qKrBC7m8kwOFjQgrIfBLX7JZkcXFBGk8/ehJImr2BrIoVyxo/eMbcgByU/J7MT8 -rFEz0ciD0cmfHdRHNCk+y7AO+oMLKFjlKdw/fKifybYKu6boRhYPluV75Gp6SG12 -mAWl3G0eQh5C2hrgUve1g8Aae3g1LDj1H/1Joy7SWWO/gLCMk3PLNaaZlSJhZQNg -+y+TS/qanIA7AgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUtqeX -gj10hZv3PJ+TmpV5dVKMbUcwDwYDVR0TAQH/BAUwAwEB/zAfBgNVHSMEGDAWgBS2 -p5eCPXSFm/c8n5OalXl1UoxtRzANBgkqhkiG9w0BAQwFAAOCAgEAqMxhpr51nhVQ -pGv7qHBFfLp+sVr8WyP6Cnf4mHGCDG3gXkaqk/QeoMPhk9tLrbKmXauw1GLLXrtm -9S3ul0A8Yute1hTWjOKWi0FpkzXmuZlrYrShF2Y0pmtjxrlO8iLpWA1WQdH6DErw -M807u20hOq6OcrXDSvvpfeWxm4bu4uB9tPcy/SKE8YXJN3nptT+/XOR0so8RYgDd -GGah2XsjX/GO1WfoVNpbOms2b/mBsTNHM3dA+VKq3dSDz4V4mZqTuXNnQkYRIer+ -CqkbGmVps4+uFrb2S1ayLfmlyOw7YqPta9BO1UAJpB+Y1zqlklkg5LB9zVtzaL1t -xKITDmcZuI1CfmwMmm6gJC3VRRvcxAIU/oVbZZfKTpBQCHpCNfnqwmbU+AGuHrS+ -w6jv/naaoqYfRvaE7fzbzsQCzndILIyy7MMAo+wsVRjBfhnu4S/yrYObnqsZ38aK -L4x35bcF7DvB7L6Gs4a8wPfc5+pbrrLMtTWGS9DiP7bY+A4A7l3j941Y/8+LN+lj -X273CXE2whJdV/LItM3z7gLfEdxquVeEHVlNjM7IDiPCtyaaEBRx/pOyiriA8A4Q -ntOoUAw3gi/q4Iqd4Sw5/7W0cwDk90imc6y/st53BIe0o82bNSQ3+pCTE4FCxpgm -dTdmQRCsu/WU48IxK63nI1bMNSWSs1A= ------END CERTIFICATE----- - -# Issuer: CN=FIRMAPROFESIONAL CA ROOT-A WEB O=Firmaprofesional SA -# Subject: CN=FIRMAPROFESIONAL CA ROOT-A WEB O=Firmaprofesional SA -# Label: "FIRMAPROFESIONAL CA ROOT-A WEB" -# Serial: 65916896770016886708751106294915943533 -# MD5 Fingerprint: 82:b2:ad:45:00:82:b0:66:63:f8:5f:c3:67:4e:ce:a3 -# SHA1 Fingerprint: a8:31:11:74:a6:14:15:0d:ca:77:dd:0e:e4:0c:5d:58:fc:a0:72:a5 -# SHA256 Fingerprint: be:f2:56:da:f2:6e:9c:69:bd:ec:16:02:35:97:98:f3:ca:f7:18:21:a0:3e:01:82:57:c5:3c:65:61:7f:3d:4a ------BEGIN CERTIFICATE----- -MIICejCCAgCgAwIBAgIQMZch7a+JQn81QYehZ1ZMbTAKBggqhkjOPQQDAzBuMQsw -CQYDVQQGEwJFUzEcMBoGA1UECgwTRmlybWFwcm9mZXNpb25hbCBTQTEYMBYGA1UE -YQwPVkFURVMtQTYyNjM0MDY4MScwJQYDVQQDDB5GSVJNQVBST0ZFU0lPTkFMIENB -IFJPT1QtQSBXRUIwHhcNMjIwNDA2MDkwMTM2WhcNNDcwMzMxMDkwMTM2WjBuMQsw -CQYDVQQGEwJFUzEcMBoGA1UECgwTRmlybWFwcm9mZXNpb25hbCBTQTEYMBYGA1UE -YQwPVkFURVMtQTYyNjM0MDY4MScwJQYDVQQDDB5GSVJNQVBST0ZFU0lPTkFMIENB -IFJPT1QtQSBXRUIwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAARHU+osEaR3xyrq89Zf -e9MEkVz6iMYiuYMQYneEMy3pA4jU4DP37XcsSmDq5G+tbbT4TIqk5B/K6k84Si6C -cyvHZpsKjECcfIr28jlgst7L7Ljkb+qbXbdTkBgyVcUgt5SjYzBhMA8GA1UdEwEB -/wQFMAMBAf8wHwYDVR0jBBgwFoAUk+FDY1w8ndYn81LsF7Kpryz3dvgwHQYDVR0O -BBYEFJPhQ2NcPJ3WJ/NS7Beyqa8s93b4MA4GA1UdDwEB/wQEAwIBBjAKBggqhkjO -PQQDAwNoADBlAjAdfKR7w4l1M+E7qUW/Runpod3JIha3RxEL2Jq68cgLcFBTApFw 
-hVmpHqTm6iMxoAACMQD94vizrxa5HnPEluPBMBnYfubDl94cT7iJLzPrSA8Z94dG -XSaQpYXFuXqUPoeovQA= ------END CERTIFICATE----- - -# Issuer: CN=TWCA CYBER Root CA O=TAIWAN-CA OU=Root CA -# Subject: CN=TWCA CYBER Root CA O=TAIWAN-CA OU=Root CA -# Label: "TWCA CYBER Root CA" -# Serial: 85076849864375384482682434040119489222 -# MD5 Fingerprint: 0b:33:a0:97:52:95:d4:a9:fd:bb:db:6e:a3:55:5b:51 -# SHA1 Fingerprint: f6:b1:1c:1a:83:38:e9:7b:db:b3:a8:c8:33:24:e0:2d:9c:7f:26:66 -# SHA256 Fingerprint: 3f:63:bb:28:14:be:17:4e:c8:b6:43:9c:f0:8d:6d:56:f0:b7:c4:05:88:3a:56:48:a3:34:42:4d:6b:3e:c5:58 ------BEGIN CERTIFICATE----- -MIIFjTCCA3WgAwIBAgIQQAE0jMIAAAAAAAAAATzyxjANBgkqhkiG9w0BAQwFADBQ -MQswCQYDVQQGEwJUVzESMBAGA1UEChMJVEFJV0FOLUNBMRAwDgYDVQQLEwdSb290 -IENBMRswGQYDVQQDExJUV0NBIENZQkVSIFJvb3QgQ0EwHhcNMjIxMTIyMDY1NDI5 -WhcNNDcxMTIyMTU1OTU5WjBQMQswCQYDVQQGEwJUVzESMBAGA1UEChMJVEFJV0FO -LUNBMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJUV0NBIENZQkVSIFJvb3Qg -Q0EwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDG+Moe2Qkgfh1sTs6P -40czRJzHyWmqOlt47nDSkvgEs1JSHWdyKKHfi12VCv7qze33Kc7wb3+szT3vsxxF -avcokPFhV8UMxKNQXd7UtcsZyoC5dc4pztKFIuwCY8xEMCDa6pFbVuYdHNWdZsc/ -34bKS1PE2Y2yHer43CdTo0fhYcx9tbD47nORxc5zb87uEB8aBs/pJ2DFTxnk684i -JkXXYJndzk834H/nY62wuFm40AZoNWDTNq5xQwTxaWV4fPMf88oon1oglWa0zbfu -j3ikRRjpJi+NmykosaS3Om251Bw4ckVYsV7r8Cibt4LK/c/WMw+f+5eesRycnupf -Xtuq3VTpMCEobY5583WSjCb+3MX2w7DfRFlDo7YDKPYIMKoNM+HvnKkHIuNZW0CP -2oi3aQiotyMuRAlZN1vH4xfyIutuOVLF3lSnmMlLIJXcRolftBL5hSmO68gnFSDA -S9TMfAxsNAwmmyYxpjyn9tnQS6Jk/zuZQXLB4HCX8SS7K8R0IrGsayIyJNN4KsDA -oS/xUgXJP+92ZuJF2A09rZXIx4kmyA+upwMu+8Ff+iDhcK2wZSA3M2Cw1a/XDBzC -kHDXShi8fgGwsOsVHkQGzaRP6AzRwyAQ4VRlnrZR0Bp2a0JaWHY06rc3Ga4udfmW -5cFZ95RXKSWNOkyrTZpB0F8mAwIDAQABo2MwYTAOBgNVHQ8BAf8EBAMCAQYwDwYD -VR0TAQH/BAUwAwEB/zAfBgNVHSMEGDAWgBSdhWEUfMFib5do5E83QOGt4A1WNzAd -BgNVHQ4EFgQUnYVhFHzBYm+XaORPN0DhreANVjcwDQYJKoZIhvcNAQEMBQADggIB -AGSPesRiDrWIzLjHhg6hShbNcAu3p4ULs3a2D6f/CIsLJc+o1IN1KriWiLb73y0t -tGlTITVX1olNc79pj3CjYcya2x6a4CD4bLubIp1dhDGaLIrdaqHXKGnK/nZVekZn -68xDiBaiA9a5F/gZbG0jAn/xX9AKKSM70aoK7akXJlQKTcKlTfjF/biBzysseKNn -TKkHmvPfXvt89YnNdJdhEGoHK4Fa0o635yDRIG4kqIQnoVesqlVYL9zZyvpoBJ7t -RCT5dEA7IzOrg1oYJkK2bVS1FmAwbLGg+LhBoF1JSdJlBTrq/p1hvIbZv97Tujqx -f36SNI7JAG7cmL3c7IAFrQI932XtCwP39xaEBDG6k5TY8hL4iuO/Qq+n1M0RFxbI -Qh0UqEL20kCGoE8jypZFVmAGzbdVAaYBlGX+bgUJurSkquLvWL69J1bY73NxW0Qz -8ppy6rBePm6pUlvscG21h483XjyMnM7k8M4MZ0HMzvaAq07MTFb1wWFZk7Q+ptq4 -NxKfKjLji7gh7MMrZQzvIt6IKTtM1/r+t+FHvpw+PoP7UV31aPcuIYXcv/Fa4nzX -xeSDwWrruoBa3lwtcHb4yOWHh8qgnaHlIhInD0Q9HWzq1MKLL295q39QpsQZp6F6 -t5b5wR9iWqJDB0BeJsas7a5wFsWqynKKTbDPAYsDP27X ------END CERTIFICATE----- - -# Issuer: CN=SecureSign Root CA12 O=Cybertrust Japan Co., Ltd. -# Subject: CN=SecureSign Root CA12 O=Cybertrust Japan Co., Ltd. 
-# Label: "SecureSign Root CA12" -# Serial: 587887345431707215246142177076162061960426065942 -# MD5 Fingerprint: c6:89:ca:64:42:9b:62:08:49:0b:1e:7f:e9:07:3d:e8 -# SHA1 Fingerprint: 7a:22:1e:3d:de:1b:06:ac:9e:c8:47:70:16:8e:3c:e5:f7:6b:06:f4 -# SHA256 Fingerprint: 3f:03:4b:b5:70:4d:44:b2:d0:85:45:a0:20:57:de:93:eb:f3:90:5f:ce:72:1a:cb:c7:30:c0:6d:da:ee:90:4e ------BEGIN CERTIFICATE----- -MIIDcjCCAlqgAwIBAgIUZvnHwa/swlG07VOX5uaCwysckBYwDQYJKoZIhvcNAQEL -BQAwUTELMAkGA1UEBhMCSlAxIzAhBgNVBAoTGkN5YmVydHJ1c3QgSmFwYW4gQ28u -LCBMdGQuMR0wGwYDVQQDExRTZWN1cmVTaWduIFJvb3QgQ0ExMjAeFw0yMDA0MDgw -NTM2NDZaFw00MDA0MDgwNTM2NDZaMFExCzAJBgNVBAYTAkpQMSMwIQYDVQQKExpD -eWJlcnRydXN0IEphcGFuIENvLiwgTHRkLjEdMBsGA1UEAxMUU2VjdXJlU2lnbiBS -b290IENBMTIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC6OcE3emhF -KxS06+QT61d1I02PJC0W6K6OyX2kVzsqdiUzg2zqMoqUm048luT9Ub+ZyZN+v/mt -p7JIKwccJ/VMvHASd6SFVLX9kHrko+RRWAPNEHl57muTH2SOa2SroxPjcf59q5zd -J1M3s6oYwlkm7Fsf0uZlfO+TvdhYXAvA42VvPMfKWeP+bl+sg779XSVOKik71gur -FzJ4pOE+lEa+Ym6b3kaosRbnhW70CEBFEaCeVESE99g2zvVQR9wsMJvuwPWW0v4J -hscGWa5Pro4RmHvzC1KqYiaqId+OJTN5lxZJjfU+1UefNzFJM3IFTQy2VYzxV4+K -h9GtxRESOaCtAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD -AgEGMB0GA1UdDgQWBBRXNPN0zwRL1SXm8UC2LEzZLemgrTANBgkqhkiG9w0BAQsF -AAOCAQEAPrvbFxbS8hQBICw4g0utvsqFepq2m2um4fylOqyttCg6r9cBg0krY6Ld -mmQOmFxv3Y67ilQiLUoT865AQ9tPkbeGGuwAtEGBpE/6aouIs3YIcipJQMPTw4WJ -mBClnW8Zt7vPemVV2zfrPIpyMpcemik+rY3moxtt9XUa5rBouVui7mlHJzWhhpmA -8zNL4WukJsPvdFlseqJkth5Ew1DgDzk9qTPxpfPSvWKErI4cqc1avTc7bgoitPQV -55FYxTpE05Uo2cBl6XLK0A+9H7MV2anjpEcJnuDLN/v9vZfVvhgaaaI5gdka9at/ -yOPiZwud9AzqVN/Ssq+xIvEg37xEHA== ------END CERTIFICATE----- - -# Issuer: CN=SecureSign Root CA14 O=Cybertrust Japan Co., Ltd. -# Subject: CN=SecureSign Root CA14 O=Cybertrust Japan Co., Ltd. -# Label: "SecureSign Root CA14" -# Serial: 575790784512929437950770173562378038616896959179 -# MD5 Fingerprint: 71:0d:72:fa:92:19:65:5e:89:04:ac:16:33:f0:bc:d5 -# SHA1 Fingerprint: dd:50:c0:f7:79:b3:64:2e:74:a2:b8:9d:9f:d3:40:dd:bb:f0:f2:4f -# SHA256 Fingerprint: 4b:00:9c:10:34:49:4f:9a:b5:6b:ba:3b:a1:d6:27:31:fc:4d:20:d8:95:5a:dc:ec:10:a9:25:60:72:61:e3:38 ------BEGIN CERTIFICATE----- -MIIFcjCCA1qgAwIBAgIUZNtaDCBO6Ncpd8hQJ6JaJ90t8sswDQYJKoZIhvcNAQEM -BQAwUTELMAkGA1UEBhMCSlAxIzAhBgNVBAoTGkN5YmVydHJ1c3QgSmFwYW4gQ28u -LCBMdGQuMR0wGwYDVQQDExRTZWN1cmVTaWduIFJvb3QgQ0ExNDAeFw0yMDA0MDgw -NzA2MTlaFw00NTA0MDgwNzA2MTlaMFExCzAJBgNVBAYTAkpQMSMwIQYDVQQKExpD -eWJlcnRydXN0IEphcGFuIENvLiwgTHRkLjEdMBsGA1UEAxMUU2VjdXJlU2lnbiBS -b290IENBMTQwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDF0nqh1oq/ -FjHQmNE6lPxauG4iwWL3pwon71D2LrGeaBLwbCRjOfHw3xDG3rdSINVSW0KZnvOg -vlIfX8xnbacuUKLBl422+JX1sLrcneC+y9/3OPJH9aaakpUqYllQC6KxNedlsmGy -6pJxaeQp8E+BgQQ8sqVb1MWoWWd7VRxJq3qdwudzTe/NCcLEVxLbAQ4jeQkHO6Lo -/IrPj8BGJJw4J+CDnRugv3gVEOuGTgpa/d/aLIJ+7sr2KeH6caH3iGicnPCNvg9J -kdjqOvn90Ghx2+m1K06Ckm9mH+Dw3EzsytHqunQG+bOEkJTRX45zGRBdAuVwpcAQ -0BB8b8VYSbSwbprafZX1zNoCr7gsfXmPvkPx+SgojQlD+Ajda8iLLCSxjVIHvXib -y8posqTdDEx5YMaZ0ZPxMBoH064iwurO8YQJzOAUbn8/ftKChazcqRZOhaBgy/ac -18izju3Gm5h1DVXoX+WViwKkrkMpKBGk5hIwAUt1ax5mnXkvpXYvHUC0bcl9eQjs -0Wq2XSqypWa9a4X0dFbD9ed1Uigspf9mR6XU/v6eVL9lfgHWMI+lNpyiUBzuOIAB -SMbHdPTGrMNASRZhdCyvjG817XsYAFs2PJxQDcqSMxDxJklt33UkN4Ii1+iW/RVL -ApY+B3KVfqs9TC7XyvDf4Fg/LS8EmjijAQIDAQABo0IwQDAPBgNVHRMBAf8EBTAD -AQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUBpOjCl4oaTeqYR3r6/wtbyPk -86AwDQYJKoZIhvcNAQEMBQADggIBAJaAcgkGfpzMkwQWu6A6jZJOtxEaCnFxEM0E -rX+lRVAQZk5KQaID2RFPeje5S+LGjzJmdSX7684/AykmjbgWHfYfM25I5uj4V7Ib -ed87hwriZLoAymzvftAj63iP/2SbNDefNWWipAA9EiOWWF3KY4fGoweITedpdopT 
-zfFP7ELyk+OZpDc8h7hi2/DsHzc/N19DzFGdtfCXwreFamgLRB7lUe6TzktuhsHS -DCRZNhqfLJGP4xjblJUK7ZGqDpncllPjYYPGFrojutzdfhrGe0K22VoF3Jpf1d+4 -2kd92jjbrDnVHmtsKheMYc2xbXIBw8MgAGJoFjHVdqqGuw6qnsb58Nn4DSEC5MUo -FlkRudlpcyqSeLiSV5sI8jrlL5WwWLdrIBRtFO8KvH7YVdiI2i/6GaX7i+B/OfVy -K4XELKzvGUWSTLNhB9xNH27SgRNcmvMSZ4PPmz+Ln52kuaiWA3rF7iDeM9ovnhp6 -dB7h7sxaOgTdsxoEqBRjrLdHEoOabPXm6RUVkRqEGQ6UROcSjiVbgGcZ3GOTEAtl -Lor6CZpO2oYofaphNdgOpygau1LgePhsumywbrmHXumZNTfxPWQrqaA0k89jL9WB -365jJ6UeTo3cKXhZ+PmhIIynJkBugnLNeLLIjzwec+fBH7/PzqUqm9tEZDKgu39c -JRNItX+S ------END CERTIFICATE----- - -# Issuer: CN=SecureSign Root CA15 O=Cybertrust Japan Co., Ltd. -# Subject: CN=SecureSign Root CA15 O=Cybertrust Japan Co., Ltd. -# Label: "SecureSign Root CA15" -# Serial: 126083514594751269499665114766174399806381178503 -# MD5 Fingerprint: 13:30:fc:c4:62:a6:a9:de:b5:c1:68:af:b5:d2:31:47 -# SHA1 Fingerprint: cb:ba:83:c8:c1:5a:5d:f1:f9:73:6f:ca:d7:ef:28:13:06:4a:07:7d -# SHA256 Fingerprint: e7:78:f0:f0:95:fe:84:37:29:cd:1a:00:82:17:9e:53:14:a9:c2:91:44:28:05:e1:fb:1d:8f:b6:b8:88:6c:3a ------BEGIN CERTIFICATE----- -MIICIzCCAamgAwIBAgIUFhXHw9hJp75pDIqI7fBw+d23PocwCgYIKoZIzj0EAwMw -UTELMAkGA1UEBhMCSlAxIzAhBgNVBAoTGkN5YmVydHJ1c3QgSmFwYW4gQ28uLCBM -dGQuMR0wGwYDVQQDExRTZWN1cmVTaWduIFJvb3QgQ0ExNTAeFw0yMDA0MDgwODMy -NTZaFw00NTA0MDgwODMyNTZaMFExCzAJBgNVBAYTAkpQMSMwIQYDVQQKExpDeWJl -cnRydXN0IEphcGFuIENvLiwgTHRkLjEdMBsGA1UEAxMUU2VjdXJlU2lnbiBSb290 -IENBMTUwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQLUHSNZDKZmbPSYAi4Io5GdCx4 -wCtELW1fHcmuS1Iggz24FG1Th2CeX2yF2wYUleDHKP+dX+Sq8bOLbe1PL0vJSpSR -ZHX+AezB2Ot6lHhWGENfa4HL9rzatAy2KZMIaY+jQjBAMA8GA1UdEwEB/wQFMAMB -Af8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBTrQciu/NWeUUj1vYv0hyCTQSvT -9DAKBggqhkjOPQQDAwNoADBlAjEA2S6Jfl5OpBEHvVnCB96rMjhTKkZEBhd6zlHp -4P9mLQlO4E/0BdGF9jVg3PVys0Z9AjBEmEYagoUeYWmJSwdLZrWeqrqgHkHZAXQ6 -bkU6iYAZezKYVWOr62Nuk22rGwlgMU4= ------END CERTIFICATE----- diff --git a/env/lib/python3.10/site-packages/certifi/core.py b/env/lib/python3.10/site-packages/certifi/core.py deleted file mode 100644 index 91f538b..0000000 --- a/env/lib/python3.10/site-packages/certifi/core.py +++ /dev/null @@ -1,114 +0,0 @@ -""" -certifi.py -~~~~~~~~~~ - -This module returns the installation location of cacert.pem or its contents. -""" -import sys -import atexit - -def exit_cacert_ctx() -> None: - _CACERT_CTX.__exit__(None, None, None) # type: ignore[union-attr] - - -if sys.version_info >= (3, 11): - - from importlib.resources import as_file, files - - _CACERT_CTX = None - _CACERT_PATH = None - - def where() -> str: - # This is slightly terrible, but we want to delay extracting the file - # in cases where we're inside of a zipimport situation until someone - # actually calls where(), but we don't want to re-extract the file - # on every call of where(), so we'll do it once then store it in a - # global variable. - global _CACERT_CTX - global _CACERT_PATH - if _CACERT_PATH is None: - # This is slightly janky, the importlib.resources API wants you to - # manage the cleanup of this file, so it doesn't actually return a - # path, it returns a context manager that will give you the path - # when you enter it and will do any cleanup when you leave it. In - # the common case of not needing a temporary file, it will just - # return the file system location and the __exit__() is a no-op. - # - # We also have to hold onto the actual context manager, because - # it will do the cleanup whenever it gets garbage collected, so - # we will also store that at the global level as well. 
- _CACERT_CTX = as_file(files("certifi").joinpath("cacert.pem")) - _CACERT_PATH = str(_CACERT_CTX.__enter__()) - atexit.register(exit_cacert_ctx) - - return _CACERT_PATH - - def contents() -> str: - return files("certifi").joinpath("cacert.pem").read_text(encoding="ascii") - -elif sys.version_info >= (3, 7): - - from importlib.resources import path as get_path, read_text - - _CACERT_CTX = None - _CACERT_PATH = None - - def where() -> str: - # This is slightly terrible, but we want to delay extracting the - # file in cases where we're inside of a zipimport situation until - # someone actually calls where(), but we don't want to re-extract - # the file on every call of where(), so we'll do it once then store - # it in a global variable. - global _CACERT_CTX - global _CACERT_PATH - if _CACERT_PATH is None: - # This is slightly janky, the importlib.resources API wants you - # to manage the cleanup of this file, so it doesn't actually - # return a path, it returns a context manager that will give - # you the path when you enter it and will do any cleanup when - # you leave it. In the common case of not needing a temporary - # file, it will just return the file system location and the - # __exit__() is a no-op. - # - # We also have to hold onto the actual context manager, because - # it will do the cleanup whenever it gets garbage collected, so - # we will also store that at the global level as well. - _CACERT_CTX = get_path("certifi", "cacert.pem") - _CACERT_PATH = str(_CACERT_CTX.__enter__()) - atexit.register(exit_cacert_ctx) - - return _CACERT_PATH - - def contents() -> str: - return read_text("certifi", "cacert.pem", encoding="ascii") - -else: - import os - import types - from typing import Union - - Package = Union[types.ModuleType, str] - Resource = Union[str, "os.PathLike"] - - # This fallback will work for Python versions prior to 3.7 that lack the - # importlib.resources module but relies on the existing `where` function - # so won't address issues with environments like PyOxidizer that don't set - # __file__ on modules. - def read_text( - package: Package, - resource: Resource, - encoding: str = 'utf-8', - errors: str = 'strict' - ) -> str: - with open(where(), encoding=encoding) as data: - return data.read() - - # If we don't have importlib.resources, then we will just do the old logic - # of assuming we're on the filesystem and munge the path directly. 
- def where() -> str: - f = os.path.dirname(__file__) - - return os.path.join(f, "cacert.pem") - - def contents() -> str: - return read_text("certifi", "cacert.pem", encoding="ascii") diff --git a/env/lib/python3.10/site-packages/certifi/py.typed b/env/lib/python3.10/site-packages/certifi/py.typed deleted file mode 100644 index e69de29..0000000 diff --git a/env/lib/python3.10/site-packages/cffi-1.17.1.dist-info/INSTALLER b/env/lib/python3.10/site-packages/cffi-1.17.1.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/env/lib/python3.10/site-packages/cffi-1.17.1.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python3.10/site-packages/cffi-1.17.1.dist-info/LICENSE b/env/lib/python3.10/site-packages/cffi-1.17.1.dist-info/LICENSE deleted file mode 100644 index 29225ee..0000000 --- a/env/lib/python3.10/site-packages/cffi-1.17.1.dist-info/LICENSE +++ /dev/null @@ -1,26 +0,0 @@ - -Except when otherwise stated (look for LICENSE files in directories or -information at the beginning of each file) all software and -documentation is licensed as follows: - - The MIT License - - Permission is hereby granted, free of charge, to any person - obtaining a copy of this software and associated documentation - files (the "Software"), to deal in the Software without - restriction, including without limitation the rights to use, - copy, modify, merge, publish, distribute, sublicense, and/or - sell copies of the Software, and to permit persons to whom the - Software is furnished to do so, subject to the following conditions: - - The above copyright notice and this permission notice shall be included - in all copies or substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS - OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL - THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING - FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER - DEALINGS IN THE SOFTWARE. - diff --git a/env/lib/python3.10/site-packages/cffi-1.17.1.dist-info/METADATA b/env/lib/python3.10/site-packages/cffi-1.17.1.dist-info/METADATA deleted file mode 100644 index 60b0779..0000000 --- a/env/lib/python3.10/site-packages/cffi-1.17.1.dist-info/METADATA +++ /dev/null @@ -1,40 +0,0 @@ -Metadata-Version: 2.1 -Name: cffi -Version: 1.17.1 -Summary: Foreign Function Interface for Python calling C code. 
-Home-page: http://cffi.readthedocs.org -Author: Armin Rigo, Maciej Fijalkowski -Author-email: python-cffi@googlegroups.com -License: MIT -Project-URL: Documentation, http://cffi.readthedocs.org/ -Project-URL: Source Code, https://github.com/python-cffi/cffi -Project-URL: Issue Tracker, https://github.com/python-cffi/cffi/issues -Project-URL: Changelog, https://cffi.readthedocs.io/en/latest/whatsnew.html -Project-URL: Downloads, https://github.com/python-cffi/cffi/releases -Project-URL: Contact, https://groups.google.com/forum/#!forum/python-cffi -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Classifier: Programming Language :: Python :: 3.12 -Classifier: Programming Language :: Python :: 3.13 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Python :: Implementation :: PyPy -Classifier: License :: OSI Approved :: MIT License -Requires-Python: >=3.8 -License-File: LICENSE -Requires-Dist: pycparser - - -CFFI -==== - -Foreign Function Interface for Python calling C code. -Please see the `Documentation `_. - -Contact -------- - -`Mailing list `_ diff --git a/env/lib/python3.10/site-packages/cffi-1.17.1.dist-info/RECORD b/env/lib/python3.10/site-packages/cffi-1.17.1.dist-info/RECORD deleted file mode 100644 index 9e7aa9f..0000000 --- a/env/lib/python3.10/site-packages/cffi-1.17.1.dist-info/RECORD +++ /dev/null @@ -1,48 +0,0 @@ -_cffi_backend.cpython-310-x86_64-linux-gnu.so,sha256=pciUVwDoiYkGtuoos7gi5U2TSTeBHVoDkneECMzaObI,985520 -cffi-1.17.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -cffi-1.17.1.dist-info/LICENSE,sha256=BLgPWwd7vtaICM_rreteNSPyqMmpZJXFh72W3x6sKjM,1294 -cffi-1.17.1.dist-info/METADATA,sha256=u6nuvP_qPJKu2zvIbi2zkGzVu7KjnnRIYUFyIrOY3j4,1531 -cffi-1.17.1.dist-info/RECORD,, -cffi-1.17.1.dist-info/WHEEL,sha256=AxiTY2sz_GcPOsKDeggQV_FGgAhpyJSKs70WYTq6kog,151 -cffi-1.17.1.dist-info/entry_points.txt,sha256=y6jTxnyeuLnL-XJcDv8uML3n6wyYiGRg8MTp_QGJ9Ho,75 -cffi-1.17.1.dist-info/top_level.txt,sha256=rE7WR3rZfNKxWI9-jn6hsHCAl7MDkB-FmuQbxWjFehQ,19 -cffi/__init__.py,sha256=H6t_ebva6EeHpUuItFLW1gbRp94eZRNJODLaWKdbx1I,513 -cffi/__pycache__/__init__.cpython-310.pyc,, -cffi/__pycache__/_imp_emulation.cpython-310.pyc,, -cffi/__pycache__/_shimmed_dist_utils.cpython-310.pyc,, -cffi/__pycache__/api.cpython-310.pyc,, -cffi/__pycache__/backend_ctypes.cpython-310.pyc,, -cffi/__pycache__/cffi_opcode.cpython-310.pyc,, -cffi/__pycache__/commontypes.cpython-310.pyc,, -cffi/__pycache__/cparser.cpython-310.pyc,, -cffi/__pycache__/error.cpython-310.pyc,, -cffi/__pycache__/ffiplatform.cpython-310.pyc,, -cffi/__pycache__/lock.cpython-310.pyc,, -cffi/__pycache__/model.cpython-310.pyc,, -cffi/__pycache__/pkgconfig.cpython-310.pyc,, -cffi/__pycache__/recompiler.cpython-310.pyc,, -cffi/__pycache__/setuptools_ext.cpython-310.pyc,, -cffi/__pycache__/vengine_cpy.cpython-310.pyc,, -cffi/__pycache__/vengine_gen.cpython-310.pyc,, -cffi/__pycache__/verifier.cpython-310.pyc,, -cffi/_cffi_errors.h,sha256=zQXt7uR_m8gUW-fI2hJg0KoSkJFwXv8RGUkEDZ177dQ,3908 -cffi/_cffi_include.h,sha256=Exhmgm9qzHWzWivjfTe0D7Xp4rPUkVxdNuwGhMTMzbw,15055 -cffi/_embedding.h,sha256=EDKw5QrLvQoe3uosXB3H1xPVTYxsn33eV3A43zsA_Fw,18787 -cffi/_imp_emulation.py,sha256=RxREG8zAbI2RPGBww90u_5fi8sWdahpdipOoPzkp7C0,2960 
-cffi/_shimmed_dist_utils.py,sha256=Bjj2wm8yZbvFvWEx5AEfmqaqZyZFhYfoyLLQHkXZuao,2230 -cffi/api.py,sha256=alBv6hZQkjpmZplBphdaRn2lPO9-CORs_M7ixabvZWI,42169 -cffi/backend_ctypes.py,sha256=h5ZIzLc6BFVXnGyc9xPqZWUS7qGy7yFSDqXe68Sa8z4,42454 -cffi/cffi_opcode.py,sha256=JDV5l0R0_OadBX_uE7xPPTYtMdmpp8I9UYd6av7aiDU,5731 -cffi/commontypes.py,sha256=7N6zPtCFlvxXMWhHV08psUjdYIK2XgsN3yo5dgua_v4,2805 -cffi/cparser.py,sha256=0qI3mEzZSNVcCangoyXOoAcL-RhpQL08eG8798T024s,44789 -cffi/error.py,sha256=v6xTiS4U0kvDcy4h_BDRo5v39ZQuj-IMRYLv5ETddZs,877 -cffi/ffiplatform.py,sha256=avxFjdikYGJoEtmJO7ewVmwG_VEVl6EZ_WaNhZYCqv4,3584 -cffi/lock.py,sha256=l9TTdwMIMpi6jDkJGnQgE9cvTIR7CAntIJr8EGHt3pY,747 -cffi/model.py,sha256=W30UFQZE73jL5Mx5N81YT77us2W2iJjTm0XYfnwz1cg,21797 -cffi/parse_c_type.h,sha256=OdwQfwM9ktq6vlCB43exFQmxDBtj2MBNdK8LYl15tjw,5976 -cffi/pkgconfig.py,sha256=LP1w7vmWvmKwyqLaU1Z243FOWGNQMrgMUZrvgFuOlco,4374 -cffi/recompiler.py,sha256=sim4Tm7lamt2Jn8uzKN0wMYp6ODByk3g7of47-h9LD4,65367 -cffi/setuptools_ext.py,sha256=-ebj79lO2_AUH-kRcaja2pKY1Z_5tloGwsJgzK8P3Cc,8871 -cffi/vengine_cpy.py,sha256=8UagT6ZEOZf6Dju7_CfNulue8CnsHLEzJYhnqUhoF04,43752 -cffi/vengine_gen.py,sha256=DUlEIrDiVin1Pnhn1sfoamnS5NLqfJcOdhRoeSNeJRg,26939 -cffi/verifier.py,sha256=oX8jpaohg2Qm3aHcznidAdvrVm5N4sQYG0a3Eo5mIl4,11182 diff --git a/env/lib/python3.10/site-packages/cffi-1.17.1.dist-info/WHEEL b/env/lib/python3.10/site-packages/cffi-1.17.1.dist-info/WHEEL deleted file mode 100644 index 59abe6e..0000000 --- a/env/lib/python3.10/site-packages/cffi-1.17.1.dist-info/WHEEL +++ /dev/null @@ -1,6 +0,0 @@ -Wheel-Version: 1.0 -Generator: setuptools (74.1.1) -Root-Is-Purelib: false -Tag: cp310-cp310-manylinux_2_17_x86_64 -Tag: cp310-cp310-manylinux2014_x86_64 - diff --git a/env/lib/python3.10/site-packages/cffi-1.17.1.dist-info/entry_points.txt b/env/lib/python3.10/site-packages/cffi-1.17.1.dist-info/entry_points.txt deleted file mode 100644 index 4b0274f..0000000 --- a/env/lib/python3.10/site-packages/cffi-1.17.1.dist-info/entry_points.txt +++ /dev/null @@ -1,2 +0,0 @@ -[distutils.setup_keywords] -cffi_modules = cffi.setuptools_ext:cffi_modules diff --git a/env/lib/python3.10/site-packages/cffi-1.17.1.dist-info/top_level.txt b/env/lib/python3.10/site-packages/cffi-1.17.1.dist-info/top_level.txt deleted file mode 100644 index f645779..0000000 --- a/env/lib/python3.10/site-packages/cffi-1.17.1.dist-info/top_level.txt +++ /dev/null @@ -1,2 +0,0 @@ -_cffi_backend -cffi diff --git a/env/lib/python3.10/site-packages/cffi/__init__.py b/env/lib/python3.10/site-packages/cffi/__init__.py deleted file mode 100644 index 2e35a38..0000000 --- a/env/lib/python3.10/site-packages/cffi/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -__all__ = ['FFI', 'VerificationError', 'VerificationMissing', 'CDefError', - 'FFIError'] - -from .api import FFI -from .error import CDefError, FFIError, VerificationError, VerificationMissing -from .error import PkgConfigError - -__version__ = "1.17.1" -__version_info__ = (1, 17, 1) - -# The verifier module file names are based on the CRC32 of a string that -# contains the following version number. It may be older than __version__ -# if nothing is clearly incompatible. 
-__version_verifier_modules__ = "0.8.6" diff --git a/env/lib/python3.10/site-packages/cffi/__pycache__/__init__.cpython-310.pyc b/env/lib/python3.10/site-packages/cffi/__pycache__/__init__.cpython-310.pyc deleted file mode 100644 index 0803f73..0000000 Binary files a/env/lib/python3.10/site-packages/cffi/__pycache__/__init__.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cffi/__pycache__/_imp_emulation.cpython-310.pyc b/env/lib/python3.10/site-packages/cffi/__pycache__/_imp_emulation.cpython-310.pyc deleted file mode 100644 index 511543c..0000000 Binary files a/env/lib/python3.10/site-packages/cffi/__pycache__/_imp_emulation.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cffi/__pycache__/_shimmed_dist_utils.cpython-310.pyc b/env/lib/python3.10/site-packages/cffi/__pycache__/_shimmed_dist_utils.cpython-310.pyc deleted file mode 100644 index 20814b9..0000000 Binary files a/env/lib/python3.10/site-packages/cffi/__pycache__/_shimmed_dist_utils.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cffi/__pycache__/api.cpython-310.pyc b/env/lib/python3.10/site-packages/cffi/__pycache__/api.cpython-310.pyc deleted file mode 100644 index fb38357..0000000 Binary files a/env/lib/python3.10/site-packages/cffi/__pycache__/api.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cffi/__pycache__/backend_ctypes.cpython-310.pyc b/env/lib/python3.10/site-packages/cffi/__pycache__/backend_ctypes.cpython-310.pyc deleted file mode 100644 index bcdc30b..0000000 Binary files a/env/lib/python3.10/site-packages/cffi/__pycache__/backend_ctypes.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cffi/__pycache__/cffi_opcode.cpython-310.pyc b/env/lib/python3.10/site-packages/cffi/__pycache__/cffi_opcode.cpython-310.pyc deleted file mode 100644 index 06deb96..0000000 Binary files a/env/lib/python3.10/site-packages/cffi/__pycache__/cffi_opcode.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cffi/__pycache__/commontypes.cpython-310.pyc b/env/lib/python3.10/site-packages/cffi/__pycache__/commontypes.cpython-310.pyc deleted file mode 100644 index 2caac2e..0000000 Binary files a/env/lib/python3.10/site-packages/cffi/__pycache__/commontypes.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cffi/__pycache__/cparser.cpython-310.pyc b/env/lib/python3.10/site-packages/cffi/__pycache__/cparser.cpython-310.pyc deleted file mode 100644 index 5f0d53e..0000000 Binary files a/env/lib/python3.10/site-packages/cffi/__pycache__/cparser.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cffi/__pycache__/error.cpython-310.pyc b/env/lib/python3.10/site-packages/cffi/__pycache__/error.cpython-310.pyc deleted file mode 100644 index 76186a0..0000000 Binary files a/env/lib/python3.10/site-packages/cffi/__pycache__/error.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cffi/__pycache__/ffiplatform.cpython-310.pyc b/env/lib/python3.10/site-packages/cffi/__pycache__/ffiplatform.cpython-310.pyc deleted file mode 100644 index c1accab..0000000 Binary files a/env/lib/python3.10/site-packages/cffi/__pycache__/ffiplatform.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cffi/__pycache__/lock.cpython-310.pyc b/env/lib/python3.10/site-packages/cffi/__pycache__/lock.cpython-310.pyc deleted file mode 100644 index 81f12df..0000000 
Binary files a/env/lib/python3.10/site-packages/cffi/__pycache__/lock.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cffi/__pycache__/model.cpython-310.pyc b/env/lib/python3.10/site-packages/cffi/__pycache__/model.cpython-310.pyc deleted file mode 100644 index 04370d7..0000000 Binary files a/env/lib/python3.10/site-packages/cffi/__pycache__/model.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cffi/__pycache__/pkgconfig.cpython-310.pyc b/env/lib/python3.10/site-packages/cffi/__pycache__/pkgconfig.cpython-310.pyc deleted file mode 100644 index 94ef634..0000000 Binary files a/env/lib/python3.10/site-packages/cffi/__pycache__/pkgconfig.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cffi/__pycache__/recompiler.cpython-310.pyc b/env/lib/python3.10/site-packages/cffi/__pycache__/recompiler.cpython-310.pyc deleted file mode 100644 index 47f30a8..0000000 Binary files a/env/lib/python3.10/site-packages/cffi/__pycache__/recompiler.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cffi/__pycache__/setuptools_ext.cpython-310.pyc b/env/lib/python3.10/site-packages/cffi/__pycache__/setuptools_ext.cpython-310.pyc deleted file mode 100644 index 2905a81..0000000 Binary files a/env/lib/python3.10/site-packages/cffi/__pycache__/setuptools_ext.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cffi/__pycache__/vengine_cpy.cpython-310.pyc b/env/lib/python3.10/site-packages/cffi/__pycache__/vengine_cpy.cpython-310.pyc deleted file mode 100644 index dd0172e..0000000 Binary files a/env/lib/python3.10/site-packages/cffi/__pycache__/vengine_cpy.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cffi/__pycache__/vengine_gen.cpython-310.pyc b/env/lib/python3.10/site-packages/cffi/__pycache__/vengine_gen.cpython-310.pyc deleted file mode 100644 index 4a394df..0000000 Binary files a/env/lib/python3.10/site-packages/cffi/__pycache__/vengine_gen.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cffi/__pycache__/verifier.cpython-310.pyc b/env/lib/python3.10/site-packages/cffi/__pycache__/verifier.cpython-310.pyc deleted file mode 100644 index 4b9c77a..0000000 Binary files a/env/lib/python3.10/site-packages/cffi/__pycache__/verifier.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cffi/_cffi_errors.h b/env/lib/python3.10/site-packages/cffi/_cffi_errors.h deleted file mode 100644 index 158e059..0000000 --- a/env/lib/python3.10/site-packages/cffi/_cffi_errors.h +++ /dev/null @@ -1,149 +0,0 @@ -#ifndef CFFI_MESSAGEBOX -# ifdef _MSC_VER -# define CFFI_MESSAGEBOX 1 -# else -# define CFFI_MESSAGEBOX 0 -# endif -#endif - - -#if CFFI_MESSAGEBOX -/* Windows only: logic to take the Python-CFFI embedding logic - initialization errors and display them in a background thread - with MessageBox. The idea is that if the whole program closes - as a result of this problem, then likely it is already a console - program and you can read the stderr output in the console too. - If it is not a console program, then it will likely show its own - dialog to complain, or generally not abruptly close, and for this - case the background thread should stay alive. 
-*/ -static void *volatile _cffi_bootstrap_text; - -static PyObject *_cffi_start_error_capture(void) -{ - PyObject *result = NULL; - PyObject *x, *m, *bi; - - if (InterlockedCompareExchangePointer(&_cffi_bootstrap_text, - (void *)1, NULL) != NULL) - return (PyObject *)1; - - m = PyImport_AddModule("_cffi_error_capture"); - if (m == NULL) - goto error; - - result = PyModule_GetDict(m); - if (result == NULL) - goto error; - -#if PY_MAJOR_VERSION >= 3 - bi = PyImport_ImportModule("builtins"); -#else - bi = PyImport_ImportModule("__builtin__"); -#endif - if (bi == NULL) - goto error; - PyDict_SetItemString(result, "__builtins__", bi); - Py_DECREF(bi); - - x = PyRun_String( - "import sys\n" - "class FileLike:\n" - " def write(self, x):\n" - " try:\n" - " of.write(x)\n" - " except: pass\n" - " self.buf += x\n" - " def flush(self):\n" - " pass\n" - "fl = FileLike()\n" - "fl.buf = ''\n" - "of = sys.stderr\n" - "sys.stderr = fl\n" - "def done():\n" - " sys.stderr = of\n" - " return fl.buf\n", /* make sure the returned value stays alive */ - Py_file_input, - result, result); - Py_XDECREF(x); - - error: - if (PyErr_Occurred()) - { - PyErr_WriteUnraisable(Py_None); - PyErr_Clear(); - } - return result; -} - -#pragma comment(lib, "user32.lib") - -static DWORD WINAPI _cffi_bootstrap_dialog(LPVOID ignored) -{ - Sleep(666); /* may be interrupted if the whole process is closing */ -#if PY_MAJOR_VERSION >= 3 - MessageBoxW(NULL, (wchar_t *)_cffi_bootstrap_text, - L"Python-CFFI error", - MB_OK | MB_ICONERROR); -#else - MessageBoxA(NULL, (char *)_cffi_bootstrap_text, - "Python-CFFI error", - MB_OK | MB_ICONERROR); -#endif - _cffi_bootstrap_text = NULL; - return 0; -} - -static void _cffi_stop_error_capture(PyObject *ecap) -{ - PyObject *s; - void *text; - - if (ecap == (PyObject *)1) - return; - - if (ecap == NULL) - goto error; - - s = PyRun_String("done()", Py_eval_input, ecap, ecap); - if (s == NULL) - goto error; - - /* Show a dialog box, but in a background thread, and - never show multiple dialog boxes at once. */ -#if PY_MAJOR_VERSION >= 3 - text = PyUnicode_AsWideCharString(s, NULL); -#else - text = PyString_AsString(s); -#endif - - _cffi_bootstrap_text = text; - - if (text != NULL) - { - HANDLE h; - h = CreateThread(NULL, 0, _cffi_bootstrap_dialog, - NULL, 0, NULL); - if (h != NULL) - CloseHandle(h); - } - /* decref the string, but it should stay alive as 'fl.buf' - in the small module above. It will really be freed only if - we later get another similar error. So it's a leak of at - most one copy of the small module. That's fine for this - situation which is usually a "fatal error" anyway. */ - Py_DECREF(s); - PyErr_Clear(); - return; - - error: - _cffi_bootstrap_text = NULL; - PyErr_Clear(); -} - -#else - -static PyObject *_cffi_start_error_capture(void) { return NULL; } -static void _cffi_stop_error_capture(PyObject *ecap) { } - -#endif diff --git a/env/lib/python3.10/site-packages/cffi/_cffi_include.h b/env/lib/python3.10/site-packages/cffi/_cffi_include.h deleted file mode 100644 index 908a1d7..0000000 --- a/env/lib/python3.10/site-packages/cffi/_cffi_include.h +++ /dev/null @@ -1,389 +0,0 @@ -#define _CFFI_ - -/* We try to define Py_LIMITED_API before including Python.h. - - Mess: we can only define it if Py_DEBUG, Py_TRACE_REFS and - Py_REF_DEBUG are not defined. This is a best-effort approximation: - we can learn about Py_DEBUG from pyconfig.h, but it is unclear if - the same works for the other two macros. Py_DEBUG implies them, - but not the other way around. 
- - The implementation is messy (issue #350): on Windows, with _MSC_VER, - we have to define Py_LIMITED_API even before including pyconfig.h. - In that case, we guess what pyconfig.h will do to the macros above, - and check our guess after the #include. - - Note that on Windows, with CPython 3.x, you need >= 3.5 and virtualenv - version >= 16.0.0. With older versions of either, you don't get a - copy of PYTHON3.DLL in the virtualenv. We can't check the version of - CPython *before* we even include pyconfig.h. ffi.set_source() puts - a ``#define _CFFI_NO_LIMITED_API'' at the start of this file if it is - running on Windows < 3.5, as an attempt at fixing it, but that's - arguably wrong because it may not be the target version of Python. - Still better than nothing I guess. As another workaround, you can - remove the definition of Py_LIMITED_API here. - - See also 'py_limited_api' in cffi/setuptools_ext.py. -*/ -#if !defined(_CFFI_USE_EMBEDDING) && !defined(Py_LIMITED_API) -# ifdef _MSC_VER -# if !defined(_DEBUG) && !defined(Py_DEBUG) && !defined(Py_TRACE_REFS) && !defined(Py_REF_DEBUG) && !defined(_CFFI_NO_LIMITED_API) -# define Py_LIMITED_API -# endif -# include - /* sanity-check: Py_LIMITED_API will cause crashes if any of these - are also defined. Normally, the Python file PC/pyconfig.h does not - cause any of these to be defined, with the exception that _DEBUG - causes Py_DEBUG. Double-check that. */ -# ifdef Py_LIMITED_API -# if defined(Py_DEBUG) -# error "pyconfig.h unexpectedly defines Py_DEBUG, but Py_LIMITED_API is set" -# endif -# if defined(Py_TRACE_REFS) -# error "pyconfig.h unexpectedly defines Py_TRACE_REFS, but Py_LIMITED_API is set" -# endif -# if defined(Py_REF_DEBUG) -# error "pyconfig.h unexpectedly defines Py_REF_DEBUG, but Py_LIMITED_API is set" -# endif -# endif -# else -# include -# if !defined(Py_DEBUG) && !defined(Py_TRACE_REFS) && !defined(Py_REF_DEBUG) && !defined(_CFFI_NO_LIMITED_API) -# define Py_LIMITED_API -# endif -# endif -#endif - -#include -#ifdef __cplusplus -extern "C" { -#endif -#include -#include "parse_c_type.h" - -/* this block of #ifs should be kept exactly identical between - c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py - and cffi/_cffi_include.h */ -#if defined(_MSC_VER) -# include /* for alloca() */ -# if _MSC_VER < 1600 /* MSVC < 2010 */ - typedef __int8 int8_t; - typedef __int16 int16_t; - typedef __int32 int32_t; - typedef __int64 int64_t; - typedef unsigned __int8 uint8_t; - typedef unsigned __int16 uint16_t; - typedef unsigned __int32 uint32_t; - typedef unsigned __int64 uint64_t; - typedef __int8 int_least8_t; - typedef __int16 int_least16_t; - typedef __int32 int_least32_t; - typedef __int64 int_least64_t; - typedef unsigned __int8 uint_least8_t; - typedef unsigned __int16 uint_least16_t; - typedef unsigned __int32 uint_least32_t; - typedef unsigned __int64 uint_least64_t; - typedef __int8 int_fast8_t; - typedef __int16 int_fast16_t; - typedef __int32 int_fast32_t; - typedef __int64 int_fast64_t; - typedef unsigned __int8 uint_fast8_t; - typedef unsigned __int16 uint_fast16_t; - typedef unsigned __int32 uint_fast32_t; - typedef unsigned __int64 uint_fast64_t; - typedef __int64 intmax_t; - typedef unsigned __int64 uintmax_t; -# else -# include -# endif -# if _MSC_VER < 1800 /* MSVC < 2013 */ -# ifndef __cplusplus - typedef unsigned char _Bool; -# endif -# endif -# define _cffi_float_complex_t _Fcomplex /* include for it */ -# define _cffi_double_complex_t _Dcomplex /* include for it */ -#else -# include -# if (defined (__SVR4) 
&& defined (__sun)) || defined(_AIX) || defined(__hpux) -# include -# endif -# define _cffi_float_complex_t float _Complex -# define _cffi_double_complex_t double _Complex -#endif - -#ifdef __GNUC__ -# define _CFFI_UNUSED_FN __attribute__((unused)) -#else -# define _CFFI_UNUSED_FN /* nothing */ -#endif - -#ifdef __cplusplus -# ifndef _Bool - typedef bool _Bool; /* semi-hackish: C++ has no _Bool; bool is builtin */ -# endif -#endif - -/********** CPython-specific section **********/ -#ifndef PYPY_VERSION - - -#if PY_MAJOR_VERSION >= 3 -# define PyInt_FromLong PyLong_FromLong -#endif - -#define _cffi_from_c_double PyFloat_FromDouble -#define _cffi_from_c_float PyFloat_FromDouble -#define _cffi_from_c_long PyInt_FromLong -#define _cffi_from_c_ulong PyLong_FromUnsignedLong -#define _cffi_from_c_longlong PyLong_FromLongLong -#define _cffi_from_c_ulonglong PyLong_FromUnsignedLongLong -#define _cffi_from_c__Bool PyBool_FromLong - -#define _cffi_to_c_double PyFloat_AsDouble -#define _cffi_to_c_float PyFloat_AsDouble - -#define _cffi_from_c_int(x, type) \ - (((type)-1) > 0 ? /* unsigned */ \ - (sizeof(type) < sizeof(long) ? \ - PyInt_FromLong((long)x) : \ - sizeof(type) == sizeof(long) ? \ - PyLong_FromUnsignedLong((unsigned long)x) : \ - PyLong_FromUnsignedLongLong((unsigned long long)x)) : \ - (sizeof(type) <= sizeof(long) ? \ - PyInt_FromLong((long)x) : \ - PyLong_FromLongLong((long long)x))) - -#define _cffi_to_c_int(o, type) \ - ((type)( \ - sizeof(type) == 1 ? (((type)-1) > 0 ? (type)_cffi_to_c_u8(o) \ - : (type)_cffi_to_c_i8(o)) : \ - sizeof(type) == 2 ? (((type)-1) > 0 ? (type)_cffi_to_c_u16(o) \ - : (type)_cffi_to_c_i16(o)) : \ - sizeof(type) == 4 ? (((type)-1) > 0 ? (type)_cffi_to_c_u32(o) \ - : (type)_cffi_to_c_i32(o)) : \ - sizeof(type) == 8 ? (((type)-1) > 0 ? 
(type)_cffi_to_c_u64(o) \ - : (type)_cffi_to_c_i64(o)) : \ - (Py_FatalError("unsupported size for type " #type), (type)0))) - -#define _cffi_to_c_i8 \ - ((int(*)(PyObject *))_cffi_exports[1]) -#define _cffi_to_c_u8 \ - ((int(*)(PyObject *))_cffi_exports[2]) -#define _cffi_to_c_i16 \ - ((int(*)(PyObject *))_cffi_exports[3]) -#define _cffi_to_c_u16 \ - ((int(*)(PyObject *))_cffi_exports[4]) -#define _cffi_to_c_i32 \ - ((int(*)(PyObject *))_cffi_exports[5]) -#define _cffi_to_c_u32 \ - ((unsigned int(*)(PyObject *))_cffi_exports[6]) -#define _cffi_to_c_i64 \ - ((long long(*)(PyObject *))_cffi_exports[7]) -#define _cffi_to_c_u64 \ - ((unsigned long long(*)(PyObject *))_cffi_exports[8]) -#define _cffi_to_c_char \ - ((int(*)(PyObject *))_cffi_exports[9]) -#define _cffi_from_c_pointer \ - ((PyObject *(*)(char *, struct _cffi_ctypedescr *))_cffi_exports[10]) -#define _cffi_to_c_pointer \ - ((char *(*)(PyObject *, struct _cffi_ctypedescr *))_cffi_exports[11]) -#define _cffi_get_struct_layout \ - not used any more -#define _cffi_restore_errno \ - ((void(*)(void))_cffi_exports[13]) -#define _cffi_save_errno \ - ((void(*)(void))_cffi_exports[14]) -#define _cffi_from_c_char \ - ((PyObject *(*)(char))_cffi_exports[15]) -#define _cffi_from_c_deref \ - ((PyObject *(*)(char *, struct _cffi_ctypedescr *))_cffi_exports[16]) -#define _cffi_to_c \ - ((int(*)(char *, struct _cffi_ctypedescr *, PyObject *))_cffi_exports[17]) -#define _cffi_from_c_struct \ - ((PyObject *(*)(char *, struct _cffi_ctypedescr *))_cffi_exports[18]) -#define _cffi_to_c_wchar_t \ - ((_cffi_wchar_t(*)(PyObject *))_cffi_exports[19]) -#define _cffi_from_c_wchar_t \ - ((PyObject *(*)(_cffi_wchar_t))_cffi_exports[20]) -#define _cffi_to_c_long_double \ - ((long double(*)(PyObject *))_cffi_exports[21]) -#define _cffi_to_c__Bool \ - ((_Bool(*)(PyObject *))_cffi_exports[22]) -#define _cffi_prepare_pointer_call_argument \ - ((Py_ssize_t(*)(struct _cffi_ctypedescr *, \ - PyObject *, char **))_cffi_exports[23]) -#define _cffi_convert_array_from_object \ - ((int(*)(char *, struct _cffi_ctypedescr *, PyObject *))_cffi_exports[24]) -#define _CFFI_CPIDX 25 -#define _cffi_call_python \ - ((void(*)(struct _cffi_externpy_s *, char *))_cffi_exports[_CFFI_CPIDX]) -#define _cffi_to_c_wchar3216_t \ - ((int(*)(PyObject *))_cffi_exports[26]) -#define _cffi_from_c_wchar3216_t \ - ((PyObject *(*)(int))_cffi_exports[27]) -#define _CFFI_NUM_EXPORTS 28 - -struct _cffi_ctypedescr; - -static void *_cffi_exports[_CFFI_NUM_EXPORTS]; - -#define _cffi_type(index) ( \ - assert((((uintptr_t)_cffi_types[index]) & 1) == 0), \ - (struct _cffi_ctypedescr *)_cffi_types[index]) - -static PyObject *_cffi_init(const char *module_name, Py_ssize_t version, - const struct _cffi_type_context_s *ctx) -{ - PyObject *module, *o_arg, *new_module; - void *raw[] = { - (void *)module_name, - (void *)version, - (void *)_cffi_exports, - (void *)ctx, - }; - - module = PyImport_ImportModule("_cffi_backend"); - if (module == NULL) - goto failure; - - o_arg = PyLong_FromVoidPtr((void *)raw); - if (o_arg == NULL) - goto failure; - - new_module = PyObject_CallMethod( - module, (char *)"_init_cffi_1_0_external_module", (char *)"O", o_arg); - - Py_DECREF(o_arg); - Py_DECREF(module); - return new_module; - - failure: - Py_XDECREF(module); - return NULL; -} - - -#ifdef HAVE_WCHAR_H -typedef wchar_t _cffi_wchar_t; -#else -typedef uint16_t _cffi_wchar_t; /* same random pick as _cffi_backend.c */ -#endif - -_CFFI_UNUSED_FN static uint16_t _cffi_to_c_char16_t(PyObject *o) -{ - if (sizeof(_cffi_wchar_t) == 2) 
- return (uint16_t)_cffi_to_c_wchar_t(o); - else - return (uint16_t)_cffi_to_c_wchar3216_t(o); -} - -_CFFI_UNUSED_FN static PyObject *_cffi_from_c_char16_t(uint16_t x) -{ - if (sizeof(_cffi_wchar_t) == 2) - return _cffi_from_c_wchar_t((_cffi_wchar_t)x); - else - return _cffi_from_c_wchar3216_t((int)x); -} - -_CFFI_UNUSED_FN static int _cffi_to_c_char32_t(PyObject *o) -{ - if (sizeof(_cffi_wchar_t) == 4) - return (int)_cffi_to_c_wchar_t(o); - else - return (int)_cffi_to_c_wchar3216_t(o); -} - -_CFFI_UNUSED_FN static PyObject *_cffi_from_c_char32_t(unsigned int x) -{ - if (sizeof(_cffi_wchar_t) == 4) - return _cffi_from_c_wchar_t((_cffi_wchar_t)x); - else - return _cffi_from_c_wchar3216_t((int)x); -} - -union _cffi_union_alignment_u { - unsigned char m_char; - unsigned short m_short; - unsigned int m_int; - unsigned long m_long; - unsigned long long m_longlong; - float m_float; - double m_double; - long double m_longdouble; -}; - -struct _cffi_freeme_s { - struct _cffi_freeme_s *next; - union _cffi_union_alignment_u alignment; -}; - -_CFFI_UNUSED_FN static int -_cffi_convert_array_argument(struct _cffi_ctypedescr *ctptr, PyObject *arg, - char **output_data, Py_ssize_t datasize, - struct _cffi_freeme_s **freeme) -{ - char *p; - if (datasize < 0) - return -1; - - p = *output_data; - if (p == NULL) { - struct _cffi_freeme_s *fp = (struct _cffi_freeme_s *)PyObject_Malloc( - offsetof(struct _cffi_freeme_s, alignment) + (size_t)datasize); - if (fp == NULL) - return -1; - fp->next = *freeme; - *freeme = fp; - p = *output_data = (char *)&fp->alignment; - } - memset((void *)p, 0, (size_t)datasize); - return _cffi_convert_array_from_object(p, ctptr, arg); -} - -_CFFI_UNUSED_FN static void -_cffi_free_array_arguments(struct _cffi_freeme_s *freeme) -{ - do { - void *p = (void *)freeme; - freeme = freeme->next; - PyObject_Free(p); - } while (freeme != NULL); -} - -/********** end CPython-specific section **********/ -#else -_CFFI_UNUSED_FN -static void (*_cffi_call_python_org)(struct _cffi_externpy_s *, char *); -# define _cffi_call_python _cffi_call_python_org -#endif - - -#define _cffi_array_len(array) (sizeof(array) / sizeof((array)[0])) - -#define _cffi_prim_int(size, sign) \ - ((size) == 1 ? ((sign) ? _CFFI_PRIM_INT8 : _CFFI_PRIM_UINT8) : \ - (size) == 2 ? ((sign) ? _CFFI_PRIM_INT16 : _CFFI_PRIM_UINT16) : \ - (size) == 4 ? ((sign) ? _CFFI_PRIM_INT32 : _CFFI_PRIM_UINT32) : \ - (size) == 8 ? ((sign) ? _CFFI_PRIM_INT64 : _CFFI_PRIM_UINT64) : \ - _CFFI__UNKNOWN_PRIM) - -#define _cffi_prim_float(size) \ - ((size) == sizeof(float) ? _CFFI_PRIM_FLOAT : \ - (size) == sizeof(double) ? _CFFI_PRIM_DOUBLE : \ - (size) == sizeof(long double) ? 
_CFFI__UNKNOWN_LONG_DOUBLE : \ - _CFFI__UNKNOWN_FLOAT_PRIM) - -#define _cffi_check_int(got, got_nonpos, expected) \ - ((got_nonpos) == (expected <= 0) && \ - (got) == (unsigned long long)expected) - -#ifdef MS_WIN32 -# define _cffi_stdcall __stdcall -#else -# define _cffi_stdcall /* nothing */ -#endif - -#ifdef __cplusplus -} -#endif diff --git a/env/lib/python3.10/site-packages/cffi/_embedding.h b/env/lib/python3.10/site-packages/cffi/_embedding.h deleted file mode 100644 index 94d8b30..0000000 --- a/env/lib/python3.10/site-packages/cffi/_embedding.h +++ /dev/null @@ -1,550 +0,0 @@ - -/***** Support code for embedding *****/ - -#ifdef __cplusplus -extern "C" { -#endif - - -#if defined(_WIN32) -# define CFFI_DLLEXPORT __declspec(dllexport) -#elif defined(__GNUC__) -# define CFFI_DLLEXPORT __attribute__((visibility("default"))) -#else -# define CFFI_DLLEXPORT /* nothing */ -#endif - - -/* There are two global variables of type _cffi_call_python_fnptr: - - * _cffi_call_python, which we declare just below, is the one called - by ``extern "Python"`` implementations. - - * _cffi_call_python_org, which on CPython is actually part of the - _cffi_exports[] array, is the function pointer copied from - _cffi_backend. If _cffi_start_python() fails, then this is set - to NULL; otherwise, it should never be NULL. - - After initialization is complete, both are equal. However, the - first one remains equal to &_cffi_start_and_call_python until the - very end of initialization, when we are (or should be) sure that - concurrent threads also see a completely initialized world, and - only then is it changed. -*/ -#undef _cffi_call_python -typedef void (*_cffi_call_python_fnptr)(struct _cffi_externpy_s *, char *); -static void _cffi_start_and_call_python(struct _cffi_externpy_s *, char *); -static _cffi_call_python_fnptr _cffi_call_python = &_cffi_start_and_call_python; - - -#ifndef _MSC_VER - /* --- Assuming a GCC not infinitely old --- */ -# define cffi_compare_and_swap(l,o,n) __sync_bool_compare_and_swap(l,o,n) -# define cffi_write_barrier() __sync_synchronize() -# if !defined(__amd64__) && !defined(__x86_64__) && \ - !defined(__i386__) && !defined(__i386) -# define cffi_read_barrier() __sync_synchronize() -# else -# define cffi_read_barrier() (void)0 -# endif -#else - /* --- Windows threads version --- */ -# include -# define cffi_compare_and_swap(l,o,n) \ - (InterlockedCompareExchangePointer(l,n,o) == (o)) -# define cffi_write_barrier() InterlockedCompareExchange(&_cffi_dummy,0,0) -# define cffi_read_barrier() (void)0 -static volatile LONG _cffi_dummy; -#endif - -#ifdef WITH_THREAD -# ifndef _MSC_VER -# include - static pthread_mutex_t _cffi_embed_startup_lock; -# else - static CRITICAL_SECTION _cffi_embed_startup_lock; -# endif - static char _cffi_embed_startup_lock_ready = 0; -#endif - -static void _cffi_acquire_reentrant_mutex(void) -{ - static void *volatile lock = NULL; - - while (!cffi_compare_and_swap(&lock, NULL, (void *)1)) { - /* should ideally do a spin loop instruction here, but - hard to do it portably and doesn't really matter I - think: pthread_mutex_init() should be very fast, and - this is only run at start-up anyway. 
*/ - } - -#ifdef WITH_THREAD - if (!_cffi_embed_startup_lock_ready) { -# ifndef _MSC_VER - pthread_mutexattr_t attr; - pthread_mutexattr_init(&attr); - pthread_mutexattr_settype(&attr, PTHREAD_MUTEX_RECURSIVE); - pthread_mutex_init(&_cffi_embed_startup_lock, &attr); -# else - InitializeCriticalSection(&_cffi_embed_startup_lock); -# endif - _cffi_embed_startup_lock_ready = 1; - } -#endif - - while (!cffi_compare_and_swap(&lock, (void *)1, NULL)) - ; - -#ifndef _MSC_VER - pthread_mutex_lock(&_cffi_embed_startup_lock); -#else - EnterCriticalSection(&_cffi_embed_startup_lock); -#endif -} - -static void _cffi_release_reentrant_mutex(void) -{ -#ifndef _MSC_VER - pthread_mutex_unlock(&_cffi_embed_startup_lock); -#else - LeaveCriticalSection(&_cffi_embed_startup_lock); -#endif -} - - -/********** CPython-specific section **********/ -#ifndef PYPY_VERSION - -#include "_cffi_errors.h" - - -#define _cffi_call_python_org _cffi_exports[_CFFI_CPIDX] - -PyMODINIT_FUNC _CFFI_PYTHON_STARTUP_FUNC(void); /* forward */ - -static void _cffi_py_initialize(void) -{ - /* XXX use initsigs=0, which "skips initialization registration of - signal handlers, which might be useful when Python is - embedded" according to the Python docs. But review and think - if it should be a user-controllable setting. - - XXX we should also give a way to write errors to a buffer - instead of to stderr. - - XXX if importing 'site' fails, CPython (any version) calls - exit(). Should we try to work around this behavior here? - */ - Py_InitializeEx(0); -} - -static int _cffi_initialize_python(void) -{ - /* This initializes Python, imports _cffi_backend, and then the - present .dll/.so is set up as a CPython C extension module. - */ - int result; - PyGILState_STATE state; - PyObject *pycode=NULL, *global_dict=NULL, *x; - PyObject *builtins; - - state = PyGILState_Ensure(); - - /* Call the initxxx() function from the present module. It will - create and initialize us as a CPython extension module, instead - of letting the startup Python code do it---it might reimport - the same .dll/.so and get maybe confused on some platforms. - It might also have troubles locating the .dll/.so again for all - I know. - */ - (void)_CFFI_PYTHON_STARTUP_FUNC(); - if (PyErr_Occurred()) - goto error; - - /* Now run the Python code provided to ffi.embedding_init_code(). - */ - pycode = Py_CompileString(_CFFI_PYTHON_STARTUP_CODE, - "", - Py_file_input); - if (pycode == NULL) - goto error; - global_dict = PyDict_New(); - if (global_dict == NULL) - goto error; - builtins = PyEval_GetBuiltins(); - if (builtins == NULL) - goto error; - if (PyDict_SetItemString(global_dict, "__builtins__", builtins) < 0) - goto error; - x = PyEval_EvalCode( -#if PY_MAJOR_VERSION < 3 - (PyCodeObject *) -#endif - pycode, global_dict, global_dict); - if (x == NULL) - goto error; - Py_DECREF(x); - - /* Done! Now if we've been called from - _cffi_start_and_call_python() in an ``extern "Python"``, we can - only hope that the Python code did correctly set up the - corresponding @ffi.def_extern() function. Otherwise, the - general logic of ``extern "Python"`` functions (inside the - _cffi_backend module) will find that the reference is still - missing and print an error. - */ - result = 0; - done: - Py_XDECREF(pycode); - Py_XDECREF(global_dict); - PyGILState_Release(state); - return result; - - error:; - { - /* Print as much information as potentially useful. 
- Debugging load-time failures with embedding is not fun - */ - PyObject *ecap; - PyObject *exception, *v, *tb, *f, *modules, *mod; - PyErr_Fetch(&exception, &v, &tb); - ecap = _cffi_start_error_capture(); - f = PySys_GetObject((char *)"stderr"); - if (f != NULL && f != Py_None) { - PyFile_WriteString( - "Failed to initialize the Python-CFFI embedding logic:\n\n", f); - } - - if (exception != NULL) { - PyErr_NormalizeException(&exception, &v, &tb); - PyErr_Display(exception, v, tb); - } - Py_XDECREF(exception); - Py_XDECREF(v); - Py_XDECREF(tb); - - if (f != NULL && f != Py_None) { - PyFile_WriteString("\nFrom: " _CFFI_MODULE_NAME - "\ncompiled with cffi version: 1.17.1" - "\n_cffi_backend module: ", f); - modules = PyImport_GetModuleDict(); - mod = PyDict_GetItemString(modules, "_cffi_backend"); - if (mod == NULL) { - PyFile_WriteString("not loaded", f); - } - else { - v = PyObject_GetAttrString(mod, "__file__"); - PyFile_WriteObject(v, f, 0); - Py_XDECREF(v); - } - PyFile_WriteString("\nsys.path: ", f); - PyFile_WriteObject(PySys_GetObject((char *)"path"), f, 0); - PyFile_WriteString("\n\n", f); - } - _cffi_stop_error_capture(ecap); - } - result = -1; - goto done; -} - -#if PY_VERSION_HEX < 0x03080000 -PyAPI_DATA(char *) _PyParser_TokenNames[]; /* from CPython */ -#endif - -static int _cffi_carefully_make_gil(void) -{ - /* This does the basic initialization of Python. It can be called - completely concurrently from unrelated threads. It assumes - that we don't hold the GIL before (if it exists), and we don't - hold it afterwards. - - (What it really does used to be completely different in Python 2 - and Python 3, with the Python 2 solution avoiding the spin-lock - around the Py_InitializeEx() call. However, after recent changes - to CPython 2.7 (issue #358) it no longer works. So we use the - Python 3 solution everywhere.) - - This initializes Python by calling Py_InitializeEx(). - Important: this must not be called concurrently at all. - So we use a global variable as a simple spin lock. This global - variable must be from 'libpythonX.Y.so', not from this - cffi-based extension module, because it must be shared from - different cffi-based extension modules. - - In Python < 3.8, we choose - _PyParser_TokenNames[0] as a completely arbitrary pointer value - that is never written to. The default is to point to the - string "ENDMARKER". We change it temporarily to point to the - next character in that string. (Yes, I know it's REALLY - obscure.) - - In Python >= 3.8, this string array is no longer writable, so - instead we pick PyCapsuleType.tp_version_tag. We can't change - Python < 3.8 because someone might use a mixture of cffi - embedded modules, some of which were compiled before this file - changed. - - In Python >= 3.12, this stopped working because that particular - tp_version_tag gets modified during interpreter startup. It's - arguably a bad idea before 3.12 too, but again we can't change - that because someone might use a mixture of cffi embedded - modules, and no-one reported a bug so far. In Python >= 3.12 - we go instead for PyCapsuleType.tp_as_buffer, which is supposed - to always be NULL. We write to it temporarily a pointer to - a struct full of NULLs, which is semantically the same. 
- */ - -#ifdef WITH_THREAD -# if PY_VERSION_HEX < 0x03080000 - char *volatile *lock = (char *volatile *)_PyParser_TokenNames; - char *old_value, *locked_value; - - while (1) { /* spin loop */ - old_value = *lock; - locked_value = old_value + 1; - if (old_value[0] == 'E') { - assert(old_value[1] == 'N'); - if (cffi_compare_and_swap(lock, old_value, locked_value)) - break; - } - else { - assert(old_value[0] == 'N'); - /* should ideally do a spin loop instruction here, but - hard to do it portably and doesn't really matter I - think: PyEval_InitThreads() should be very fast, and - this is only run at start-up anyway. */ - } - } -# else -# if PY_VERSION_HEX < 0x030C0000 - int volatile *lock = (int volatile *)&PyCapsule_Type.tp_version_tag; - int old_value, locked_value = -42; - assert(!(PyCapsule_Type.tp_flags & Py_TPFLAGS_HAVE_VERSION_TAG)); -# else - static struct ebp_s { PyBufferProcs buf; int mark; } empty_buffer_procs; - empty_buffer_procs.mark = -42; - PyBufferProcs *volatile *lock = (PyBufferProcs *volatile *) - &PyCapsule_Type.tp_as_buffer; - PyBufferProcs *old_value, *locked_value = &empty_buffer_procs.buf; -# endif - - while (1) { /* spin loop */ - old_value = *lock; - if (old_value == 0) { - if (cffi_compare_and_swap(lock, old_value, locked_value)) - break; - } - else { -# if PY_VERSION_HEX < 0x030C0000 - assert(old_value == locked_value); -# else - /* The pointer should point to a possibly different - empty_buffer_procs from another C extension module */ - assert(((struct ebp_s *)old_value)->mark == -42); -# endif - /* should ideally do a spin loop instruction here, but - hard to do it portably and doesn't really matter I - think: PyEval_InitThreads() should be very fast, and - this is only run at start-up anyway. */ - } - } -# endif -#endif - - /* call Py_InitializeEx() */ - if (!Py_IsInitialized()) { - _cffi_py_initialize(); -#if PY_VERSION_HEX < 0x03070000 - PyEval_InitThreads(); -#endif - PyEval_SaveThread(); /* release the GIL */ - /* the returned tstate must be the one that has been stored into the - autoTLSkey by _PyGILState_Init() called from Py_Initialize(). */ - } - else { -#if PY_VERSION_HEX < 0x03070000 - /* PyEval_InitThreads() is always a no-op from CPython 3.7 */ - PyGILState_STATE state = PyGILState_Ensure(); - PyEval_InitThreads(); - PyGILState_Release(state); -#endif - } - -#ifdef WITH_THREAD - /* release the lock */ - while (!cffi_compare_and_swap(lock, locked_value, old_value)) - ; -#endif - - return 0; -} - -/********** end CPython-specific section **********/ - - -#else - - -/********** PyPy-specific section **********/ - -PyMODINIT_FUNC _CFFI_PYTHON_STARTUP_FUNC(const void *[]); /* forward */ - -static struct _cffi_pypy_init_s { - const char *name; - void *func; /* function pointer */ - const char *code; -} _cffi_pypy_init = { - _CFFI_MODULE_NAME, - _CFFI_PYTHON_STARTUP_FUNC, - _CFFI_PYTHON_STARTUP_CODE, -}; - -extern int pypy_carefully_make_gil(const char *); -extern int pypy_init_embedded_cffi_module(int, struct _cffi_pypy_init_s *); - -static int _cffi_carefully_make_gil(void) -{ - return pypy_carefully_make_gil(_CFFI_MODULE_NAME); -} - -static int _cffi_initialize_python(void) -{ - return pypy_init_embedded_cffi_module(0xB011, &_cffi_pypy_init); -} - -/********** end PyPy-specific section **********/ - - -#endif - - -#ifdef __GNUC__ -__attribute__((noinline)) -#endif -static _cffi_call_python_fnptr _cffi_start_python(void) -{ - /* Delicate logic to initialize Python. This function can be - called multiple times concurrently, e.g. 
when the process calls - its first ``extern "Python"`` functions in multiple threads at - once. It can also be called recursively, in which case we must - ignore it. We also have to consider what occurs if several - different cffi-based extensions reach this code in parallel - threads---it is a different copy of the code, then, and we - can't have any shared global variable unless it comes from - 'libpythonX.Y.so'. - - Idea: - - * _cffi_carefully_make_gil(): "carefully" call - PyEval_InitThreads() (possibly with Py_InitializeEx() first). - - * then we use a (local) custom lock to make sure that a call to this - cffi-based extension will wait if another call to the *same* - extension is running the initialization in another thread. - It is reentrant, so that a recursive call will not block, but - only one from a different thread. - - * then we grab the GIL and (Python 2) we call Py_InitializeEx(). - At this point, concurrent calls to Py_InitializeEx() are not - possible: we have the GIL. - - * do the rest of the specific initialization, which may - temporarily release the GIL but not the custom lock. - Only release the custom lock when we are done. - */ - static char called = 0; - - if (_cffi_carefully_make_gil() != 0) - return NULL; - - _cffi_acquire_reentrant_mutex(); - - /* Here the GIL exists, but we don't have it. We're only protected - from concurrency by the reentrant mutex. */ - - /* This file only initializes the embedded module once, the first - time this is called, even if there are subinterpreters. */ - if (!called) { - called = 1; /* invoke _cffi_initialize_python() only once, - but don't set '_cffi_call_python' right now, - otherwise concurrent threads won't call - this function at all (we need them to wait) */ - if (_cffi_initialize_python() == 0) { - /* now initialization is finished. Switch to the fast-path. */ - - /* We would like nobody to see the new value of - '_cffi_call_python' without also seeing the rest of the - data initialized. However, this is not possible. But - the new value of '_cffi_call_python' is the function - 'cffi_call_python()' from _cffi_backend. So: */ - cffi_write_barrier(); - /* ^^^ we put a write barrier here, and a corresponding - read barrier at the start of cffi_call_python(). This - ensures that after that read barrier, we see everything - done here before the write barrier. - */ - - assert(_cffi_call_python_org != NULL); - _cffi_call_python = (_cffi_call_python_fnptr)_cffi_call_python_org; - } - else { - /* initialization failed. Reset this to NULL, even if it was - already set to some other value. Future calls to - _cffi_start_python() are still forced to occur, and will - always return NULL from now on. */ - _cffi_call_python_org = NULL; - } - } - - _cffi_release_reentrant_mutex(); - - return (_cffi_call_python_fnptr)_cffi_call_python_org; -} - -static -void _cffi_start_and_call_python(struct _cffi_externpy_s *externpy, char *args) -{ - _cffi_call_python_fnptr fnptr; - int current_err = errno; -#ifdef _MSC_VER - int current_lasterr = GetLastError(); -#endif - fnptr = _cffi_start_python(); - if (fnptr == NULL) { - fprintf(stderr, "function %s() called, but initialization code " - "failed. Returning 0.\n", externpy->name); - memset(args, 0, externpy->size_of_result); - } -#ifdef _MSC_VER - SetLastError(current_lasterr); -#endif - errno = current_err; - - if (fnptr != NULL) - fnptr(externpy, args); -} - - -/* The cffi_start_python() function makes sure Python is initialized - and our cffi module is set up. 
It can be called manually from the - user C code. The same effect is obtained automatically from any - dll-exported ``extern "Python"`` function. This function returns - -1 if initialization failed, 0 if all is OK. */ -_CFFI_UNUSED_FN -static int cffi_start_python(void) -{ - if (_cffi_call_python == &_cffi_start_and_call_python) { - if (_cffi_start_python() == NULL) - return -1; - } - cffi_read_barrier(); - return 0; -} - -#undef cffi_compare_and_swap -#undef cffi_write_barrier -#undef cffi_read_barrier - -#ifdef __cplusplus -} -#endif diff --git a/env/lib/python3.10/site-packages/cffi/_imp_emulation.py b/env/lib/python3.10/site-packages/cffi/_imp_emulation.py deleted file mode 100644 index 136abdd..0000000 --- a/env/lib/python3.10/site-packages/cffi/_imp_emulation.py +++ /dev/null @@ -1,83 +0,0 @@ - -try: - # this works on Python < 3.12 - from imp import * - -except ImportError: - # this is a limited emulation for Python >= 3.12. - # Note that this is used only for tests or for the old ffi.verify(). - # This is copied from the source code of Python 3.11. - - from _imp import (acquire_lock, release_lock, - is_builtin, is_frozen) - - from importlib._bootstrap import _load - - from importlib import machinery - import os - import sys - import tokenize - - SEARCH_ERROR = 0 - PY_SOURCE = 1 - PY_COMPILED = 2 - C_EXTENSION = 3 - PY_RESOURCE = 4 - PKG_DIRECTORY = 5 - C_BUILTIN = 6 - PY_FROZEN = 7 - PY_CODERESOURCE = 8 - IMP_HOOK = 9 - - def get_suffixes(): - extensions = [(s, 'rb', C_EXTENSION) - for s in machinery.EXTENSION_SUFFIXES] - source = [(s, 'r', PY_SOURCE) for s in machinery.SOURCE_SUFFIXES] - bytecode = [(s, 'rb', PY_COMPILED) for s in machinery.BYTECODE_SUFFIXES] - return extensions + source + bytecode - - def find_module(name, path=None): - if not isinstance(name, str): - raise TypeError("'name' must be a str, not {}".format(type(name))) - elif not isinstance(path, (type(None), list)): - # Backwards-compatibility - raise RuntimeError("'path' must be None or a list, " - "not {}".format(type(path))) - - if path is None: - if is_builtin(name): - return None, None, ('', '', C_BUILTIN) - elif is_frozen(name): - return None, None, ('', '', PY_FROZEN) - else: - path = sys.path - - for entry in path: - package_directory = os.path.join(entry, name) - for suffix in ['.py', machinery.BYTECODE_SUFFIXES[0]]: - package_file_name = '__init__' + suffix - file_path = os.path.join(package_directory, package_file_name) - if os.path.isfile(file_path): - return None, package_directory, ('', '', PKG_DIRECTORY) - for suffix, mode, type_ in get_suffixes(): - file_name = name + suffix - file_path = os.path.join(entry, file_name) - if os.path.isfile(file_path): - break - else: - continue - break # Break out of outer loop when breaking out of inner loop. 
- else: - raise ImportError(name, name=name) - - encoding = None - if 'b' not in mode: - with open(file_path, 'rb') as file: - encoding = tokenize.detect_encoding(file.readline)[0] - file = open(file_path, mode, encoding=encoding) - return file, file_path, (suffix, mode, type_) - - def load_dynamic(name, path, file=None): - loader = machinery.ExtensionFileLoader(name, path) - spec = machinery.ModuleSpec(name=name, loader=loader, origin=path) - return _load(spec) diff --git a/env/lib/python3.10/site-packages/cffi/_shimmed_dist_utils.py b/env/lib/python3.10/site-packages/cffi/_shimmed_dist_utils.py deleted file mode 100644 index c3d2312..0000000 --- a/env/lib/python3.10/site-packages/cffi/_shimmed_dist_utils.py +++ /dev/null @@ -1,45 +0,0 @@ -""" -Temporary shim module to indirect the bits of distutils we need from setuptools/distutils while providing useful -error messages beyond `No module named 'distutils' on Python >= 3.12, or when setuptools' vendored distutils is broken. - -This is a compromise to avoid a hard-dep on setuptools for Python >= 3.12, since many users don't need runtime compilation support from CFFI. -""" -import sys - -try: - # import setuptools first; this is the most robust way to ensure its embedded distutils is available - # (the .pth shim should usually work, but this is even more robust) - import setuptools -except Exception as ex: - if sys.version_info >= (3, 12): - # Python 3.12 has no built-in distutils to fall back on, so any import problem is fatal - raise Exception("This CFFI feature requires setuptools on Python >= 3.12. The setuptools module is missing or non-functional.") from ex - - # silently ignore on older Pythons (support fallback to stdlib distutils where available) -else: - del setuptools - -try: - # bring in just the bits of distutils we need, whether they really came from setuptools or stdlib-embedded distutils - from distutils import log, sysconfig - from distutils.ccompiler import CCompiler - from distutils.command.build_ext import build_ext - from distutils.core import Distribution, Extension - from distutils.dir_util import mkpath - from distutils.errors import DistutilsSetupError, CompileError, LinkError - from distutils.log import set_threshold, set_verbosity - - if sys.platform == 'win32': - try: - # FUTURE: msvc9compiler module was removed in setuptools 74; consider removing, as it's only used by an ancient patch in `recompiler` - from distutils.msvc9compiler import MSVCCompiler - except ImportError: - MSVCCompiler = None -except Exception as ex: - if sys.version_info >= (3, 12): - raise Exception("This CFFI feature requires setuptools on Python >= 3.12. Please install the setuptools package.") from ex - - # anything older, just let the underlying distutils import error fly - raise Exception("This CFFI feature requires distutils. Please install the distutils or setuptools package.") from ex - -del sys diff --git a/env/lib/python3.10/site-packages/cffi/api.py b/env/lib/python3.10/site-packages/cffi/api.py deleted file mode 100644 index 5a474f3..0000000 --- a/env/lib/python3.10/site-packages/cffi/api.py +++ /dev/null @@ -1,967 +0,0 @@ -import sys, types -from .lock import allocate_lock -from .error import CDefError -from . 
import model - -try: - callable -except NameError: - # Python 3.1 - from collections import Callable - callable = lambda x: isinstance(x, Callable) - -try: - basestring -except NameError: - # Python 3.x - basestring = str - -_unspecified = object() - - - -class FFI(object): - r''' - The main top-level class that you instantiate once, or once per module. - - Example usage: - - ffi = FFI() - ffi.cdef(""" - int printf(const char *, ...); - """) - - C = ffi.dlopen(None) # standard library - -or- - C = ffi.verify() # use a C compiler: verify the decl above is right - - C.printf("hello, %s!\n", ffi.new("char[]", "world")) - ''' - - def __init__(self, backend=None): - """Create an FFI instance. The 'backend' argument is used to - select a non-default backend, mostly for tests. - """ - if backend is None: - # You need PyPy (>= 2.0 beta), or a CPython (>= 2.6) with - # _cffi_backend.so compiled. - import _cffi_backend as backend - from . import __version__ - if backend.__version__ != __version__: - # bad version! Try to be as explicit as possible. - if hasattr(backend, '__file__'): - # CPython - raise Exception("Version mismatch: this is the 'cffi' package version %s, located in %r. When we import the top-level '_cffi_backend' extension module, we get version %s, located in %r. The two versions should be equal; check your installation." % ( - __version__, __file__, - backend.__version__, backend.__file__)) - else: - # PyPy - raise Exception("Version mismatch: this is the 'cffi' package version %s, located in %r. This interpreter comes with a built-in '_cffi_backend' module, which is version %s. The two versions should be equal; check your installation." % ( - __version__, __file__, backend.__version__)) - # (If you insist you can also try to pass the option - # 'backend=backend_ctypes.CTypesBackend()', but don't - # rely on it! It's probably not going to work well.) - - from . import cparser - self._backend = backend - self._lock = allocate_lock() - self._parser = cparser.Parser() - self._cached_btypes = {} - self._parsed_types = types.ModuleType('parsed_types').__dict__ - self._new_types = types.ModuleType('new_types').__dict__ - self._function_caches = [] - self._libraries = [] - self._cdefsources = [] - self._included_ffis = [] - self._windows_unicode = None - self._init_once_cache = {} - self._cdef_version = None - self._embedding = None - self._typecache = model.get_typecache(backend) - if hasattr(backend, 'set_ffi'): - backend.set_ffi(self) - for name in list(backend.__dict__): - if name.startswith('RTLD_'): - setattr(self, name, getattr(backend, name)) - # - with self._lock: - self.BVoidP = self._get_cached_btype(model.voidp_type) - self.BCharA = self._get_cached_btype(model.char_array_type) - if isinstance(backend, types.ModuleType): - # _cffi_backend: attach these constants to the class - if not hasattr(FFI, 'NULL'): - FFI.NULL = self.cast(self.BVoidP, 0) - FFI.CData, FFI.CType = backend._get_types() - else: - # ctypes backend: attach these constants to the instance - self.NULL = self.cast(self.BVoidP, 0) - self.CData, self.CType = backend._get_types() - self.buffer = backend.buffer - - def cdef(self, csource, override=False, packed=False, pack=None): - """Parse the given C source. This registers all declared functions, - types, and global variables. The functions and global variables can - then be accessed via either 'ffi.dlopen()' or 'ffi.verify()'. - The types can be used in 'ffi.new()' and other functions. 
- If 'packed' is specified as True, all structs declared inside this - cdef are packed, i.e. laid out without any field alignment at all. - Alternatively, 'pack' can be a small integer, and requests for - alignment greater than that are ignored (pack=1 is equivalent to - packed=True). - """ - self._cdef(csource, override=override, packed=packed, pack=pack) - - def embedding_api(self, csource, packed=False, pack=None): - self._cdef(csource, packed=packed, pack=pack, dllexport=True) - if self._embedding is None: - self._embedding = '' - - def _cdef(self, csource, override=False, **options): - if not isinstance(csource, str): # unicode, on Python 2 - if not isinstance(csource, basestring): - raise TypeError("cdef() argument must be a string") - csource = csource.encode('ascii') - with self._lock: - self._cdef_version = object() - self._parser.parse(csource, override=override, **options) - self._cdefsources.append(csource) - if override: - for cache in self._function_caches: - cache.clear() - finishlist = self._parser._recomplete - if finishlist: - self._parser._recomplete = [] - for tp in finishlist: - tp.finish_backend_type(self, finishlist) - - def dlopen(self, name, flags=0): - """Load and return a dynamic library identified by 'name'. - The standard C library can be loaded by passing None. - Note that functions and types declared by 'ffi.cdef()' are not - linked to a particular library, just like C headers; in the - library we only look for the actual (untyped) symbols. - """ - if not (isinstance(name, basestring) or - name is None or - isinstance(name, self.CData)): - raise TypeError("dlopen(name): name must be a file name, None, " - "or an already-opened 'void *' handle") - with self._lock: - lib, function_cache = _make_ffi_library(self, name, flags) - self._function_caches.append(function_cache) - self._libraries.append(lib) - return lib - - def dlclose(self, lib): - """Close a library obtained with ffi.dlopen(). After this call, - access to functions or variables from the library will fail - (possibly with a segmentation fault). - """ - type(lib).__cffi_close__(lib) - - def _typeof_locked(self, cdecl): - # call me with the lock! - key = cdecl - if key in self._parsed_types: - return self._parsed_types[key] - # - if not isinstance(cdecl, str): # unicode, on Python 2 - cdecl = cdecl.encode('ascii') - # - type = self._parser.parse_type(cdecl) - really_a_function_type = type.is_raw_function - if really_a_function_type: - type = type.as_function_pointer() - btype = self._get_cached_btype(type) - result = btype, really_a_function_type - self._parsed_types[key] = result - return result - - def _typeof(self, cdecl, consider_function_as_funcptr=False): - # string -> ctype object - try: - result = self._parsed_types[cdecl] - except KeyError: - with self._lock: - result = self._typeof_locked(cdecl) - # - btype, really_a_function_type = result - if really_a_function_type and not consider_function_as_funcptr: - raise CDefError("the type %r is a function type, not a " - "pointer-to-function type" % (cdecl,)) - return btype - - def typeof(self, cdecl): - """Parse the C type given as a string and return the - corresponding object. - It can also be used on 'cdata' instance to get its C type. 
- """ - if isinstance(cdecl, basestring): - return self._typeof(cdecl) - if isinstance(cdecl, self.CData): - return self._backend.typeof(cdecl) - if isinstance(cdecl, types.BuiltinFunctionType): - res = _builtin_function_type(cdecl) - if res is not None: - return res - if (isinstance(cdecl, types.FunctionType) - and hasattr(cdecl, '_cffi_base_type')): - with self._lock: - return self._get_cached_btype(cdecl._cffi_base_type) - raise TypeError(type(cdecl)) - - def sizeof(self, cdecl): - """Return the size in bytes of the argument. It can be a - string naming a C type, or a 'cdata' instance. - """ - if isinstance(cdecl, basestring): - BType = self._typeof(cdecl) - return self._backend.sizeof(BType) - else: - return self._backend.sizeof(cdecl) - - def alignof(self, cdecl): - """Return the natural alignment size in bytes of the C type - given as a string. - """ - if isinstance(cdecl, basestring): - cdecl = self._typeof(cdecl) - return self._backend.alignof(cdecl) - - def offsetof(self, cdecl, *fields_or_indexes): - """Return the offset of the named field inside the given - structure or array, which must be given as a C type name. - You can give several field names in case of nested structures. - You can also give numeric values which correspond to array - items, in case of an array type. - """ - if isinstance(cdecl, basestring): - cdecl = self._typeof(cdecl) - return self._typeoffsetof(cdecl, *fields_or_indexes)[1] - - def new(self, cdecl, init=None): - """Allocate an instance according to the specified C type and - return a pointer to it. The specified C type must be either a - pointer or an array: ``new('X *')`` allocates an X and returns - a pointer to it, whereas ``new('X[n]')`` allocates an array of - n X'es and returns an array referencing it (which works - mostly like a pointer, like in C). You can also use - ``new('X[]', n)`` to allocate an array of a non-constant - length n. - - The memory is initialized following the rules of declaring a - global variable in C: by default it is zero-initialized, but - an explicit initializer can be given which can be used to - fill all or part of the memory. - - When the returned object goes out of scope, the memory - is freed. In other words the returned object has - ownership of the value of type 'cdecl' that it points to. This - means that the raw data can be used as long as this object is - kept alive, but must not be used for a longer time. Be careful - about that when copying the pointer to the memory somewhere - else, e.g. into another structure. - """ - if isinstance(cdecl, basestring): - cdecl = self._typeof(cdecl) - return self._backend.newp(cdecl, init) - - def new_allocator(self, alloc=None, free=None, - should_clear_after_alloc=True): - """Return a new allocator, i.e. a function that behaves like ffi.new() - but uses the provided low-level 'alloc' and 'free' functions. - - 'alloc' is called with the size as argument. If it returns NULL, a - MemoryError is raised. 'free' is called with the result of 'alloc' - as argument. Both can be either Python function or directly C - functions. If 'free' is None, then no free function is called. - If both 'alloc' and 'free' are None, the default is used. - - If 'should_clear_after_alloc' is set to False, then the memory - returned by 'alloc' is assumed to be already cleared (or you are - fine with garbage); otherwise CFFI will clear it. 
- """ - compiled_ffi = self._backend.FFI() - allocator = compiled_ffi.new_allocator(alloc, free, - should_clear_after_alloc) - def allocate(cdecl, init=None): - if isinstance(cdecl, basestring): - cdecl = self._typeof(cdecl) - return allocator(cdecl, init) - return allocate - - def cast(self, cdecl, source): - """Similar to a C cast: returns an instance of the named C - type initialized with the given 'source'. The source is - casted between integers or pointers of any type. - """ - if isinstance(cdecl, basestring): - cdecl = self._typeof(cdecl) - return self._backend.cast(cdecl, source) - - def string(self, cdata, maxlen=-1): - """Return a Python string (or unicode string) from the 'cdata'. - If 'cdata' is a pointer or array of characters or bytes, returns - the null-terminated string. The returned string extends until - the first null character, or at most 'maxlen' characters. If - 'cdata' is an array then 'maxlen' defaults to its length. - - If 'cdata' is a pointer or array of wchar_t, returns a unicode - string following the same rules. - - If 'cdata' is a single character or byte or a wchar_t, returns - it as a string or unicode string. - - If 'cdata' is an enum, returns the value of the enumerator as a - string, or 'NUMBER' if the value is out of range. - """ - return self._backend.string(cdata, maxlen) - - def unpack(self, cdata, length): - """Unpack an array of C data of the given length, - returning a Python string/unicode/list. - - If 'cdata' is a pointer to 'char', returns a byte string. - It does not stop at the first null. This is equivalent to: - ffi.buffer(cdata, length)[:] - - If 'cdata' is a pointer to 'wchar_t', returns a unicode string. - 'length' is measured in wchar_t's; it is not the size in bytes. - - If 'cdata' is a pointer to anything else, returns a list of - 'length' items. This is a faster equivalent to: - [cdata[i] for i in range(length)] - """ - return self._backend.unpack(cdata, length) - - #def buffer(self, cdata, size=-1): - # """Return a read-write buffer object that references the raw C data - # pointed to by the given 'cdata'. The 'cdata' must be a pointer or - # an array. Can be passed to functions expecting a buffer, or directly - # manipulated with: - # - # buf[:] get a copy of it in a regular string, or - # buf[idx] as a single character - # buf[:] = ... - # buf[idx] = ... change the content - # """ - # note that 'buffer' is a type, set on this instance by __init__ - - def from_buffer(self, cdecl, python_buffer=_unspecified, - require_writable=False): - """Return a cdata of the given type pointing to the data of the - given Python object, which must support the buffer interface. - Note that this is not meant to be used on the built-in types - str or unicode (you can build 'char[]' arrays explicitly) - but only on objects containing large quantities of raw data - in some other format, like 'array.array' or numpy arrays. - - The first argument is optional and default to 'char[]'. - """ - if python_buffer is _unspecified: - cdecl, python_buffer = self.BCharA, cdecl - elif isinstance(cdecl, basestring): - cdecl = self._typeof(cdecl) - return self._backend.from_buffer(cdecl, python_buffer, - require_writable) - - def memmove(self, dest, src, n): - """ffi.memmove(dest, src, n) copies n bytes of memory from src to dest. - - Like the C function memmove(), the memory areas may overlap; - apart from that it behaves like the C function memcpy(). - - 'src' can be any cdata ptr or array, or any Python buffer object. 
- 'dest' can be any cdata ptr or array, or a writable Python buffer - object. The size to copy, 'n', is always measured in bytes. - - Unlike other methods, this one supports all Python buffer including - byte strings and bytearrays---but it still does not support - non-contiguous buffers. - """ - return self._backend.memmove(dest, src, n) - - def callback(self, cdecl, python_callable=None, error=None, onerror=None): - """Return a callback object or a decorator making such a - callback object. 'cdecl' must name a C function pointer type. - The callback invokes the specified 'python_callable' (which may - be provided either directly or via a decorator). Important: the - callback object must be manually kept alive for as long as the - callback may be invoked from the C level. - """ - def callback_decorator_wrap(python_callable): - if not callable(python_callable): - raise TypeError("the 'python_callable' argument " - "is not callable") - return self._backend.callback(cdecl, python_callable, - error, onerror) - if isinstance(cdecl, basestring): - cdecl = self._typeof(cdecl, consider_function_as_funcptr=True) - if python_callable is None: - return callback_decorator_wrap # decorator mode - else: - return callback_decorator_wrap(python_callable) # direct mode - - def getctype(self, cdecl, replace_with=''): - """Return a string giving the C type 'cdecl', which may be itself - a string or a object. If 'replace_with' is given, it gives - extra text to append (or insert for more complicated C types), like - a variable name, or '*' to get actually the C type 'pointer-to-cdecl'. - """ - if isinstance(cdecl, basestring): - cdecl = self._typeof(cdecl) - replace_with = replace_with.strip() - if (replace_with.startswith('*') - and '&[' in self._backend.getcname(cdecl, '&')): - replace_with = '(%s)' % replace_with - elif replace_with and not replace_with[0] in '[(': - replace_with = ' ' + replace_with - return self._backend.getcname(cdecl, replace_with) - - def gc(self, cdata, destructor, size=0): - """Return a new cdata object that points to the same - data. Later, when this new cdata object is garbage-collected, - 'destructor(old_cdata_object)' will be called. - - The optional 'size' gives an estimate of the size, used to - trigger the garbage collection more eagerly. So far only used - on PyPy. It tells the GC that the returned object keeps alive - roughly 'size' bytes of external memory. - """ - return self._backend.gcp(cdata, destructor, size) - - def _get_cached_btype(self, type): - assert self._lock.acquire(False) is False - # call me with the lock! - try: - BType = self._cached_btypes[type] - except KeyError: - finishlist = [] - BType = type.get_cached_btype(self, finishlist) - for type in finishlist: - type.finish_backend_type(self, finishlist) - return BType - - def verify(self, source='', tmpdir=None, **kwargs): - """Verify that the current ffi signatures compile on this - machine, and return a dynamic library object. The dynamic - library can be used to call functions and access global - variables declared in this 'ffi'. The library is compiled - by the C compiler: it gives you C-level API compatibility - (including calling macros). This is unlike 'ffi.dlopen()', - which requires binary compatibility in the signatures. 
- """ - from .verifier import Verifier, _caller_dir_pycache - # - # If set_unicode(True) was called, insert the UNICODE and - # _UNICODE macro declarations - if self._windows_unicode: - self._apply_windows_unicode(kwargs) - # - # Set the tmpdir here, and not in Verifier.__init__: it picks - # up the caller's directory, which we want to be the caller of - # ffi.verify(), as opposed to the caller of Veritier(). - tmpdir = tmpdir or _caller_dir_pycache() - # - # Make a Verifier() and use it to load the library. - self.verifier = Verifier(self, source, tmpdir, **kwargs) - lib = self.verifier.load_library() - # - # Save the loaded library for keep-alive purposes, even - # if the caller doesn't keep it alive itself (it should). - self._libraries.append(lib) - return lib - - def _get_errno(self): - return self._backend.get_errno() - def _set_errno(self, errno): - self._backend.set_errno(errno) - errno = property(_get_errno, _set_errno, None, - "the value of 'errno' from/to the C calls") - - def getwinerror(self, code=-1): - return self._backend.getwinerror(code) - - def _pointer_to(self, ctype): - with self._lock: - return model.pointer_cache(self, ctype) - - def addressof(self, cdata, *fields_or_indexes): - """Return the address of a . - If 'fields_or_indexes' are given, returns the address of that - field or array item in the structure or array, recursively in - case of nested structures. - """ - try: - ctype = self._backend.typeof(cdata) - except TypeError: - if '__addressof__' in type(cdata).__dict__: - return type(cdata).__addressof__(cdata, *fields_or_indexes) - raise - if fields_or_indexes: - ctype, offset = self._typeoffsetof(ctype, *fields_or_indexes) - else: - if ctype.kind == "pointer": - raise TypeError("addressof(pointer)") - offset = 0 - ctypeptr = self._pointer_to(ctype) - return self._backend.rawaddressof(ctypeptr, cdata, offset) - - def _typeoffsetof(self, ctype, field_or_index, *fields_or_indexes): - ctype, offset = self._backend.typeoffsetof(ctype, field_or_index) - for field1 in fields_or_indexes: - ctype, offset1 = self._backend.typeoffsetof(ctype, field1, 1) - offset += offset1 - return ctype, offset - - def include(self, ffi_to_include): - """Includes the typedefs, structs, unions and enums defined - in another FFI instance. Usage is similar to a #include in C, - where a part of the program might include types defined in - another part for its own usage. Note that the include() - method has no effect on functions, constants and global - variables, which must anyway be accessed directly from the - lib object returned by the original FFI instance. - """ - if not isinstance(ffi_to_include, FFI): - raise TypeError("ffi.include() expects an argument that is also of" - " type cffi.FFI, not %r" % ( - type(ffi_to_include).__name__,)) - if ffi_to_include is self: - raise ValueError("self.include(self)") - with ffi_to_include._lock: - with self._lock: - self._parser.include(ffi_to_include._parser) - self._cdefsources.append('[') - self._cdefsources.extend(ffi_to_include._cdefsources) - self._cdefsources.append(']') - self._included_ffis.append(ffi_to_include) - - def new_handle(self, x): - return self._backend.newp_handle(self.BVoidP, x) - - def from_handle(self, x): - return self._backend.from_handle(x) - - def release(self, x): - self._backend.release(x) - - def set_unicode(self, enabled_flag): - """Windows: if 'enabled_flag' is True, enable the UNICODE and - _UNICODE defines in C, and declare the types like TCHAR and LPTCSTR - to be (pointers to) wchar_t. 
If 'enabled_flag' is False, - declare these types to be (pointers to) plain 8-bit characters. - This is mostly for backward compatibility; you usually want True. - """ - if self._windows_unicode is not None: - raise ValueError("set_unicode() can only be called once") - enabled_flag = bool(enabled_flag) - if enabled_flag: - self.cdef("typedef wchar_t TBYTE;" - "typedef wchar_t TCHAR;" - "typedef const wchar_t *LPCTSTR;" - "typedef const wchar_t *PCTSTR;" - "typedef wchar_t *LPTSTR;" - "typedef wchar_t *PTSTR;" - "typedef TBYTE *PTBYTE;" - "typedef TCHAR *PTCHAR;") - else: - self.cdef("typedef char TBYTE;" - "typedef char TCHAR;" - "typedef const char *LPCTSTR;" - "typedef const char *PCTSTR;" - "typedef char *LPTSTR;" - "typedef char *PTSTR;" - "typedef TBYTE *PTBYTE;" - "typedef TCHAR *PTCHAR;") - self._windows_unicode = enabled_flag - - def _apply_windows_unicode(self, kwds): - defmacros = kwds.get('define_macros', ()) - if not isinstance(defmacros, (list, tuple)): - raise TypeError("'define_macros' must be a list or tuple") - defmacros = list(defmacros) + [('UNICODE', '1'), - ('_UNICODE', '1')] - kwds['define_macros'] = defmacros - - def _apply_embedding_fix(self, kwds): - # must include an argument like "-lpython2.7" for the compiler - def ensure(key, value): - lst = kwds.setdefault(key, []) - if value not in lst: - lst.append(value) - # - if '__pypy__' in sys.builtin_module_names: - import os - if sys.platform == "win32": - # we need 'libpypy-c.lib'. Current distributions of - # pypy (>= 4.1) contain it as 'libs/python27.lib'. - pythonlib = "python{0[0]}{0[1]}".format(sys.version_info) - if hasattr(sys, 'prefix'): - ensure('library_dirs', os.path.join(sys.prefix, 'libs')) - else: - # we need 'libpypy-c.{so,dylib}', which should be by - # default located in 'sys.prefix/bin' for installed - # systems. - if sys.version_info < (3,): - pythonlib = "pypy-c" - else: - pythonlib = "pypy3-c" - if hasattr(sys, 'prefix'): - ensure('library_dirs', os.path.join(sys.prefix, 'bin')) - # On uninstalled pypy's, the libpypy-c is typically found in - # .../pypy/goal/. - if hasattr(sys, 'prefix'): - ensure('library_dirs', os.path.join(sys.prefix, 'pypy', 'goal')) - else: - if sys.platform == "win32": - template = "python%d%d" - if hasattr(sys, 'gettotalrefcount'): - template += '_d' - else: - try: - import sysconfig - except ImportError: # 2.6 - from cffi._shimmed_dist_utils import sysconfig - template = "python%d.%d" - if sysconfig.get_config_var('DEBUG_EXT'): - template += sysconfig.get_config_var('DEBUG_EXT') - pythonlib = (template % - (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff)) - if hasattr(sys, 'abiflags'): - pythonlib += sys.abiflags - ensure('libraries', pythonlib) - if sys.platform == "win32": - ensure('extra_link_args', '/MANIFEST') - - def set_source(self, module_name, source, source_extension='.c', **kwds): - import os - if hasattr(self, '_assigned_source'): - raise ValueError("set_source() cannot be called several times " - "per ffi object") - if not isinstance(module_name, basestring): - raise TypeError("'module_name' must be a string") - if os.sep in module_name or (os.altsep and os.altsep in module_name): - raise ValueError("'module_name' must not contain '/': use a dotted " - "name to make a 'package.module' location") - self._assigned_source = (str(module_name), source, - source_extension, kwds) - - def set_source_pkgconfig(self, module_name, pkgconfig_libs, source, - source_extension='.c', **kwds): - from . 
import pkgconfig - if not isinstance(pkgconfig_libs, list): - raise TypeError("the pkgconfig_libs argument must be a list " - "of package names") - kwds2 = pkgconfig.flags_from_pkgconfig(pkgconfig_libs) - pkgconfig.merge_flags(kwds, kwds2) - self.set_source(module_name, source, source_extension, **kwds) - - def distutils_extension(self, tmpdir='build', verbose=True): - from cffi._shimmed_dist_utils import mkpath - from .recompiler import recompile - # - if not hasattr(self, '_assigned_source'): - if hasattr(self, 'verifier'): # fallback, 'tmpdir' ignored - return self.verifier.get_extension() - raise ValueError("set_source() must be called before" - " distutils_extension()") - module_name, source, source_extension, kwds = self._assigned_source - if source is None: - raise TypeError("distutils_extension() is only for C extension " - "modules, not for dlopen()-style pure Python " - "modules") - mkpath(tmpdir) - ext, updated = recompile(self, module_name, - source, tmpdir=tmpdir, extradir=tmpdir, - source_extension=source_extension, - call_c_compiler=False, **kwds) - if verbose: - if updated: - sys.stderr.write("regenerated: %r\n" % (ext.sources[0],)) - else: - sys.stderr.write("not modified: %r\n" % (ext.sources[0],)) - return ext - - def emit_c_code(self, filename): - from .recompiler import recompile - # - if not hasattr(self, '_assigned_source'): - raise ValueError("set_source() must be called before emit_c_code()") - module_name, source, source_extension, kwds = self._assigned_source - if source is None: - raise TypeError("emit_c_code() is only for C extension modules, " - "not for dlopen()-style pure Python modules") - recompile(self, module_name, source, - c_file=filename, call_c_compiler=False, - uses_ffiplatform=False, **kwds) - - def emit_python_code(self, filename): - from .recompiler import recompile - # - if not hasattr(self, '_assigned_source'): - raise ValueError("set_source() must be called before emit_c_code()") - module_name, source, source_extension, kwds = self._assigned_source - if source is not None: - raise TypeError("emit_python_code() is only for dlopen()-style " - "pure Python modules, not for C extension modules") - recompile(self, module_name, source, - c_file=filename, call_c_compiler=False, - uses_ffiplatform=False, **kwds) - - def compile(self, tmpdir='.', verbose=0, target=None, debug=None): - """The 'target' argument gives the final file name of the - compiled DLL. Use '*' to force distutils' choice, suitable for - regular CPython C API modules. Use a file name ending in '.*' - to ask for the system's default extension for dynamic libraries - (.so/.dll/.dylib). - - The default is '*' when building a non-embedded C API extension, - and (module_name + '.*') when building an embedded library. - """ - from .recompiler import recompile - # - if not hasattr(self, '_assigned_source'): - raise ValueError("set_source() must be called before compile()") - module_name, source, source_extension, kwds = self._assigned_source - return recompile(self, module_name, source, tmpdir=tmpdir, - target=target, source_extension=source_extension, - compiler_verbose=verbose, debug=debug, **kwds) - - def init_once(self, func, tag): - # Read _init_once_cache[tag], which is either (False, lock) if - # we're calling the function now in some thread, or (True, result). - # Don't call setdefault() in most cases, to avoid allocating and - # immediately freeing a lock; but still use setdefaut() to avoid - # races. 
- try: - x = self._init_once_cache[tag] - except KeyError: - x = self._init_once_cache.setdefault(tag, (False, allocate_lock())) - # Common case: we got (True, result), so we return the result. - if x[0]: - return x[1] - # Else, it's a lock. Acquire it to serialize the following tests. - with x[1]: - # Read again from _init_once_cache the current status. - x = self._init_once_cache[tag] - if x[0]: - return x[1] - # Call the function and store the result back. - result = func() - self._init_once_cache[tag] = (True, result) - return result - - def embedding_init_code(self, pysource): - if self._embedding: - raise ValueError("embedding_init_code() can only be called once") - # fix 'pysource' before it gets dumped into the C file: - # - remove empty lines at the beginning, so it starts at "line 1" - # - dedent, if all non-empty lines are indented - # - check for SyntaxErrors - import re - match = re.match(r'\s*\n', pysource) - if match: - pysource = pysource[match.end():] - lines = pysource.splitlines() or [''] - prefix = re.match(r'\s*', lines[0]).group() - for i in range(1, len(lines)): - line = lines[i] - if line.rstrip(): - while not line.startswith(prefix): - prefix = prefix[:-1] - i = len(prefix) - lines = [line[i:]+'\n' for line in lines] - pysource = ''.join(lines) - # - compile(pysource, "cffi_init", "exec") - # - self._embedding = pysource - - def def_extern(self, *args, **kwds): - raise ValueError("ffi.def_extern() is only available on API-mode FFI " - "objects") - - def list_types(self): - """Returns the user type names known to this FFI instance. - This returns a tuple containing three lists of names: - (typedef_names, names_of_structs, names_of_unions) - """ - typedefs = [] - structs = [] - unions = [] - for key in self._parser._declarations: - if key.startswith('typedef '): - typedefs.append(key[8:]) - elif key.startswith('struct '): - structs.append(key[7:]) - elif key.startswith('union '): - unions.append(key[6:]) - typedefs.sort() - structs.sort() - unions.sort() - return (typedefs, structs, unions) - - -def _load_backend_lib(backend, name, flags): - import os - if not isinstance(name, basestring): - if sys.platform != "win32" or name is not None: - return backend.load_library(name, flags) - name = "c" # Windows: load_library(None) fails, but this works - # on Python 2 (backward compatibility hack only) - first_error = None - if '.' in name or '/' in name or os.sep in name: - try: - return backend.load_library(name, flags) - except OSError as e: - first_error = e - import ctypes.util - path = ctypes.util.find_library(name) - if path is None: - if name == "c" and sys.platform == "win32" and sys.version_info >= (3,): - raise OSError("dlopen(None) cannot work on Windows for Python 3 " - "(see http://bugs.python.org/issue23606)") - msg = ("ctypes.util.find_library() did not manage " - "to locate a library called %r" % (name,)) - if first_error is not None: - msg = "%s. 
Additionally, %s" % (first_error, msg) - raise OSError(msg) - return backend.load_library(path, flags) - -def _make_ffi_library(ffi, libname, flags): - backend = ffi._backend - backendlib = _load_backend_lib(backend, libname, flags) - # - def accessor_function(name): - key = 'function ' + name - tp, _ = ffi._parser._declarations[key] - BType = ffi._get_cached_btype(tp) - value = backendlib.load_function(BType, name) - library.__dict__[name] = value - # - def accessor_variable(name): - key = 'variable ' + name - tp, _ = ffi._parser._declarations[key] - BType = ffi._get_cached_btype(tp) - read_variable = backendlib.read_variable - write_variable = backendlib.write_variable - setattr(FFILibrary, name, property( - lambda self: read_variable(BType, name), - lambda self, value: write_variable(BType, name, value))) - # - def addressof_var(name): - try: - return addr_variables[name] - except KeyError: - with ffi._lock: - if name not in addr_variables: - key = 'variable ' + name - tp, _ = ffi._parser._declarations[key] - BType = ffi._get_cached_btype(tp) - if BType.kind != 'array': - BType = model.pointer_cache(ffi, BType) - p = backendlib.load_function(BType, name) - addr_variables[name] = p - return addr_variables[name] - # - def accessor_constant(name): - raise NotImplementedError("non-integer constant '%s' cannot be " - "accessed from a dlopen() library" % (name,)) - # - def accessor_int_constant(name): - library.__dict__[name] = ffi._parser._int_constants[name] - # - accessors = {} - accessors_version = [False] - addr_variables = {} - # - def update_accessors(): - if accessors_version[0] is ffi._cdef_version: - return - # - for key, (tp, _) in ffi._parser._declarations.items(): - if not isinstance(tp, model.EnumType): - tag, name = key.split(' ', 1) - if tag == 'function': - accessors[name] = accessor_function - elif tag == 'variable': - accessors[name] = accessor_variable - elif tag == 'constant': - accessors[name] = accessor_constant - else: - for i, enumname in enumerate(tp.enumerators): - def accessor_enum(name, tp=tp, i=i): - tp.check_not_partial() - library.__dict__[name] = tp.enumvalues[i] - accessors[enumname] = accessor_enum - for name in ffi._parser._int_constants: - accessors.setdefault(name, accessor_int_constant) - accessors_version[0] = ffi._cdef_version - # - def make_accessor(name): - with ffi._lock: - if name in library.__dict__ or name in FFILibrary.__dict__: - return # added by another thread while waiting for the lock - if name not in accessors: - update_accessors() - if name not in accessors: - raise AttributeError(name) - accessors[name](name) - # - class FFILibrary(object): - def __getattr__(self, name): - make_accessor(name) - return getattr(self, name) - def __setattr__(self, name, value): - try: - property = getattr(self.__class__, name) - except AttributeError: - make_accessor(name) - setattr(self, name, value) - else: - property.__set__(self, value) - def __dir__(self): - with ffi._lock: - update_accessors() - return accessors.keys() - def __addressof__(self, name): - if name in library.__dict__: - return library.__dict__[name] - if name in FFILibrary.__dict__: - return addressof_var(name) - make_accessor(name) - if name in library.__dict__: - return library.__dict__[name] - if name in FFILibrary.__dict__: - return addressof_var(name) - raise AttributeError("cffi library has no function or " - "global variable named '%s'" % (name,)) - def __cffi_close__(self): - backendlib.close_lib() - self.__dict__.clear() - # - if isinstance(libname, basestring): - try: - if not 
isinstance(libname, str): # unicode, on Python 2 - libname = libname.encode('utf-8') - FFILibrary.__name__ = 'FFILibrary_%s' % libname - except UnicodeError: - pass - library = FFILibrary() - return library, library.__dict__ - -def _builtin_function_type(func): - # a hack to make at least ffi.typeof(builtin_function) work, - # if the builtin function was obtained by 'vengine_cpy'. - import sys - try: - module = sys.modules[func.__module__] - ffi = module._cffi_original_ffi - types_of_builtin_funcs = module._cffi_types_of_builtin_funcs - tp = types_of_builtin_funcs[func] - except (KeyError, AttributeError, TypeError): - return None - else: - with ffi._lock: - return ffi._get_cached_btype(tp) diff --git a/env/lib/python3.10/site-packages/cffi/backend_ctypes.py b/env/lib/python3.10/site-packages/cffi/backend_ctypes.py deleted file mode 100644 index e7956a7..0000000 --- a/env/lib/python3.10/site-packages/cffi/backend_ctypes.py +++ /dev/null @@ -1,1121 +0,0 @@ -import ctypes, ctypes.util, operator, sys -from . import model - -if sys.version_info < (3,): - bytechr = chr -else: - unicode = str - long = int - xrange = range - bytechr = lambda num: bytes([num]) - -class CTypesType(type): - pass - -class CTypesData(object): - __metaclass__ = CTypesType - __slots__ = ['__weakref__'] - __name__ = '' - - def __init__(self, *args): - raise TypeError("cannot instantiate %r" % (self.__class__,)) - - @classmethod - def _newp(cls, init): - raise TypeError("expected a pointer or array ctype, got '%s'" - % (cls._get_c_name(),)) - - @staticmethod - def _to_ctypes(value): - raise TypeError - - @classmethod - def _arg_to_ctypes(cls, *value): - try: - ctype = cls._ctype - except AttributeError: - raise TypeError("cannot create an instance of %r" % (cls,)) - if value: - res = cls._to_ctypes(*value) - if not isinstance(res, ctype): - res = cls._ctype(res) - else: - res = cls._ctype() - return res - - @classmethod - def _create_ctype_obj(cls, init): - if init is None: - return cls._arg_to_ctypes() - else: - return cls._arg_to_ctypes(init) - - @staticmethod - def _from_ctypes(ctypes_value): - raise TypeError - - @classmethod - def _get_c_name(cls, replace_with=''): - return cls._reftypename.replace(' &', replace_with) - - @classmethod - def _fix_class(cls): - cls.__name__ = 'CData<%s>' % (cls._get_c_name(),) - cls.__qualname__ = 'CData<%s>' % (cls._get_c_name(),) - cls.__module__ = 'ffi' - - def _get_own_repr(self): - raise NotImplementedError - - def _addr_repr(self, address): - if address == 0: - return 'NULL' - else: - if address < 0: - address += 1 << (8*ctypes.sizeof(ctypes.c_void_p)) - return '0x%x' % address - - def __repr__(self, c_name=None): - own = self._get_own_repr() - return '' % (c_name or self._get_c_name(), own) - - def _convert_to_address(self, BClass): - if BClass is None: - raise TypeError("cannot convert %r to an address" % ( - self._get_c_name(),)) - else: - raise TypeError("cannot convert %r to %r" % ( - self._get_c_name(), BClass._get_c_name())) - - @classmethod - def _get_size(cls): - return ctypes.sizeof(cls._ctype) - - def _get_size_of_instance(self): - return ctypes.sizeof(self._ctype) - - @classmethod - def _cast_from(cls, source): - raise TypeError("cannot cast to %r" % (cls._get_c_name(),)) - - def _cast_to_integer(self): - return self._convert_to_address(None) - - @classmethod - def _alignment(cls): - return ctypes.alignment(cls._ctype) - - def __iter__(self): - raise TypeError("cdata %r does not support iteration" % ( - self._get_c_name()),) - - def _make_cmp(name): - cmpfunc = 
getattr(operator, name) - def cmp(self, other): - v_is_ptr = not isinstance(self, CTypesGenericPrimitive) - w_is_ptr = (isinstance(other, CTypesData) and - not isinstance(other, CTypesGenericPrimitive)) - if v_is_ptr and w_is_ptr: - return cmpfunc(self._convert_to_address(None), - other._convert_to_address(None)) - elif v_is_ptr or w_is_ptr: - return NotImplemented - else: - if isinstance(self, CTypesGenericPrimitive): - self = self._value - if isinstance(other, CTypesGenericPrimitive): - other = other._value - return cmpfunc(self, other) - cmp.func_name = name - return cmp - - __eq__ = _make_cmp('__eq__') - __ne__ = _make_cmp('__ne__') - __lt__ = _make_cmp('__lt__') - __le__ = _make_cmp('__le__') - __gt__ = _make_cmp('__gt__') - __ge__ = _make_cmp('__ge__') - - def __hash__(self): - return hash(self._convert_to_address(None)) - - def _to_string(self, maxlen): - raise TypeError("string(): %r" % (self,)) - - -class CTypesGenericPrimitive(CTypesData): - __slots__ = [] - - def __hash__(self): - return hash(self._value) - - def _get_own_repr(self): - return repr(self._from_ctypes(self._value)) - - -class CTypesGenericArray(CTypesData): - __slots__ = [] - - @classmethod - def _newp(cls, init): - return cls(init) - - def __iter__(self): - for i in xrange(len(self)): - yield self[i] - - def _get_own_repr(self): - return self._addr_repr(ctypes.addressof(self._blob)) - - -class CTypesGenericPtr(CTypesData): - __slots__ = ['_address', '_as_ctype_ptr'] - _automatic_casts = False - kind = "pointer" - - @classmethod - def _newp(cls, init): - return cls(init) - - @classmethod - def _cast_from(cls, source): - if source is None: - address = 0 - elif isinstance(source, CTypesData): - address = source._cast_to_integer() - elif isinstance(source, (int, long)): - address = source - else: - raise TypeError("bad type for cast to %r: %r" % - (cls, type(source).__name__)) - return cls._new_pointer_at(address) - - @classmethod - def _new_pointer_at(cls, address): - self = cls.__new__(cls) - self._address = address - self._as_ctype_ptr = ctypes.cast(address, cls._ctype) - return self - - def _get_own_repr(self): - try: - return self._addr_repr(self._address) - except AttributeError: - return '???' 
- - def _cast_to_integer(self): - return self._address - - def __nonzero__(self): - return bool(self._address) - __bool__ = __nonzero__ - - @classmethod - def _to_ctypes(cls, value): - if not isinstance(value, CTypesData): - raise TypeError("unexpected %s object" % type(value).__name__) - address = value._convert_to_address(cls) - return ctypes.cast(address, cls._ctype) - - @classmethod - def _from_ctypes(cls, ctypes_ptr): - address = ctypes.cast(ctypes_ptr, ctypes.c_void_p).value or 0 - return cls._new_pointer_at(address) - - @classmethod - def _initialize(cls, ctypes_ptr, value): - if value: - ctypes_ptr.contents = cls._to_ctypes(value).contents - - def _convert_to_address(self, BClass): - if (BClass in (self.__class__, None) or BClass._automatic_casts - or self._automatic_casts): - return self._address - else: - return CTypesData._convert_to_address(self, BClass) - - -class CTypesBaseStructOrUnion(CTypesData): - __slots__ = ['_blob'] - - @classmethod - def _create_ctype_obj(cls, init): - # may be overridden - raise TypeError("cannot instantiate opaque type %s" % (cls,)) - - def _get_own_repr(self): - return self._addr_repr(ctypes.addressof(self._blob)) - - @classmethod - def _offsetof(cls, fieldname): - return getattr(cls._ctype, fieldname).offset - - def _convert_to_address(self, BClass): - if getattr(BClass, '_BItem', None) is self.__class__: - return ctypes.addressof(self._blob) - else: - return CTypesData._convert_to_address(self, BClass) - - @classmethod - def _from_ctypes(cls, ctypes_struct_or_union): - self = cls.__new__(cls) - self._blob = ctypes_struct_or_union - return self - - @classmethod - def _to_ctypes(cls, value): - return value._blob - - def __repr__(self, c_name=None): - return CTypesData.__repr__(self, c_name or self._get_c_name(' &')) - - -class CTypesBackend(object): - - PRIMITIVE_TYPES = { - 'char': ctypes.c_char, - 'short': ctypes.c_short, - 'int': ctypes.c_int, - 'long': ctypes.c_long, - 'long long': ctypes.c_longlong, - 'signed char': ctypes.c_byte, - 'unsigned char': ctypes.c_ubyte, - 'unsigned short': ctypes.c_ushort, - 'unsigned int': ctypes.c_uint, - 'unsigned long': ctypes.c_ulong, - 'unsigned long long': ctypes.c_ulonglong, - 'float': ctypes.c_float, - 'double': ctypes.c_double, - '_Bool': ctypes.c_bool, - } - - for _name in ['unsigned long long', 'unsigned long', - 'unsigned int', 'unsigned short', 'unsigned char']: - _size = ctypes.sizeof(PRIMITIVE_TYPES[_name]) - PRIMITIVE_TYPES['uint%d_t' % (8*_size)] = PRIMITIVE_TYPES[_name] - if _size == ctypes.sizeof(ctypes.c_void_p): - PRIMITIVE_TYPES['uintptr_t'] = PRIMITIVE_TYPES[_name] - if _size == ctypes.sizeof(ctypes.c_size_t): - PRIMITIVE_TYPES['size_t'] = PRIMITIVE_TYPES[_name] - - for _name in ['long long', 'long', 'int', 'short', 'signed char']: - _size = ctypes.sizeof(PRIMITIVE_TYPES[_name]) - PRIMITIVE_TYPES['int%d_t' % (8*_size)] = PRIMITIVE_TYPES[_name] - if _size == ctypes.sizeof(ctypes.c_void_p): - PRIMITIVE_TYPES['intptr_t'] = PRIMITIVE_TYPES[_name] - PRIMITIVE_TYPES['ptrdiff_t'] = PRIMITIVE_TYPES[_name] - if _size == ctypes.sizeof(ctypes.c_size_t): - PRIMITIVE_TYPES['ssize_t'] = PRIMITIVE_TYPES[_name] - - - def __init__(self): - self.RTLD_LAZY = 0 # not supported anyway by ctypes - self.RTLD_NOW = 0 - self.RTLD_GLOBAL = ctypes.RTLD_GLOBAL - self.RTLD_LOCAL = ctypes.RTLD_LOCAL - - def set_ffi(self, ffi): - self.ffi = ffi - - def _get_types(self): - return CTypesData, CTypesType - - def load_library(self, path, flags=0): - cdll = ctypes.CDLL(path, flags) - return CTypesLibrary(self, cdll) - - def 
new_void_type(self): - class CTypesVoid(CTypesData): - __slots__ = [] - _reftypename = 'void &' - @staticmethod - def _from_ctypes(novalue): - return None - @staticmethod - def _to_ctypes(novalue): - if novalue is not None: - raise TypeError("None expected, got %s object" % - (type(novalue).__name__,)) - return None - CTypesVoid._fix_class() - return CTypesVoid - - def new_primitive_type(self, name): - if name == 'wchar_t': - raise NotImplementedError(name) - ctype = self.PRIMITIVE_TYPES[name] - if name == 'char': - kind = 'char' - elif name in ('float', 'double'): - kind = 'float' - else: - if name in ('signed char', 'unsigned char'): - kind = 'byte' - elif name == '_Bool': - kind = 'bool' - else: - kind = 'int' - is_signed = (ctype(-1).value == -1) - # - def _cast_source_to_int(source): - if isinstance(source, (int, long, float)): - source = int(source) - elif isinstance(source, CTypesData): - source = source._cast_to_integer() - elif isinstance(source, bytes): - source = ord(source) - elif source is None: - source = 0 - else: - raise TypeError("bad type for cast to %r: %r" % - (CTypesPrimitive, type(source).__name__)) - return source - # - kind1 = kind - class CTypesPrimitive(CTypesGenericPrimitive): - __slots__ = ['_value'] - _ctype = ctype - _reftypename = '%s &' % name - kind = kind1 - - def __init__(self, value): - self._value = value - - @staticmethod - def _create_ctype_obj(init): - if init is None: - return ctype() - return ctype(CTypesPrimitive._to_ctypes(init)) - - if kind == 'int' or kind == 'byte': - @classmethod - def _cast_from(cls, source): - source = _cast_source_to_int(source) - source = ctype(source).value # cast within range - return cls(source) - def __int__(self): - return self._value - - if kind == 'bool': - @classmethod - def _cast_from(cls, source): - if not isinstance(source, (int, long, float)): - source = _cast_source_to_int(source) - return cls(bool(source)) - def __int__(self): - return int(self._value) - - if kind == 'char': - @classmethod - def _cast_from(cls, source): - source = _cast_source_to_int(source) - source = bytechr(source & 0xFF) - return cls(source) - def __int__(self): - return ord(self._value) - - if kind == 'float': - @classmethod - def _cast_from(cls, source): - if isinstance(source, float): - pass - elif isinstance(source, CTypesGenericPrimitive): - if hasattr(source, '__float__'): - source = float(source) - else: - source = int(source) - else: - source = _cast_source_to_int(source) - source = ctype(source).value # fix precision - return cls(source) - def __int__(self): - return int(self._value) - def __float__(self): - return self._value - - _cast_to_integer = __int__ - - if kind == 'int' or kind == 'byte' or kind == 'bool': - @staticmethod - def _to_ctypes(x): - if not isinstance(x, (int, long)): - if isinstance(x, CTypesData): - x = int(x) - else: - raise TypeError("integer expected, got %s" % - type(x).__name__) - if ctype(x).value != x: - if not is_signed and x < 0: - raise OverflowError("%s: negative integer" % name) - else: - raise OverflowError("%s: integer out of bounds" - % name) - return x - - if kind == 'char': - @staticmethod - def _to_ctypes(x): - if isinstance(x, bytes) and len(x) == 1: - return x - if isinstance(x, CTypesPrimitive): # > - return x._value - raise TypeError("character expected, got %s" % - type(x).__name__) - def __nonzero__(self): - return ord(self._value) != 0 - else: - def __nonzero__(self): - return self._value != 0 - __bool__ = __nonzero__ - - if kind == 'float': - @staticmethod - def _to_ctypes(x): - if 
not isinstance(x, (int, long, float, CTypesData)): - raise TypeError("float expected, got %s" % - type(x).__name__) - return ctype(x).value - - @staticmethod - def _from_ctypes(value): - return getattr(value, 'value', value) - - @staticmethod - def _initialize(blob, init): - blob.value = CTypesPrimitive._to_ctypes(init) - - if kind == 'char': - def _to_string(self, maxlen): - return self._value - if kind == 'byte': - def _to_string(self, maxlen): - return chr(self._value & 0xff) - # - CTypesPrimitive._fix_class() - return CTypesPrimitive - - def new_pointer_type(self, BItem): - getbtype = self.ffi._get_cached_btype - if BItem is getbtype(model.PrimitiveType('char')): - kind = 'charp' - elif BItem in (getbtype(model.PrimitiveType('signed char')), - getbtype(model.PrimitiveType('unsigned char'))): - kind = 'bytep' - elif BItem is getbtype(model.void_type): - kind = 'voidp' - else: - kind = 'generic' - # - class CTypesPtr(CTypesGenericPtr): - __slots__ = ['_own'] - if kind == 'charp': - __slots__ += ['__as_strbuf'] - _BItem = BItem - if hasattr(BItem, '_ctype'): - _ctype = ctypes.POINTER(BItem._ctype) - _bitem_size = ctypes.sizeof(BItem._ctype) - else: - _ctype = ctypes.c_void_p - if issubclass(BItem, CTypesGenericArray): - _reftypename = BItem._get_c_name('(* &)') - else: - _reftypename = BItem._get_c_name(' * &') - - def __init__(self, init): - ctypeobj = BItem._create_ctype_obj(init) - if kind == 'charp': - self.__as_strbuf = ctypes.create_string_buffer( - ctypeobj.value + b'\x00') - self._as_ctype_ptr = ctypes.cast( - self.__as_strbuf, self._ctype) - else: - self._as_ctype_ptr = ctypes.pointer(ctypeobj) - self._address = ctypes.cast(self._as_ctype_ptr, - ctypes.c_void_p).value - self._own = True - - def __add__(self, other): - if isinstance(other, (int, long)): - return self._new_pointer_at(self._address + - other * self._bitem_size) - else: - return NotImplemented - - def __sub__(self, other): - if isinstance(other, (int, long)): - return self._new_pointer_at(self._address - - other * self._bitem_size) - elif type(self) is type(other): - return (self._address - other._address) // self._bitem_size - else: - return NotImplemented - - def __getitem__(self, index): - if getattr(self, '_own', False) and index != 0: - raise IndexError - return BItem._from_ctypes(self._as_ctype_ptr[index]) - - def __setitem__(self, index, value): - self._as_ctype_ptr[index] = BItem._to_ctypes(value) - - if kind == 'charp' or kind == 'voidp': - @classmethod - def _arg_to_ctypes(cls, *value): - if value and isinstance(value[0], bytes): - return ctypes.c_char_p(value[0]) - else: - return super(CTypesPtr, cls)._arg_to_ctypes(*value) - - if kind == 'charp' or kind == 'bytep': - def _to_string(self, maxlen): - if maxlen < 0: - maxlen = sys.maxsize - p = ctypes.cast(self._as_ctype_ptr, - ctypes.POINTER(ctypes.c_char)) - n = 0 - while n < maxlen and p[n] != b'\x00': - n += 1 - return b''.join([p[i] for i in range(n)]) - - def _get_own_repr(self): - if getattr(self, '_own', False): - return 'owning %d bytes' % ( - ctypes.sizeof(self._as_ctype_ptr.contents),) - return super(CTypesPtr, self)._get_own_repr() - # - if (BItem is self.ffi._get_cached_btype(model.void_type) or - BItem is self.ffi._get_cached_btype(model.PrimitiveType('char'))): - CTypesPtr._automatic_casts = True - # - CTypesPtr._fix_class() - return CTypesPtr - - def new_array_type(self, CTypesPtr, length): - if length is None: - brackets = ' &[]' - else: - brackets = ' &[%d]' % length - BItem = CTypesPtr._BItem - getbtype = self.ffi._get_cached_btype - if 
BItem is getbtype(model.PrimitiveType('char')): - kind = 'char' - elif BItem in (getbtype(model.PrimitiveType('signed char')), - getbtype(model.PrimitiveType('unsigned char'))): - kind = 'byte' - else: - kind = 'generic' - # - class CTypesArray(CTypesGenericArray): - __slots__ = ['_blob', '_own'] - if length is not None: - _ctype = BItem._ctype * length - else: - __slots__.append('_ctype') - _reftypename = BItem._get_c_name(brackets) - _declared_length = length - _CTPtr = CTypesPtr - - def __init__(self, init): - if length is None: - if isinstance(init, (int, long)): - len1 = init - init = None - elif kind == 'char' and isinstance(init, bytes): - len1 = len(init) + 1 # extra null - else: - init = tuple(init) - len1 = len(init) - self._ctype = BItem._ctype * len1 - self._blob = self._ctype() - self._own = True - if init is not None: - self._initialize(self._blob, init) - - @staticmethod - def _initialize(blob, init): - if isinstance(init, bytes): - init = [init[i:i+1] for i in range(len(init))] - else: - if isinstance(init, CTypesGenericArray): - if (len(init) != len(blob) or - not isinstance(init, CTypesArray)): - raise TypeError("length/type mismatch: %s" % (init,)) - init = tuple(init) - if len(init) > len(blob): - raise IndexError("too many initializers") - addr = ctypes.cast(blob, ctypes.c_void_p).value - PTR = ctypes.POINTER(BItem._ctype) - itemsize = ctypes.sizeof(BItem._ctype) - for i, value in enumerate(init): - p = ctypes.cast(addr + i * itemsize, PTR) - BItem._initialize(p.contents, value) - - def __len__(self): - return len(self._blob) - - def __getitem__(self, index): - if not (0 <= index < len(self._blob)): - raise IndexError - return BItem._from_ctypes(self._blob[index]) - - def __setitem__(self, index, value): - if not (0 <= index < len(self._blob)): - raise IndexError - self._blob[index] = BItem._to_ctypes(value) - - if kind == 'char' or kind == 'byte': - def _to_string(self, maxlen): - if maxlen < 0: - maxlen = len(self._blob) - p = ctypes.cast(self._blob, - ctypes.POINTER(ctypes.c_char)) - n = 0 - while n < maxlen and p[n] != b'\x00': - n += 1 - return b''.join([p[i] for i in range(n)]) - - def _get_own_repr(self): - if getattr(self, '_own', False): - return 'owning %d bytes' % (ctypes.sizeof(self._blob),) - return super(CTypesArray, self)._get_own_repr() - - def _convert_to_address(self, BClass): - if BClass in (CTypesPtr, None) or BClass._automatic_casts: - return ctypes.addressof(self._blob) - else: - return CTypesData._convert_to_address(self, BClass) - - @staticmethod - def _from_ctypes(ctypes_array): - self = CTypesArray.__new__(CTypesArray) - self._blob = ctypes_array - return self - - @staticmethod - def _arg_to_ctypes(value): - return CTypesPtr._arg_to_ctypes(value) - - def __add__(self, other): - if isinstance(other, (int, long)): - return CTypesPtr._new_pointer_at( - ctypes.addressof(self._blob) + - other * ctypes.sizeof(BItem._ctype)) - else: - return NotImplemented - - @classmethod - def _cast_from(cls, source): - raise NotImplementedError("casting to %r" % ( - cls._get_c_name(),)) - # - CTypesArray._fix_class() - return CTypesArray - - def _new_struct_or_union(self, kind, name, base_ctypes_class): - # - class struct_or_union(base_ctypes_class): - pass - struct_or_union.__name__ = '%s_%s' % (kind, name) - kind1 = kind - # - class CTypesStructOrUnion(CTypesBaseStructOrUnion): - __slots__ = ['_blob'] - _ctype = struct_or_union - _reftypename = '%s &' % (name,) - _kind = kind = kind1 - # - CTypesStructOrUnion._fix_class() - return CTypesStructOrUnion - - def 
new_struct_type(self, name): - return self._new_struct_or_union('struct', name, ctypes.Structure) - - def new_union_type(self, name): - return self._new_struct_or_union('union', name, ctypes.Union) - - def complete_struct_or_union(self, CTypesStructOrUnion, fields, tp, - totalsize=-1, totalalignment=-1, sflags=0, - pack=0): - if totalsize >= 0 or totalalignment >= 0: - raise NotImplementedError("the ctypes backend of CFFI does not support " - "structures completed by verify(); please " - "compile and install the _cffi_backend module.") - struct_or_union = CTypesStructOrUnion._ctype - fnames = [fname for (fname, BField, bitsize) in fields] - btypes = [BField for (fname, BField, bitsize) in fields] - bitfields = [bitsize for (fname, BField, bitsize) in fields] - # - bfield_types = {} - cfields = [] - for (fname, BField, bitsize) in fields: - if bitsize < 0: - cfields.append((fname, BField._ctype)) - bfield_types[fname] = BField - else: - cfields.append((fname, BField._ctype, bitsize)) - bfield_types[fname] = Ellipsis - if sflags & 8: - struct_or_union._pack_ = 1 - elif pack: - struct_or_union._pack_ = pack - struct_or_union._fields_ = cfields - CTypesStructOrUnion._bfield_types = bfield_types - # - @staticmethod - def _create_ctype_obj(init): - result = struct_or_union() - if init is not None: - initialize(result, init) - return result - CTypesStructOrUnion._create_ctype_obj = _create_ctype_obj - # - def initialize(blob, init): - if is_union: - if len(init) > 1: - raise ValueError("union initializer: %d items given, but " - "only one supported (use a dict if needed)" - % (len(init),)) - if not isinstance(init, dict): - if isinstance(init, (bytes, unicode)): - raise TypeError("union initializer: got a str") - init = tuple(init) - if len(init) > len(fnames): - raise ValueError("too many values for %s initializer" % - CTypesStructOrUnion._get_c_name()) - init = dict(zip(fnames, init)) - addr = ctypes.addressof(blob) - for fname, value in init.items(): - BField, bitsize = name2fieldtype[fname] - assert bitsize < 0, \ - "not implemented: initializer with bit fields" - offset = CTypesStructOrUnion._offsetof(fname) - PTR = ctypes.POINTER(BField._ctype) - p = ctypes.cast(addr + offset, PTR) - BField._initialize(p.contents, value) - is_union = CTypesStructOrUnion._kind == 'union' - name2fieldtype = dict(zip(fnames, zip(btypes, bitfields))) - # - for fname, BField, bitsize in fields: - if fname == '': - raise NotImplementedError("nested anonymous structs/unions") - if hasattr(CTypesStructOrUnion, fname): - raise ValueError("the field name %r conflicts in " - "the ctypes backend" % fname) - if bitsize < 0: - def getter(self, fname=fname, BField=BField, - offset=CTypesStructOrUnion._offsetof(fname), - PTR=ctypes.POINTER(BField._ctype)): - addr = ctypes.addressof(self._blob) - p = ctypes.cast(addr + offset, PTR) - return BField._from_ctypes(p.contents) - def setter(self, value, fname=fname, BField=BField): - setattr(self._blob, fname, BField._to_ctypes(value)) - # - if issubclass(BField, CTypesGenericArray): - setter = None - if BField._declared_length == 0: - def getter(self, fname=fname, BFieldPtr=BField._CTPtr, - offset=CTypesStructOrUnion._offsetof(fname), - PTR=ctypes.POINTER(BField._ctype)): - addr = ctypes.addressof(self._blob) - p = ctypes.cast(addr + offset, PTR) - return BFieldPtr._from_ctypes(p) - # - else: - def getter(self, fname=fname, BField=BField): - return BField._from_ctypes(getattr(self._blob, fname)) - def setter(self, value, fname=fname, BField=BField): - # xxx obscure workaround - 
value = BField._to_ctypes(value) - oldvalue = getattr(self._blob, fname) - setattr(self._blob, fname, value) - if value != getattr(self._blob, fname): - setattr(self._blob, fname, oldvalue) - raise OverflowError("value too large for bitfield") - setattr(CTypesStructOrUnion, fname, property(getter, setter)) - # - CTypesPtr = self.ffi._get_cached_btype(model.PointerType(tp)) - for fname in fnames: - if hasattr(CTypesPtr, fname): - raise ValueError("the field name %r conflicts in " - "the ctypes backend" % fname) - def getter(self, fname=fname): - return getattr(self[0], fname) - def setter(self, value, fname=fname): - setattr(self[0], fname, value) - setattr(CTypesPtr, fname, property(getter, setter)) - - def new_function_type(self, BArgs, BResult, has_varargs): - nameargs = [BArg._get_c_name() for BArg in BArgs] - if has_varargs: - nameargs.append('...') - nameargs = ', '.join(nameargs) - # - class CTypesFunctionPtr(CTypesGenericPtr): - __slots__ = ['_own_callback', '_name'] - _ctype = ctypes.CFUNCTYPE(getattr(BResult, '_ctype', None), - *[BArg._ctype for BArg in BArgs], - use_errno=True) - _reftypename = BResult._get_c_name('(* &)(%s)' % (nameargs,)) - - def __init__(self, init, error=None): - # create a callback to the Python callable init() - import traceback - assert not has_varargs, "varargs not supported for callbacks" - if getattr(BResult, '_ctype', None) is not None: - error = BResult._from_ctypes( - BResult._create_ctype_obj(error)) - else: - error = None - def callback(*args): - args2 = [] - for arg, BArg in zip(args, BArgs): - args2.append(BArg._from_ctypes(arg)) - try: - res2 = init(*args2) - res2 = BResult._to_ctypes(res2) - except: - traceback.print_exc() - res2 = error - if issubclass(BResult, CTypesGenericPtr): - if res2: - res2 = ctypes.cast(res2, ctypes.c_void_p).value - # .value: http://bugs.python.org/issue1574593 - else: - res2 = None - #print repr(res2) - return res2 - if issubclass(BResult, CTypesGenericPtr): - # The only pointers callbacks can return are void*s: - # http://bugs.python.org/issue5710 - callback_ctype = ctypes.CFUNCTYPE( - ctypes.c_void_p, - *[BArg._ctype for BArg in BArgs], - use_errno=True) - else: - callback_ctype = CTypesFunctionPtr._ctype - self._as_ctype_ptr = callback_ctype(callback) - self._address = ctypes.cast(self._as_ctype_ptr, - ctypes.c_void_p).value - self._own_callback = init - - @staticmethod - def _initialize(ctypes_ptr, value): - if value: - raise NotImplementedError("ctypes backend: not supported: " - "initializers for function pointers") - - def __repr__(self): - c_name = getattr(self, '_name', None) - if c_name: - i = self._reftypename.index('(* &)') - if self._reftypename[i-1] not in ' )*': - c_name = ' ' + c_name - c_name = self._reftypename.replace('(* &)', c_name) - return CTypesData.__repr__(self, c_name) - - def _get_own_repr(self): - if getattr(self, '_own_callback', None) is not None: - return 'calling %r' % (self._own_callback,) - return super(CTypesFunctionPtr, self)._get_own_repr() - - def __call__(self, *args): - if has_varargs: - assert len(args) >= len(BArgs) - extraargs = args[len(BArgs):] - args = args[:len(BArgs)] - else: - assert len(args) == len(BArgs) - ctypes_args = [] - for arg, BArg in zip(args, BArgs): - ctypes_args.append(BArg._arg_to_ctypes(arg)) - if has_varargs: - for i, arg in enumerate(extraargs): - if arg is None: - ctypes_args.append(ctypes.c_void_p(0)) # NULL - continue - if not isinstance(arg, CTypesData): - raise TypeError( - "argument %d passed in the variadic part " - "needs to be a cdata object 
(got %s)" % - (1 + len(BArgs) + i, type(arg).__name__)) - ctypes_args.append(arg._arg_to_ctypes(arg)) - result = self._as_ctype_ptr(*ctypes_args) - return BResult._from_ctypes(result) - # - CTypesFunctionPtr._fix_class() - return CTypesFunctionPtr - - def new_enum_type(self, name, enumerators, enumvalues, CTypesInt): - assert isinstance(name, str) - reverse_mapping = dict(zip(reversed(enumvalues), - reversed(enumerators))) - # - class CTypesEnum(CTypesInt): - __slots__ = [] - _reftypename = '%s &' % name - - def _get_own_repr(self): - value = self._value - try: - return '%d: %s' % (value, reverse_mapping[value]) - except KeyError: - return str(value) - - def _to_string(self, maxlen): - value = self._value - try: - return reverse_mapping[value] - except KeyError: - return str(value) - # - CTypesEnum._fix_class() - return CTypesEnum - - def get_errno(self): - return ctypes.get_errno() - - def set_errno(self, value): - ctypes.set_errno(value) - - def string(self, b, maxlen=-1): - return b._to_string(maxlen) - - def buffer(self, bptr, size=-1): - raise NotImplementedError("buffer() with ctypes backend") - - def sizeof(self, cdata_or_BType): - if isinstance(cdata_or_BType, CTypesData): - return cdata_or_BType._get_size_of_instance() - else: - assert issubclass(cdata_or_BType, CTypesData) - return cdata_or_BType._get_size() - - def alignof(self, BType): - assert issubclass(BType, CTypesData) - return BType._alignment() - - def newp(self, BType, source): - if not issubclass(BType, CTypesData): - raise TypeError - return BType._newp(source) - - def cast(self, BType, source): - return BType._cast_from(source) - - def callback(self, BType, source, error, onerror): - assert onerror is None # XXX not implemented - return BType(source, error) - - _weakref_cache_ref = None - - def gcp(self, cdata, destructor, size=0): - if self._weakref_cache_ref is None: - import weakref - class MyRef(weakref.ref): - def __eq__(self, other): - myref = self() - return self is other or ( - myref is not None and myref is other()) - def __ne__(self, other): - return not (self == other) - def __hash__(self): - try: - return self._hash - except AttributeError: - self._hash = hash(self()) - return self._hash - self._weakref_cache_ref = {}, MyRef - weak_cache, MyRef = self._weakref_cache_ref - - if destructor is None: - try: - del weak_cache[MyRef(cdata)] - except KeyError: - raise TypeError("Can remove destructor only on a object " - "previously returned by ffi.gc()") - return None - - def remove(k): - cdata, destructor = weak_cache.pop(k, (None, None)) - if destructor is not None: - destructor(cdata) - - new_cdata = self.cast(self.typeof(cdata), cdata) - assert new_cdata is not cdata - weak_cache[MyRef(new_cdata, remove)] = (cdata, destructor) - return new_cdata - - typeof = type - - def getcname(self, BType, replace_with): - return BType._get_c_name(replace_with) - - def typeoffsetof(self, BType, fieldname, num=0): - if isinstance(fieldname, str): - if num == 0 and issubclass(BType, CTypesGenericPtr): - BType = BType._BItem - if not issubclass(BType, CTypesBaseStructOrUnion): - raise TypeError("expected a struct or union ctype") - BField = BType._bfield_types[fieldname] - if BField is Ellipsis: - raise TypeError("not supported for bitfields") - return (BField, BType._offsetof(fieldname)) - elif isinstance(fieldname, (int, long)): - if issubclass(BType, CTypesGenericArray): - BType = BType._CTPtr - if not issubclass(BType, CTypesGenericPtr): - raise TypeError("expected an array or ptr ctype") - BItem = BType._BItem - offset 
= BItem._get_size() * fieldname - if offset > sys.maxsize: - raise OverflowError - return (BItem, offset) - else: - raise TypeError(type(fieldname)) - - def rawaddressof(self, BTypePtr, cdata, offset=None): - if isinstance(cdata, CTypesBaseStructOrUnion): - ptr = ctypes.pointer(type(cdata)._to_ctypes(cdata)) - elif isinstance(cdata, CTypesGenericPtr): - if offset is None or not issubclass(type(cdata)._BItem, - CTypesBaseStructOrUnion): - raise TypeError("unexpected cdata type") - ptr = type(cdata)._to_ctypes(cdata) - elif isinstance(cdata, CTypesGenericArray): - ptr = type(cdata)._to_ctypes(cdata) - else: - raise TypeError("expected a ") - if offset: - ptr = ctypes.cast( - ctypes.c_void_p( - ctypes.cast(ptr, ctypes.c_void_p).value + offset), - type(ptr)) - return BTypePtr._from_ctypes(ptr) - - -class CTypesLibrary(object): - - def __init__(self, backend, cdll): - self.backend = backend - self.cdll = cdll - - def load_function(self, BType, name): - c_func = getattr(self.cdll, name) - funcobj = BType._from_ctypes(c_func) - funcobj._name = name - return funcobj - - def read_variable(self, BType, name): - try: - ctypes_obj = BType._ctype.in_dll(self.cdll, name) - except AttributeError as e: - raise NotImplementedError(e) - return BType._from_ctypes(ctypes_obj) - - def write_variable(self, BType, name, value): - new_ctypes_obj = BType._to_ctypes(value) - ctypes_obj = BType._ctype.in_dll(self.cdll, name) - ctypes.memmove(ctypes.addressof(ctypes_obj), - ctypes.addressof(new_ctypes_obj), - ctypes.sizeof(BType._ctype)) diff --git a/env/lib/python3.10/site-packages/cffi/cffi_opcode.py b/env/lib/python3.10/site-packages/cffi/cffi_opcode.py deleted file mode 100644 index 6421df6..0000000 --- a/env/lib/python3.10/site-packages/cffi/cffi_opcode.py +++ /dev/null @@ -1,187 +0,0 @@ -from .error import VerificationError - -class CffiOp(object): - def __init__(self, op, arg): - self.op = op - self.arg = arg - - def as_c_expr(self): - if self.op is None: - assert isinstance(self.arg, str) - return '(_cffi_opcode_t)(%s)' % (self.arg,) - classname = CLASS_NAME[self.op] - return '_CFFI_OP(_CFFI_OP_%s, %s)' % (classname, self.arg) - - def as_python_bytes(self): - if self.op is None and self.arg.isdigit(): - value = int(self.arg) # non-negative: '-' not in self.arg - if value >= 2**31: - raise OverflowError("cannot emit %r: limited to 2**31-1" - % (self.arg,)) - return format_four_bytes(value) - if isinstance(self.arg, str): - raise VerificationError("cannot emit to Python: %r" % (self.arg,)) - return format_four_bytes((self.arg << 8) | self.op) - - def __str__(self): - classname = CLASS_NAME.get(self.op, self.op) - return '(%s %s)' % (classname, self.arg) - -def format_four_bytes(num): - return '\\x%02X\\x%02X\\x%02X\\x%02X' % ( - (num >> 24) & 0xFF, - (num >> 16) & 0xFF, - (num >> 8) & 0xFF, - (num ) & 0xFF) - -OP_PRIMITIVE = 1 -OP_POINTER = 3 -OP_ARRAY = 5 -OP_OPEN_ARRAY = 7 -OP_STRUCT_UNION = 9 -OP_ENUM = 11 -OP_FUNCTION = 13 -OP_FUNCTION_END = 15 -OP_NOOP = 17 -OP_BITFIELD = 19 -OP_TYPENAME = 21 -OP_CPYTHON_BLTN_V = 23 # varargs -OP_CPYTHON_BLTN_N = 25 # noargs -OP_CPYTHON_BLTN_O = 27 # O (i.e. 
a single arg) -OP_CONSTANT = 29 -OP_CONSTANT_INT = 31 -OP_GLOBAL_VAR = 33 -OP_DLOPEN_FUNC = 35 -OP_DLOPEN_CONST = 37 -OP_GLOBAL_VAR_F = 39 -OP_EXTERN_PYTHON = 41 - -PRIM_VOID = 0 -PRIM_BOOL = 1 -PRIM_CHAR = 2 -PRIM_SCHAR = 3 -PRIM_UCHAR = 4 -PRIM_SHORT = 5 -PRIM_USHORT = 6 -PRIM_INT = 7 -PRIM_UINT = 8 -PRIM_LONG = 9 -PRIM_ULONG = 10 -PRIM_LONGLONG = 11 -PRIM_ULONGLONG = 12 -PRIM_FLOAT = 13 -PRIM_DOUBLE = 14 -PRIM_LONGDOUBLE = 15 - -PRIM_WCHAR = 16 -PRIM_INT8 = 17 -PRIM_UINT8 = 18 -PRIM_INT16 = 19 -PRIM_UINT16 = 20 -PRIM_INT32 = 21 -PRIM_UINT32 = 22 -PRIM_INT64 = 23 -PRIM_UINT64 = 24 -PRIM_INTPTR = 25 -PRIM_UINTPTR = 26 -PRIM_PTRDIFF = 27 -PRIM_SIZE = 28 -PRIM_SSIZE = 29 -PRIM_INT_LEAST8 = 30 -PRIM_UINT_LEAST8 = 31 -PRIM_INT_LEAST16 = 32 -PRIM_UINT_LEAST16 = 33 -PRIM_INT_LEAST32 = 34 -PRIM_UINT_LEAST32 = 35 -PRIM_INT_LEAST64 = 36 -PRIM_UINT_LEAST64 = 37 -PRIM_INT_FAST8 = 38 -PRIM_UINT_FAST8 = 39 -PRIM_INT_FAST16 = 40 -PRIM_UINT_FAST16 = 41 -PRIM_INT_FAST32 = 42 -PRIM_UINT_FAST32 = 43 -PRIM_INT_FAST64 = 44 -PRIM_UINT_FAST64 = 45 -PRIM_INTMAX = 46 -PRIM_UINTMAX = 47 -PRIM_FLOATCOMPLEX = 48 -PRIM_DOUBLECOMPLEX = 49 -PRIM_CHAR16 = 50 -PRIM_CHAR32 = 51 - -_NUM_PRIM = 52 -_UNKNOWN_PRIM = -1 -_UNKNOWN_FLOAT_PRIM = -2 -_UNKNOWN_LONG_DOUBLE = -3 - -_IO_FILE_STRUCT = -1 - -PRIMITIVE_TO_INDEX = { - 'char': PRIM_CHAR, - 'short': PRIM_SHORT, - 'int': PRIM_INT, - 'long': PRIM_LONG, - 'long long': PRIM_LONGLONG, - 'signed char': PRIM_SCHAR, - 'unsigned char': PRIM_UCHAR, - 'unsigned short': PRIM_USHORT, - 'unsigned int': PRIM_UINT, - 'unsigned long': PRIM_ULONG, - 'unsigned long long': PRIM_ULONGLONG, - 'float': PRIM_FLOAT, - 'double': PRIM_DOUBLE, - 'long double': PRIM_LONGDOUBLE, - '_cffi_float_complex_t': PRIM_FLOATCOMPLEX, - '_cffi_double_complex_t': PRIM_DOUBLECOMPLEX, - '_Bool': PRIM_BOOL, - 'wchar_t': PRIM_WCHAR, - 'char16_t': PRIM_CHAR16, - 'char32_t': PRIM_CHAR32, - 'int8_t': PRIM_INT8, - 'uint8_t': PRIM_UINT8, - 'int16_t': PRIM_INT16, - 'uint16_t': PRIM_UINT16, - 'int32_t': PRIM_INT32, - 'uint32_t': PRIM_UINT32, - 'int64_t': PRIM_INT64, - 'uint64_t': PRIM_UINT64, - 'intptr_t': PRIM_INTPTR, - 'uintptr_t': PRIM_UINTPTR, - 'ptrdiff_t': PRIM_PTRDIFF, - 'size_t': PRIM_SIZE, - 'ssize_t': PRIM_SSIZE, - 'int_least8_t': PRIM_INT_LEAST8, - 'uint_least8_t': PRIM_UINT_LEAST8, - 'int_least16_t': PRIM_INT_LEAST16, - 'uint_least16_t': PRIM_UINT_LEAST16, - 'int_least32_t': PRIM_INT_LEAST32, - 'uint_least32_t': PRIM_UINT_LEAST32, - 'int_least64_t': PRIM_INT_LEAST64, - 'uint_least64_t': PRIM_UINT_LEAST64, - 'int_fast8_t': PRIM_INT_FAST8, - 'uint_fast8_t': PRIM_UINT_FAST8, - 'int_fast16_t': PRIM_INT_FAST16, - 'uint_fast16_t': PRIM_UINT_FAST16, - 'int_fast32_t': PRIM_INT_FAST32, - 'uint_fast32_t': PRIM_UINT_FAST32, - 'int_fast64_t': PRIM_INT_FAST64, - 'uint_fast64_t': PRIM_UINT_FAST64, - 'intmax_t': PRIM_INTMAX, - 'uintmax_t': PRIM_UINTMAX, - } - -F_UNION = 0x01 -F_CHECK_FIELDS = 0x02 -F_PACKED = 0x04 -F_EXTERNAL = 0x08 -F_OPAQUE = 0x10 - -G_FLAGS = dict([('_CFFI_' + _key, globals()[_key]) - for _key in ['F_UNION', 'F_CHECK_FIELDS', 'F_PACKED', - 'F_EXTERNAL', 'F_OPAQUE']]) - -CLASS_NAME = {} -for _name, _value in list(globals().items()): - if _name.startswith('OP_') and isinstance(_value, int): - CLASS_NAME[_value] = _name[3:] diff --git a/env/lib/python3.10/site-packages/cffi/commontypes.py b/env/lib/python3.10/site-packages/cffi/commontypes.py deleted file mode 100644 index d4dae35..0000000 --- a/env/lib/python3.10/site-packages/cffi/commontypes.py +++ /dev/null @@ -1,82 +0,0 @@ -import sys -from . 
import model -from .error import FFIError - - -COMMON_TYPES = {} - -try: - # fetch "bool" and all simple Windows types - from _cffi_backend import _get_common_types - _get_common_types(COMMON_TYPES) -except ImportError: - pass - -COMMON_TYPES['FILE'] = model.unknown_type('FILE', '_IO_FILE') -COMMON_TYPES['bool'] = '_Bool' # in case we got ImportError above -COMMON_TYPES['float _Complex'] = '_cffi_float_complex_t' -COMMON_TYPES['double _Complex'] = '_cffi_double_complex_t' - -for _type in model.PrimitiveType.ALL_PRIMITIVE_TYPES: - if _type.endswith('_t'): - COMMON_TYPES[_type] = _type -del _type - -_CACHE = {} - -def resolve_common_type(parser, commontype): - try: - return _CACHE[commontype] - except KeyError: - cdecl = COMMON_TYPES.get(commontype, commontype) - if not isinstance(cdecl, str): - result, quals = cdecl, 0 # cdecl is already a BaseType - elif cdecl in model.PrimitiveType.ALL_PRIMITIVE_TYPES: - result, quals = model.PrimitiveType(cdecl), 0 - elif cdecl == 'set-unicode-needed': - raise FFIError("The Windows type %r is only available after " - "you call ffi.set_unicode()" % (commontype,)) - else: - if commontype == cdecl: - raise FFIError( - "Unsupported type: %r. Please look at " - "http://cffi.readthedocs.io/en/latest/cdef.html#ffi-cdef-limitations " - "and file an issue if you think this type should really " - "be supported." % (commontype,)) - result, quals = parser.parse_type_and_quals(cdecl) # recursive - - assert isinstance(result, model.BaseTypeByIdentity) - _CACHE[commontype] = result, quals - return result, quals - - -# ____________________________________________________________ -# extra types for Windows (most of them are in commontypes.c) - - -def win_common_types(): - return { - "UNICODE_STRING": model.StructType( - "_UNICODE_STRING", - ["Length", - "MaximumLength", - "Buffer"], - [model.PrimitiveType("unsigned short"), - model.PrimitiveType("unsigned short"), - model.PointerType(model.PrimitiveType("wchar_t"))], - [-1, -1, -1]), - "PUNICODE_STRING": "UNICODE_STRING *", - "PCUNICODE_STRING": "const UNICODE_STRING *", - - "TBYTE": "set-unicode-needed", - "TCHAR": "set-unicode-needed", - "LPCTSTR": "set-unicode-needed", - "PCTSTR": "set-unicode-needed", - "LPTSTR": "set-unicode-needed", - "PTSTR": "set-unicode-needed", - "PTBYTE": "set-unicode-needed", - "PTCHAR": "set-unicode-needed", - } - -if sys.platform == 'win32': - COMMON_TYPES.update(win_common_types()) diff --git a/env/lib/python3.10/site-packages/cffi/cparser.py b/env/lib/python3.10/site-packages/cffi/cparser.py deleted file mode 100644 index eee83ca..0000000 --- a/env/lib/python3.10/site-packages/cffi/cparser.py +++ /dev/null @@ -1,1015 +0,0 @@ -from . import model -from .commontypes import COMMON_TYPES, resolve_common_type -from .error import FFIError, CDefError -try: - from . import _pycparser as pycparser -except ImportError: - import pycparser -import weakref, re, sys - -try: - if sys.version_info < (3,): - import thread as _thread - else: - import _thread - lock = _thread.allocate_lock() -except ImportError: - lock = None - -def _workaround_for_static_import_finders(): - # Issue #392: packaging tools like cx_Freeze can not find these - # because pycparser uses exec dynamic import. This is an obscure - # workaround. This function is never called. 
- import pycparser.yacctab - import pycparser.lextab - -CDEF_SOURCE_STRING = "" -_r_comment = re.compile(r"/\*.*?\*/|//([^\n\\]|\\.)*?$", - re.DOTALL | re.MULTILINE) -_r_define = re.compile(r"^\s*#\s*define\s+([A-Za-z_][A-Za-z_0-9]*)" - r"\b((?:[^\n\\]|\\.)*?)$", - re.DOTALL | re.MULTILINE) -_r_line_directive = re.compile(r"^[ \t]*#[ \t]*(?:line|\d+)\b.*$", re.MULTILINE) -_r_partial_enum = re.compile(r"=\s*\.\.\.\s*[,}]|\.\.\.\s*\}") -_r_enum_dotdotdot = re.compile(r"__dotdotdot\d+__$") -_r_partial_array = re.compile(r"\[\s*\.\.\.\s*\]") -_r_words = re.compile(r"\w+|\S") -_parser_cache = None -_r_int_literal = re.compile(r"-?0?x?[0-9a-f]+[lu]*$", re.IGNORECASE) -_r_stdcall1 = re.compile(r"\b(__stdcall|WINAPI)\b") -_r_stdcall2 = re.compile(r"[(]\s*(__stdcall|WINAPI)\b") -_r_cdecl = re.compile(r"\b__cdecl\b") -_r_extern_python = re.compile(r'\bextern\s*"' - r'(Python|Python\s*\+\s*C|C\s*\+\s*Python)"\s*.') -_r_star_const_space = re.compile( # matches "* const " - r"[*]\s*((const|volatile|restrict)\b\s*)+") -_r_int_dotdotdot = re.compile(r"(\b(int|long|short|signed|unsigned|char)\s*)+" - r"\.\.\.") -_r_float_dotdotdot = re.compile(r"\b(double|float)\s*\.\.\.") - -def _get_parser(): - global _parser_cache - if _parser_cache is None: - _parser_cache = pycparser.CParser() - return _parser_cache - -def _workaround_for_old_pycparser(csource): - # Workaround for a pycparser issue (fixed between pycparser 2.10 and - # 2.14): "char*const***" gives us a wrong syntax tree, the same as - # for "char***(*const)". This means we can't tell the difference - # afterwards. But "char(*const(***))" gives us the right syntax - # tree. The issue only occurs if there are several stars in - # sequence with no parenthesis inbetween, just possibly qualifiers. - # Attempt to fix it by adding some parentheses in the source: each - # time we see "* const" or "* const *", we add an opening - # parenthesis before each star---the hard part is figuring out where - # to close them. - parts = [] - while True: - match = _r_star_const_space.search(csource) - if not match: - break - #print repr(''.join(parts)+csource), '=>', - parts.append(csource[:match.start()]) - parts.append('('); closing = ')' - parts.append(match.group()) # e.g. 
"* const " - endpos = match.end() - if csource.startswith('*', endpos): - parts.append('('); closing += ')' - level = 0 - i = endpos - while i < len(csource): - c = csource[i] - if c == '(': - level += 1 - elif c == ')': - if level == 0: - break - level -= 1 - elif c in ',;=': - if level == 0: - break - i += 1 - csource = csource[endpos:i] + closing + csource[i:] - #print repr(''.join(parts)+csource) - parts.append(csource) - return ''.join(parts) - -def _preprocess_extern_python(csource): - # input: `extern "Python" int foo(int);` or - # `extern "Python" { int foo(int); }` - # output: - # void __cffi_extern_python_start; - # int foo(int); - # void __cffi_extern_python_stop; - # - # input: `extern "Python+C" int foo(int);` - # output: - # void __cffi_extern_python_plus_c_start; - # int foo(int); - # void __cffi_extern_python_stop; - parts = [] - while True: - match = _r_extern_python.search(csource) - if not match: - break - endpos = match.end() - 1 - #print - #print ''.join(parts)+csource - #print '=>' - parts.append(csource[:match.start()]) - if 'C' in match.group(1): - parts.append('void __cffi_extern_python_plus_c_start; ') - else: - parts.append('void __cffi_extern_python_start; ') - if csource[endpos] == '{': - # grouping variant - closing = csource.find('}', endpos) - if closing < 0: - raise CDefError("'extern \"Python\" {': no '}' found") - if csource.find('{', endpos + 1, closing) >= 0: - raise NotImplementedError("cannot use { } inside a block " - "'extern \"Python\" { ... }'") - parts.append(csource[endpos+1:closing]) - csource = csource[closing+1:] - else: - # non-grouping variant - semicolon = csource.find(';', endpos) - if semicolon < 0: - raise CDefError("'extern \"Python\": no ';' found") - parts.append(csource[endpos:semicolon+1]) - csource = csource[semicolon+1:] - parts.append(' void __cffi_extern_python_stop;') - #print ''.join(parts)+csource - #print - parts.append(csource) - return ''.join(parts) - -def _warn_for_string_literal(csource): - if '"' not in csource: - return - for line in csource.splitlines(): - if '"' in line and not line.lstrip().startswith('#'): - import warnings - warnings.warn("String literal found in cdef() or type source. " - "String literals are ignored here, but you should " - "remove them anyway because some character sequences " - "confuse pre-parsing.") - break - -def _warn_for_non_extern_non_static_global_variable(decl): - if not decl.storage: - import warnings - warnings.warn("Global variable '%s' in cdef(): for consistency " - "with C it should have a storage class specifier " - "(usually 'extern')" % (decl.name,)) - -def _remove_line_directives(csource): - # _r_line_directive matches whole lines, without the final \n, if they - # start with '#line' with some spacing allowed, or '#NUMBER'. This - # function stores them away and replaces them with exactly the string - # '#line@N', where N is the index in the list 'line_directives'. 
- line_directives = [] - def replace(m): - i = len(line_directives) - line_directives.append(m.group()) - return '#line@%d' % i - csource = _r_line_directive.sub(replace, csource) - return csource, line_directives - -def _put_back_line_directives(csource, line_directives): - def replace(m): - s = m.group() - if not s.startswith('#line@'): - raise AssertionError("unexpected #line directive " - "(should have been processed and removed") - return line_directives[int(s[6:])] - return _r_line_directive.sub(replace, csource) - -def _preprocess(csource): - # First, remove the lines of the form '#line N "filename"' because - # the "filename" part could confuse the rest - csource, line_directives = _remove_line_directives(csource) - # Remove comments. NOTE: this only work because the cdef() section - # should not contain any string literals (except in line directives)! - def replace_keeping_newlines(m): - return ' ' + m.group().count('\n') * '\n' - csource = _r_comment.sub(replace_keeping_newlines, csource) - # Remove the "#define FOO x" lines - macros = {} - for match in _r_define.finditer(csource): - macroname, macrovalue = match.groups() - macrovalue = macrovalue.replace('\\\n', '').strip() - macros[macroname] = macrovalue - csource = _r_define.sub('', csource) - # - if pycparser.__version__ < '2.14': - csource = _workaround_for_old_pycparser(csource) - # - # BIG HACK: replace WINAPI or __stdcall with "volatile const". - # It doesn't make sense for the return type of a function to be - # "volatile volatile const", so we abuse it to detect __stdcall... - # Hack number 2 is that "int(volatile *fptr)();" is not valid C - # syntax, so we place the "volatile" before the opening parenthesis. - csource = _r_stdcall2.sub(' volatile volatile const(', csource) - csource = _r_stdcall1.sub(' volatile volatile const ', csource) - csource = _r_cdecl.sub(' ', csource) - # - # Replace `extern "Python"` with start/end markers - csource = _preprocess_extern_python(csource) - # - # Now there should not be any string literal left; warn if we get one - _warn_for_string_literal(csource) - # - # Replace "[...]" with "[__dotdotdotarray__]" - csource = _r_partial_array.sub('[__dotdotdotarray__]', csource) - # - # Replace "...}" with "__dotdotdotNUM__}". This construction should - # occur only at the end of enums; at the end of structs we have "...;}" - # and at the end of vararg functions "...);". Also replace "=...[,}]" - # with ",__dotdotdotNUM__[,}]": this occurs in the enums too, when - # giving an unknown value. - matches = list(_r_partial_enum.finditer(csource)) - for number, match in enumerate(reversed(matches)): - p = match.start() - if csource[p] == '=': - p2 = csource.find('...', p, match.end()) - assert p2 > p - csource = '%s,__dotdotdot%d__ %s' % (csource[:p], number, - csource[p2+3:]) - else: - assert csource[p:p+3] == '...' - csource = '%s __dotdotdot%d__ %s' % (csource[:p], number, - csource[p+3:]) - # Replace "int ..." or "unsigned long int..." with "__dotdotdotint__" - csource = _r_int_dotdotdot.sub(' __dotdotdotint__ ', csource) - # Replace "float ..." or "double..." with "__dotdotdotfloat__" - csource = _r_float_dotdotdot.sub(' __dotdotdotfloat__ ', csource) - # Replace all remaining "..." with the same name, "__dotdotdot__", - # which is declared with a typedef for the purpose of C parsing. 
- csource = csource.replace('...', ' __dotdotdot__ ') - # Finally, put back the line directives - csource = _put_back_line_directives(csource, line_directives) - return csource, macros - -def _common_type_names(csource): - # Look in the source for what looks like usages of types from the - # list of common types. A "usage" is approximated here as the - # appearance of the word, minus a "definition" of the type, which - # is the last word in a "typedef" statement. Approximative only - # but should be fine for all the common types. - look_for_words = set(COMMON_TYPES) - look_for_words.add(';') - look_for_words.add(',') - look_for_words.add('(') - look_for_words.add(')') - look_for_words.add('typedef') - words_used = set() - is_typedef = False - paren = 0 - previous_word = '' - for word in _r_words.findall(csource): - if word in look_for_words: - if word == ';': - if is_typedef: - words_used.discard(previous_word) - look_for_words.discard(previous_word) - is_typedef = False - elif word == 'typedef': - is_typedef = True - paren = 0 - elif word == '(': - paren += 1 - elif word == ')': - paren -= 1 - elif word == ',': - if is_typedef and paren == 0: - words_used.discard(previous_word) - look_for_words.discard(previous_word) - else: # word in COMMON_TYPES - words_used.add(word) - previous_word = word - return words_used - - -class Parser(object): - - def __init__(self): - self._declarations = {} - self._included_declarations = set() - self._anonymous_counter = 0 - self._structnode2type = weakref.WeakKeyDictionary() - self._options = {} - self._int_constants = {} - self._recomplete = [] - self._uses_new_feature = None - - def _parse(self, csource): - csource, macros = _preprocess(csource) - # XXX: for more efficiency we would need to poke into the - # internals of CParser... the following registers the - # typedefs, because their presence or absence influences the - # parsing itself (but what they are typedef'ed to plays no role) - ctn = _common_type_names(csource) - typenames = [] - for name in sorted(self._declarations): - if name.startswith('typedef '): - name = name[8:] - typenames.append(name) - ctn.discard(name) - typenames += sorted(ctn) - # - csourcelines = [] - csourcelines.append('# 1 ""') - for typename in typenames: - csourcelines.append('typedef int %s;' % typename) - csourcelines.append('typedef int __dotdotdotint__, __dotdotdotfloat__,' - ' __dotdotdot__;') - # this forces pycparser to consider the following in the file - # called from line 1 - csourcelines.append('# 1 "%s"' % (CDEF_SOURCE_STRING,)) - csourcelines.append(csource) - csourcelines.append('') # see test_missing_newline_bug - fullcsource = '\n'.join(csourcelines) - if lock is not None: - lock.acquire() # pycparser is not thread-safe... - try: - ast = _get_parser().parse(fullcsource) - except pycparser.c_parser.ParseError as e: - self.convert_pycparser_error(e, csource) - finally: - if lock is not None: - lock.release() - # csource will be used to find buggy source text - return ast, macros, csource - - def _convert_pycparser_error(self, e, csource): - # xxx look for ":NUM:" at the start of str(e) - # and interpret that as a line number. This will not work if - # the user gives explicit ``# NUM "FILE"`` directives. 
- line = None - msg = str(e) - match = re.match(r"%s:(\d+):" % (CDEF_SOURCE_STRING,), msg) - if match: - linenum = int(match.group(1), 10) - csourcelines = csource.splitlines() - if 1 <= linenum <= len(csourcelines): - line = csourcelines[linenum-1] - return line - - def convert_pycparser_error(self, e, csource): - line = self._convert_pycparser_error(e, csource) - - msg = str(e) - if line: - msg = 'cannot parse "%s"\n%s' % (line.strip(), msg) - else: - msg = 'parse error\n%s' % (msg,) - raise CDefError(msg) - - def parse(self, csource, override=False, packed=False, pack=None, - dllexport=False): - if packed: - if packed != True: - raise ValueError("'packed' should be False or True; use " - "'pack' to give another value") - if pack: - raise ValueError("cannot give both 'pack' and 'packed'") - pack = 1 - elif pack: - if pack & (pack - 1): - raise ValueError("'pack' must be a power of two, not %r" % - (pack,)) - else: - pack = 0 - prev_options = self._options - try: - self._options = {'override': override, - 'packed': pack, - 'dllexport': dllexport} - self._internal_parse(csource) - finally: - self._options = prev_options - - def _internal_parse(self, csource): - ast, macros, csource = self._parse(csource) - # add the macros - self._process_macros(macros) - # find the first "__dotdotdot__" and use that as a separator - # between the repeated typedefs and the real csource - iterator = iter(ast.ext) - for decl in iterator: - if decl.name == '__dotdotdot__': - break - else: - assert 0 - current_decl = None - # - try: - self._inside_extern_python = '__cffi_extern_python_stop' - for decl in iterator: - current_decl = decl - if isinstance(decl, pycparser.c_ast.Decl): - self._parse_decl(decl) - elif isinstance(decl, pycparser.c_ast.Typedef): - if not decl.name: - raise CDefError("typedef does not declare any name", - decl) - quals = 0 - if (isinstance(decl.type.type, pycparser.c_ast.IdentifierType) and - decl.type.type.names[-1].startswith('__dotdotdot')): - realtype = self._get_unknown_type(decl) - elif (isinstance(decl.type, pycparser.c_ast.PtrDecl) and - isinstance(decl.type.type, pycparser.c_ast.TypeDecl) and - isinstance(decl.type.type.type, - pycparser.c_ast.IdentifierType) and - decl.type.type.type.names[-1].startswith('__dotdotdot')): - realtype = self._get_unknown_ptr_type(decl) - else: - realtype, quals = self._get_type_and_quals( - decl.type, name=decl.name, partial_length_ok=True, - typedef_example="*(%s *)0" % (decl.name,)) - self._declare('typedef ' + decl.name, realtype, quals=quals) - elif decl.__class__.__name__ == 'Pragma': - # skip pragma, only in pycparser 2.15 - import warnings - warnings.warn( - "#pragma in cdef() are entirely ignored. " - "They should be removed for now, otherwise your " - "code might behave differently in a future version " - "of CFFI if #pragma support gets added. 
Note that " - "'#pragma pack' needs to be replaced with the " - "'packed' keyword argument to cdef().") - else: - raise CDefError("unexpected <%s>: this construct is valid " - "C but not valid in cdef()" % - decl.__class__.__name__, decl) - except CDefError as e: - if len(e.args) == 1: - e.args = e.args + (current_decl,) - raise - except FFIError as e: - msg = self._convert_pycparser_error(e, csource) - if msg: - e.args = (e.args[0] + "\n *** Err: %s" % msg,) - raise - - def _add_constants(self, key, val): - if key in self._int_constants: - if self._int_constants[key] == val: - return # ignore identical double declarations - raise FFIError( - "multiple declarations of constant: %s" % (key,)) - self._int_constants[key] = val - - def _add_integer_constant(self, name, int_str): - int_str = int_str.lower().rstrip("ul") - neg = int_str.startswith('-') - if neg: - int_str = int_str[1:] - # "010" is not valid oct in py3 - if (int_str.startswith("0") and int_str != '0' - and not int_str.startswith("0x")): - int_str = "0o" + int_str[1:] - pyvalue = int(int_str, 0) - if neg: - pyvalue = -pyvalue - self._add_constants(name, pyvalue) - self._declare('macro ' + name, pyvalue) - - def _process_macros(self, macros): - for key, value in macros.items(): - value = value.strip() - if _r_int_literal.match(value): - self._add_integer_constant(key, value) - elif value == '...': - self._declare('macro ' + key, value) - else: - raise CDefError( - 'only supports one of the following syntax:\n' - ' #define %s ... (literally dot-dot-dot)\n' - ' #define %s NUMBER (with NUMBER an integer' - ' constant, decimal/hex/octal)\n' - 'got:\n' - ' #define %s %s' - % (key, key, key, value)) - - def _declare_function(self, tp, quals, decl): - tp = self._get_type_pointer(tp, quals) - if self._options.get('dllexport'): - tag = 'dllexport_python ' - elif self._inside_extern_python == '__cffi_extern_python_start': - tag = 'extern_python ' - elif self._inside_extern_python == '__cffi_extern_python_plus_c_start': - tag = 'extern_python_plus_c ' - else: - tag = 'function ' - self._declare(tag + decl.name, tp) - - def _parse_decl(self, decl): - node = decl.type - if isinstance(node, pycparser.c_ast.FuncDecl): - tp, quals = self._get_type_and_quals(node, name=decl.name) - assert isinstance(tp, model.RawFunctionType) - self._declare_function(tp, quals, decl) - else: - if isinstance(node, pycparser.c_ast.Struct): - self._get_struct_union_enum_type('struct', node) - elif isinstance(node, pycparser.c_ast.Union): - self._get_struct_union_enum_type('union', node) - elif isinstance(node, pycparser.c_ast.Enum): - self._get_struct_union_enum_type('enum', node) - elif not decl.name: - raise CDefError("construct does not declare any variable", - decl) - # - if decl.name: - tp, quals = self._get_type_and_quals(node, - partial_length_ok=True) - if tp.is_raw_function: - self._declare_function(tp, quals, decl) - elif (tp.is_integer_type() and - hasattr(decl, 'init') and - hasattr(decl.init, 'value') and - _r_int_literal.match(decl.init.value)): - self._add_integer_constant(decl.name, decl.init.value) - elif (tp.is_integer_type() and - isinstance(decl.init, pycparser.c_ast.UnaryOp) and - decl.init.op == '-' and - hasattr(decl.init.expr, 'value') and - _r_int_literal.match(decl.init.expr.value)): - self._add_integer_constant(decl.name, - '-' + decl.init.expr.value) - elif (tp is model.void_type and - decl.name.startswith('__cffi_extern_python_')): - # hack: `extern "Python"` in the C source is replaced - # with "void __cffi_extern_python_start;" and - # 
"void __cffi_extern_python_stop;" - self._inside_extern_python = decl.name - else: - if self._inside_extern_python !='__cffi_extern_python_stop': - raise CDefError( - "cannot declare constants or " - "variables with 'extern \"Python\"'") - if (quals & model.Q_CONST) and not tp.is_array_type: - self._declare('constant ' + decl.name, tp, quals=quals) - else: - _warn_for_non_extern_non_static_global_variable(decl) - self._declare('variable ' + decl.name, tp, quals=quals) - - def parse_type(self, cdecl): - return self.parse_type_and_quals(cdecl)[0] - - def parse_type_and_quals(self, cdecl): - ast, macros = self._parse('void __dummy(\n%s\n);' % cdecl)[:2] - assert not macros - exprnode = ast.ext[-1].type.args.params[0] - if isinstance(exprnode, pycparser.c_ast.ID): - raise CDefError("unknown identifier '%s'" % (exprnode.name,)) - return self._get_type_and_quals(exprnode.type) - - def _declare(self, name, obj, included=False, quals=0): - if name in self._declarations: - prevobj, prevquals = self._declarations[name] - if prevobj is obj and prevquals == quals: - return - if not self._options.get('override'): - raise FFIError( - "multiple declarations of %s (for interactive usage, " - "try cdef(xx, override=True))" % (name,)) - assert '__dotdotdot__' not in name.split() - self._declarations[name] = (obj, quals) - if included: - self._included_declarations.add(obj) - - def _extract_quals(self, type): - quals = 0 - if isinstance(type, (pycparser.c_ast.TypeDecl, - pycparser.c_ast.PtrDecl)): - if 'const' in type.quals: - quals |= model.Q_CONST - if 'volatile' in type.quals: - quals |= model.Q_VOLATILE - if 'restrict' in type.quals: - quals |= model.Q_RESTRICT - return quals - - def _get_type_pointer(self, type, quals, declname=None): - if isinstance(type, model.RawFunctionType): - return type.as_function_pointer() - if (isinstance(type, model.StructOrUnionOrEnum) and - type.name.startswith('$') and type.name[1:].isdigit() and - type.forcename is None and declname is not None): - return model.NamedPointerType(type, declname, quals) - return model.PointerType(type, quals) - - def _get_type_and_quals(self, typenode, name=None, partial_length_ok=False, - typedef_example=None): - # first, dereference typedefs, if we have it already parsed, we're good - if (isinstance(typenode, pycparser.c_ast.TypeDecl) and - isinstance(typenode.type, pycparser.c_ast.IdentifierType) and - len(typenode.type.names) == 1 and - ('typedef ' + typenode.type.names[0]) in self._declarations): - tp, quals = self._declarations['typedef ' + typenode.type.names[0]] - quals |= self._extract_quals(typenode) - return tp, quals - # - if isinstance(typenode, pycparser.c_ast.ArrayDecl): - # array type - if typenode.dim is None: - length = None - else: - length = self._parse_constant( - typenode.dim, partial_length_ok=partial_length_ok) - # a hack: in 'typedef int foo_t[...][...];', don't use '...' as - # the length but use directly the C expression that would be - # generated by recompiler.py. 
This lets the typedef be used in - # many more places within recompiler.py - if typedef_example is not None: - if length == '...': - length = '_cffi_array_len(%s)' % (typedef_example,) - typedef_example = "*" + typedef_example - # - tp, quals = self._get_type_and_quals(typenode.type, - partial_length_ok=partial_length_ok, - typedef_example=typedef_example) - return model.ArrayType(tp, length), quals - # - if isinstance(typenode, pycparser.c_ast.PtrDecl): - # pointer type - itemtype, itemquals = self._get_type_and_quals(typenode.type) - tp = self._get_type_pointer(itemtype, itemquals, declname=name) - quals = self._extract_quals(typenode) - return tp, quals - # - if isinstance(typenode, pycparser.c_ast.TypeDecl): - quals = self._extract_quals(typenode) - type = typenode.type - if isinstance(type, pycparser.c_ast.IdentifierType): - # assume a primitive type. get it from .names, but reduce - # synonyms to a single chosen combination - names = list(type.names) - if names != ['signed', 'char']: # keep this unmodified - prefixes = {} - while names: - name = names[0] - if name in ('short', 'long', 'signed', 'unsigned'): - prefixes[name] = prefixes.get(name, 0) + 1 - del names[0] - else: - break - # ignore the 'signed' prefix below, and reorder the others - newnames = [] - for prefix in ('unsigned', 'short', 'long'): - for i in range(prefixes.get(prefix, 0)): - newnames.append(prefix) - if not names: - names = ['int'] # implicitly - if names == ['int']: # but kill it if 'short' or 'long' - if 'short' in prefixes or 'long' in prefixes: - names = [] - names = newnames + names - ident = ' '.join(names) - if ident == 'void': - return model.void_type, quals - if ident == '__dotdotdot__': - raise FFIError(':%d: bad usage of "..."' % - typenode.coord.line) - tp0, quals0 = resolve_common_type(self, ident) - return tp0, (quals | quals0) - # - if isinstance(type, pycparser.c_ast.Struct): - # 'struct foobar' - tp = self._get_struct_union_enum_type('struct', type, name) - return tp, quals - # - if isinstance(type, pycparser.c_ast.Union): - # 'union foobar' - tp = self._get_struct_union_enum_type('union', type, name) - return tp, quals - # - if isinstance(type, pycparser.c_ast.Enum): - # 'enum foobar' - tp = self._get_struct_union_enum_type('enum', type, name) - return tp, quals - # - if isinstance(typenode, pycparser.c_ast.FuncDecl): - # a function type - return self._parse_function_type(typenode, name), 0 - # - # nested anonymous structs or unions end up here - if isinstance(typenode, pycparser.c_ast.Struct): - return self._get_struct_union_enum_type('struct', typenode, name, - nested=True), 0 - if isinstance(typenode, pycparser.c_ast.Union): - return self._get_struct_union_enum_type('union', typenode, name, - nested=True), 0 - # - raise FFIError(":%d: bad or unsupported type declaration" % - typenode.coord.line) - - def _parse_function_type(self, typenode, funcname=None): - params = list(getattr(typenode.args, 'params', [])) - for i, arg in enumerate(params): - if not hasattr(arg, 'type'): - raise CDefError("%s arg %d: unknown type '%s'" - " (if you meant to use the old C syntax of giving" - " untyped arguments, it is not supported)" - % (funcname or 'in expression', i + 1, - getattr(arg, 'name', '?'))) - ellipsis = ( - len(params) > 0 and - isinstance(params[-1].type, pycparser.c_ast.TypeDecl) and - isinstance(params[-1].type.type, - pycparser.c_ast.IdentifierType) and - params[-1].type.type.names == ['__dotdotdot__']) - if ellipsis: - params.pop() - if not params: - raise CDefError( - "%s: a function with 
only '(...)' as argument" - " is not correct C" % (funcname or 'in expression')) - args = [self._as_func_arg(*self._get_type_and_quals(argdeclnode.type)) - for argdeclnode in params] - if not ellipsis and args == [model.void_type]: - args = [] - result, quals = self._get_type_and_quals(typenode.type) - # the 'quals' on the result type are ignored. HACK: we absure them - # to detect __stdcall functions: we textually replace "__stdcall" - # with "volatile volatile const" above. - abi = None - if hasattr(typenode.type, 'quals'): # else, probable syntax error anyway - if typenode.type.quals[-3:] == ['volatile', 'volatile', 'const']: - abi = '__stdcall' - return model.RawFunctionType(tuple(args), result, ellipsis, abi) - - def _as_func_arg(self, type, quals): - if isinstance(type, model.ArrayType): - return model.PointerType(type.item, quals) - elif isinstance(type, model.RawFunctionType): - return type.as_function_pointer() - else: - return type - - def _get_struct_union_enum_type(self, kind, type, name=None, nested=False): - # First, a level of caching on the exact 'type' node of the AST. - # This is obscure, but needed because pycparser "unrolls" declarations - # such as "typedef struct { } foo_t, *foo_p" and we end up with - # an AST that is not a tree, but a DAG, with the "type" node of the - # two branches foo_t and foo_p of the trees being the same node. - # It's a bit silly but detecting "DAG-ness" in the AST tree seems - # to be the only way to distinguish this case from two independent - # structs. See test_struct_with_two_usages. - try: - return self._structnode2type[type] - except KeyError: - pass - # - # Note that this must handle parsing "struct foo" any number of - # times and always return the same StructType object. Additionally, - # one of these times (not necessarily the first), the fields of - # the struct can be specified with "struct foo { ...fields... }". - # If no name is given, then we have to create a new anonymous struct - # with no caching; in this case, the fields are either specified - # right now or never. - # - force_name = name - name = type.name - # - # get the type or create it if needed - if name is None: - # 'force_name' is used to guess a more readable name for - # anonymous structs, for the common case "typedef struct { } foo". - if force_name is not None: - explicit_name = '$%s' % force_name - else: - self._anonymous_counter += 1 - explicit_name = '$%d' % self._anonymous_counter - tp = None - else: - explicit_name = name - key = '%s %s' % (kind, name) - tp, _ = self._declarations.get(key, (None, None)) - # - if tp is None: - if kind == 'struct': - tp = model.StructType(explicit_name, None, None, None) - elif kind == 'union': - tp = model.UnionType(explicit_name, None, None, None) - elif kind == 'enum': - if explicit_name == '__dotdotdot__': - raise CDefError("Enums cannot be declared with ...") - tp = self._build_enum_type(explicit_name, type.values) - else: - raise AssertionError("kind = %r" % (kind,)) - if name is not None: - self._declare(key, tp) - else: - if kind == 'enum' and type.values is not None: - raise NotImplementedError( - "enum %s: the '{}' declaration should appear on the first " - "time the enum is mentioned, not later" % explicit_name) - if not tp.forcename: - tp.force_the_name(force_name) - if tp.forcename and '$' in tp.name: - self._declare('anonymous %s' % tp.forcename, tp) - # - self._structnode2type[type] = tp - # - # enums: done here - if kind == 'enum': - return tp - # - # is there a 'type.decls'? 
If yes, then this is the place in the - # C sources that declare the fields. If no, then just return the - # existing type, possibly still incomplete. - if type.decls is None: - return tp - # - if tp.fldnames is not None: - raise CDefError("duplicate declaration of struct %s" % name) - fldnames = [] - fldtypes = [] - fldbitsize = [] - fldquals = [] - for decl in type.decls: - if (isinstance(decl.type, pycparser.c_ast.IdentifierType) and - ''.join(decl.type.names) == '__dotdotdot__'): - # XXX pycparser is inconsistent: 'names' should be a list - # of strings, but is sometimes just one string. Use - # str.join() as a way to cope with both. - self._make_partial(tp, nested) - continue - if decl.bitsize is None: - bitsize = -1 - else: - bitsize = self._parse_constant(decl.bitsize) - self._partial_length = False - type, fqual = self._get_type_and_quals(decl.type, - partial_length_ok=True) - if self._partial_length: - self._make_partial(tp, nested) - if isinstance(type, model.StructType) and type.partial: - self._make_partial(tp, nested) - fldnames.append(decl.name or '') - fldtypes.append(type) - fldbitsize.append(bitsize) - fldquals.append(fqual) - tp.fldnames = tuple(fldnames) - tp.fldtypes = tuple(fldtypes) - tp.fldbitsize = tuple(fldbitsize) - tp.fldquals = tuple(fldquals) - if fldbitsize != [-1] * len(fldbitsize): - if isinstance(tp, model.StructType) and tp.partial: - raise NotImplementedError("%s: using both bitfields and '...;'" - % (tp,)) - tp.packed = self._options.get('packed') - if tp.completed: # must be re-completed: it is not opaque any more - tp.completed = 0 - self._recomplete.append(tp) - return tp - - def _make_partial(self, tp, nested): - if not isinstance(tp, model.StructOrUnion): - raise CDefError("%s cannot be partial" % (tp,)) - if not tp.has_c_name() and not nested: - raise NotImplementedError("%s is partial but has no C name" %(tp,)) - tp.partial = True - - def _parse_constant(self, exprnode, partial_length_ok=False): - # for now, limited to expressions that are an immediate number - # or positive/negative number - if isinstance(exprnode, pycparser.c_ast.Constant): - s = exprnode.value - if '0' <= s[0] <= '9': - s = s.rstrip('uUlL') - try: - if s.startswith('0'): - return int(s, 8) - else: - return int(s, 10) - except ValueError: - if len(s) > 1: - if s.lower()[0:2] == '0x': - return int(s, 16) - elif s.lower()[0:2] == '0b': - return int(s, 2) - raise CDefError("invalid constant %r" % (s,)) - elif s[0] == "'" and s[-1] == "'" and ( - len(s) == 3 or (len(s) == 4 and s[1] == "\\")): - return ord(s[-2]) - else: - raise CDefError("invalid constant %r" % (s,)) - # - if (isinstance(exprnode, pycparser.c_ast.UnaryOp) and - exprnode.op == '+'): - return self._parse_constant(exprnode.expr) - # - if (isinstance(exprnode, pycparser.c_ast.UnaryOp) and - exprnode.op == '-'): - return -self._parse_constant(exprnode.expr) - # load previously defined int constant - if (isinstance(exprnode, pycparser.c_ast.ID) and - exprnode.name in self._int_constants): - return self._int_constants[exprnode.name] - # - if (isinstance(exprnode, pycparser.c_ast.ID) and - exprnode.name == '__dotdotdotarray__'): - if partial_length_ok: - self._partial_length = True - return '...' 
- raise FFIError(":%d: unsupported '[...]' here, cannot derive " - "the actual array length in this context" - % exprnode.coord.line) - # - if isinstance(exprnode, pycparser.c_ast.BinaryOp): - left = self._parse_constant(exprnode.left) - right = self._parse_constant(exprnode.right) - if exprnode.op == '+': - return left + right - elif exprnode.op == '-': - return left - right - elif exprnode.op == '*': - return left * right - elif exprnode.op == '/': - return self._c_div(left, right) - elif exprnode.op == '%': - return left - self._c_div(left, right) * right - elif exprnode.op == '<<': - return left << right - elif exprnode.op == '>>': - return left >> right - elif exprnode.op == '&': - return left & right - elif exprnode.op == '|': - return left | right - elif exprnode.op == '^': - return left ^ right - # - raise FFIError(":%d: unsupported expression: expected a " - "simple numeric constant" % exprnode.coord.line) - - def _c_div(self, a, b): - result = a // b - if ((a < 0) ^ (b < 0)) and (a % b) != 0: - result += 1 - return result - - def _build_enum_type(self, explicit_name, decls): - if decls is not None: - partial = False - enumerators = [] - enumvalues = [] - nextenumvalue = 0 - for enum in decls.enumerators: - if _r_enum_dotdotdot.match(enum.name): - partial = True - continue - if enum.value is not None: - nextenumvalue = self._parse_constant(enum.value) - enumerators.append(enum.name) - enumvalues.append(nextenumvalue) - self._add_constants(enum.name, nextenumvalue) - nextenumvalue += 1 - enumerators = tuple(enumerators) - enumvalues = tuple(enumvalues) - tp = model.EnumType(explicit_name, enumerators, enumvalues) - tp.partial = partial - else: # opaque enum - tp = model.EnumType(explicit_name, (), ()) - return tp - - def include(self, other): - for name, (tp, quals) in other._declarations.items(): - if name.startswith('anonymous $enum_$'): - continue # fix for test_anonymous_enum_include - kind = name.split(' ', 1)[0] - if kind in ('struct', 'union', 'enum', 'anonymous', 'typedef'): - self._declare(name, tp, included=True, quals=quals) - for k, v in other._int_constants.items(): - self._add_constants(k, v) - - def _get_unknown_type(self, decl): - typenames = decl.type.type.names - if typenames == ['__dotdotdot__']: - return model.unknown_type(decl.name) - - if typenames == ['__dotdotdotint__']: - if self._uses_new_feature is None: - self._uses_new_feature = "'typedef int... %s'" % decl.name - return model.UnknownIntegerType(decl.name) - - if typenames == ['__dotdotdotfloat__']: - # note: not for 'long double' so far - if self._uses_new_feature is None: - self._uses_new_feature = "'typedef float... %s'" % decl.name - return model.UnknownFloatType(decl.name) - - raise FFIError(':%d: unsupported usage of "..." in typedef' - % decl.coord.line) - - def _get_unknown_ptr_type(self, decl): - if decl.type.type.type.names == ['__dotdotdot__']: - return model.unknown_ptr_type(decl.name) - raise FFIError(':%d: unsupported usage of "..." 
in typedef' - % decl.coord.line) diff --git a/env/lib/python3.10/site-packages/cffi/error.py b/env/lib/python3.10/site-packages/cffi/error.py deleted file mode 100644 index 0a27247..0000000 --- a/env/lib/python3.10/site-packages/cffi/error.py +++ /dev/null @@ -1,31 +0,0 @@ - -class FFIError(Exception): - __module__ = 'cffi' - -class CDefError(Exception): - __module__ = 'cffi' - def __str__(self): - try: - current_decl = self.args[1] - filename = current_decl.coord.file - linenum = current_decl.coord.line - prefix = '%s:%d: ' % (filename, linenum) - except (AttributeError, TypeError, IndexError): - prefix = '' - return '%s%s' % (prefix, self.args[0]) - -class VerificationError(Exception): - """ An error raised when verification fails - """ - __module__ = 'cffi' - -class VerificationMissing(Exception): - """ An error raised when incomplete structures are passed into - cdef, but no verification has been done - """ - __module__ = 'cffi' - -class PkgConfigError(Exception): - """ An error raised for missing modules in pkg-config - """ - __module__ = 'cffi' diff --git a/env/lib/python3.10/site-packages/cffi/ffiplatform.py b/env/lib/python3.10/site-packages/cffi/ffiplatform.py deleted file mode 100644 index adca28f..0000000 --- a/env/lib/python3.10/site-packages/cffi/ffiplatform.py +++ /dev/null @@ -1,113 +0,0 @@ -import sys, os -from .error import VerificationError - - -LIST_OF_FILE_NAMES = ['sources', 'include_dirs', 'library_dirs', - 'extra_objects', 'depends'] - -def get_extension(srcfilename, modname, sources=(), **kwds): - from cffi._shimmed_dist_utils import Extension - allsources = [srcfilename] - for src in sources: - allsources.append(os.path.normpath(src)) - return Extension(name=modname, sources=allsources, **kwds) - -def compile(tmpdir, ext, compiler_verbose=0, debug=None): - """Compile a C extension module using distutils.""" - - saved_environ = os.environ.copy() - try: - outputfilename = _build(tmpdir, ext, compiler_verbose, debug) - outputfilename = os.path.abspath(outputfilename) - finally: - # workaround for a distutils bugs where some env vars can - # become longer and longer every time it is used - for key, value in saved_environ.items(): - if os.environ.get(key) != value: - os.environ[key] = value - return outputfilename - -def _build(tmpdir, ext, compiler_verbose=0, debug=None): - # XXX compact but horrible :-( - from cffi._shimmed_dist_utils import Distribution, CompileError, LinkError, set_threshold, set_verbosity - - dist = Distribution({'ext_modules': [ext]}) - dist.parse_config_files() - options = dist.get_option_dict('build_ext') - if debug is None: - debug = sys.flags.debug - options['debug'] = ('ffiplatform', debug) - options['force'] = ('ffiplatform', True) - options['build_lib'] = ('ffiplatform', tmpdir) - options['build_temp'] = ('ffiplatform', tmpdir) - # - try: - old_level = set_threshold(0) or 0 - try: - set_verbosity(compiler_verbose) - dist.run_command('build_ext') - cmd_obj = dist.get_command_obj('build_ext') - [soname] = cmd_obj.get_outputs() - finally: - set_threshold(old_level) - except (CompileError, LinkError) as e: - raise VerificationError('%s: %s' % (e.__class__.__name__, e)) - # - return soname - -try: - from os.path import samefile -except ImportError: - def samefile(f1, f2): - return os.path.abspath(f1) == os.path.abspath(f2) - -def maybe_relative_path(path): - if not os.path.isabs(path): - return path # already relative - dir = path - names = [] - while True: - prevdir = dir - dir, name = os.path.split(prevdir) - if dir == prevdir or not dir: - 
return path # failed to make it relative - names.append(name) - try: - if samefile(dir, os.curdir): - names.reverse() - return os.path.join(*names) - except OSError: - pass - -# ____________________________________________________________ - -try: - int_or_long = (int, long) - import cStringIO -except NameError: - int_or_long = int # Python 3 - import io as cStringIO - -def _flatten(x, f): - if isinstance(x, str): - f.write('%ds%s' % (len(x), x)) - elif isinstance(x, dict): - keys = sorted(x.keys()) - f.write('%dd' % len(keys)) - for key in keys: - _flatten(key, f) - _flatten(x[key], f) - elif isinstance(x, (list, tuple)): - f.write('%dl' % len(x)) - for value in x: - _flatten(value, f) - elif isinstance(x, int_or_long): - f.write('%di' % (x,)) - else: - raise TypeError( - "the keywords to verify() contains unsupported object %r" % (x,)) - -def flatten(x): - f = cStringIO.StringIO() - _flatten(x, f) - return f.getvalue() diff --git a/env/lib/python3.10/site-packages/cffi/lock.py b/env/lib/python3.10/site-packages/cffi/lock.py deleted file mode 100644 index db91b71..0000000 --- a/env/lib/python3.10/site-packages/cffi/lock.py +++ /dev/null @@ -1,30 +0,0 @@ -import sys - -if sys.version_info < (3,): - try: - from thread import allocate_lock - except ImportError: - from dummy_thread import allocate_lock -else: - try: - from _thread import allocate_lock - except ImportError: - from _dummy_thread import allocate_lock - - -##import sys -##l1 = allocate_lock - -##class allocate_lock(object): -## def __init__(self): -## self._real = l1() -## def __enter__(self): -## for i in range(4, 0, -1): -## print sys._getframe(i).f_code -## print -## return self._real.__enter__() -## def __exit__(self, *args): -## return self._real.__exit__(*args) -## def acquire(self, f): -## assert f is False -## return self._real.acquire(f) diff --git a/env/lib/python3.10/site-packages/cffi/model.py b/env/lib/python3.10/site-packages/cffi/model.py deleted file mode 100644 index e5f4cae..0000000 --- a/env/lib/python3.10/site-packages/cffi/model.py +++ /dev/null @@ -1,618 +0,0 @@ -import types -import weakref - -from .lock import allocate_lock -from .error import CDefError, VerificationError, VerificationMissing - -# type qualifiers -Q_CONST = 0x01 -Q_RESTRICT = 0x02 -Q_VOLATILE = 0x04 - -def qualify(quals, replace_with): - if quals & Q_CONST: - replace_with = ' const ' + replace_with.lstrip() - if quals & Q_VOLATILE: - replace_with = ' volatile ' + replace_with.lstrip() - if quals & Q_RESTRICT: - # It seems that __restrict is supported by gcc and msvc. - # If you hit some different compiler, add a #define in - # _cffi_include.h for it (and in its copies, documented there) - replace_with = ' __restrict ' + replace_with.lstrip() - return replace_with - - -class BaseTypeByIdentity(object): - is_array_type = False - is_raw_function = False - - def get_c_name(self, replace_with='', context='a C file', quals=0): - result = self.c_name_with_marker - assert result.count('&') == 1 - # some logic duplication with ffi.getctype()... 
:-( - replace_with = replace_with.strip() - if replace_with: - if replace_with.startswith('*') and '&[' in result: - replace_with = '(%s)' % replace_with - elif not replace_with[0] in '[(': - replace_with = ' ' + replace_with - replace_with = qualify(quals, replace_with) - result = result.replace('&', replace_with) - if '$' in result: - raise VerificationError( - "cannot generate '%s' in %s: unknown type name" - % (self._get_c_name(), context)) - return result - - def _get_c_name(self): - return self.c_name_with_marker.replace('&', '') - - def has_c_name(self): - return '$' not in self._get_c_name() - - def is_integer_type(self): - return False - - def get_cached_btype(self, ffi, finishlist, can_delay=False): - try: - BType = ffi._cached_btypes[self] - except KeyError: - BType = self.build_backend_type(ffi, finishlist) - BType2 = ffi._cached_btypes.setdefault(self, BType) - assert BType2 is BType - return BType - - def __repr__(self): - return '<%s>' % (self._get_c_name(),) - - def _get_items(self): - return [(name, getattr(self, name)) for name in self._attrs_] - - -class BaseType(BaseTypeByIdentity): - - def __eq__(self, other): - return (self.__class__ == other.__class__ and - self._get_items() == other._get_items()) - - def __ne__(self, other): - return not self == other - - def __hash__(self): - return hash((self.__class__, tuple(self._get_items()))) - - -class VoidType(BaseType): - _attrs_ = () - - def __init__(self): - self.c_name_with_marker = 'void&' - - def build_backend_type(self, ffi, finishlist): - return global_cache(self, ffi, 'new_void_type') - -void_type = VoidType() - - -class BasePrimitiveType(BaseType): - def is_complex_type(self): - return False - - -class PrimitiveType(BasePrimitiveType): - _attrs_ = ('name',) - - ALL_PRIMITIVE_TYPES = { - 'char': 'c', - 'short': 'i', - 'int': 'i', - 'long': 'i', - 'long long': 'i', - 'signed char': 'i', - 'unsigned char': 'i', - 'unsigned short': 'i', - 'unsigned int': 'i', - 'unsigned long': 'i', - 'unsigned long long': 'i', - 'float': 'f', - 'double': 'f', - 'long double': 'f', - '_cffi_float_complex_t': 'j', - '_cffi_double_complex_t': 'j', - '_Bool': 'i', - # the following types are not primitive in the C sense - 'wchar_t': 'c', - 'char16_t': 'c', - 'char32_t': 'c', - 'int8_t': 'i', - 'uint8_t': 'i', - 'int16_t': 'i', - 'uint16_t': 'i', - 'int32_t': 'i', - 'uint32_t': 'i', - 'int64_t': 'i', - 'uint64_t': 'i', - 'int_least8_t': 'i', - 'uint_least8_t': 'i', - 'int_least16_t': 'i', - 'uint_least16_t': 'i', - 'int_least32_t': 'i', - 'uint_least32_t': 'i', - 'int_least64_t': 'i', - 'uint_least64_t': 'i', - 'int_fast8_t': 'i', - 'uint_fast8_t': 'i', - 'int_fast16_t': 'i', - 'uint_fast16_t': 'i', - 'int_fast32_t': 'i', - 'uint_fast32_t': 'i', - 'int_fast64_t': 'i', - 'uint_fast64_t': 'i', - 'intptr_t': 'i', - 'uintptr_t': 'i', - 'intmax_t': 'i', - 'uintmax_t': 'i', - 'ptrdiff_t': 'i', - 'size_t': 'i', - 'ssize_t': 'i', - } - - def __init__(self, name): - assert name in self.ALL_PRIMITIVE_TYPES - self.name = name - self.c_name_with_marker = name + '&' - - def is_char_type(self): - return self.ALL_PRIMITIVE_TYPES[self.name] == 'c' - def is_integer_type(self): - return self.ALL_PRIMITIVE_TYPES[self.name] == 'i' - def is_float_type(self): - return self.ALL_PRIMITIVE_TYPES[self.name] == 'f' - def is_complex_type(self): - return self.ALL_PRIMITIVE_TYPES[self.name] == 'j' - - def build_backend_type(self, ffi, finishlist): - return global_cache(self, ffi, 'new_primitive_type', self.name) - - -class UnknownIntegerType(BasePrimitiveType): - 
_attrs_ = ('name',) - - def __init__(self, name): - self.name = name - self.c_name_with_marker = name + '&' - - def is_integer_type(self): - return True - - def build_backend_type(self, ffi, finishlist): - raise NotImplementedError("integer type '%s' can only be used after " - "compilation" % self.name) - -class UnknownFloatType(BasePrimitiveType): - _attrs_ = ('name', ) - - def __init__(self, name): - self.name = name - self.c_name_with_marker = name + '&' - - def build_backend_type(self, ffi, finishlist): - raise NotImplementedError("float type '%s' can only be used after " - "compilation" % self.name) - - -class BaseFunctionType(BaseType): - _attrs_ = ('args', 'result', 'ellipsis', 'abi') - - def __init__(self, args, result, ellipsis, abi=None): - self.args = args - self.result = result - self.ellipsis = ellipsis - self.abi = abi - # - reprargs = [arg._get_c_name() for arg in self.args] - if self.ellipsis: - reprargs.append('...') - reprargs = reprargs or ['void'] - replace_with = self._base_pattern % (', '.join(reprargs),) - if abi is not None: - replace_with = replace_with[:1] + abi + ' ' + replace_with[1:] - self.c_name_with_marker = ( - self.result.c_name_with_marker.replace('&', replace_with)) - - -class RawFunctionType(BaseFunctionType): - # Corresponds to a C type like 'int(int)', which is the C type of - # a function, but not a pointer-to-function. The backend has no - # notion of such a type; it's used temporarily by parsing. - _base_pattern = '(&)(%s)' - is_raw_function = True - - def build_backend_type(self, ffi, finishlist): - raise CDefError("cannot render the type %r: it is a function " - "type, not a pointer-to-function type" % (self,)) - - def as_function_pointer(self): - return FunctionPtrType(self.args, self.result, self.ellipsis, self.abi) - - -class FunctionPtrType(BaseFunctionType): - _base_pattern = '(*&)(%s)' - - def build_backend_type(self, ffi, finishlist): - result = self.result.get_cached_btype(ffi, finishlist) - args = [] - for tp in self.args: - args.append(tp.get_cached_btype(ffi, finishlist)) - abi_args = () - if self.abi == "__stdcall": - if not self.ellipsis: # __stdcall ignored for variadic funcs - try: - abi_args = (ffi._backend.FFI_STDCALL,) - except AttributeError: - pass - return global_cache(self, ffi, 'new_function_type', - tuple(args), result, self.ellipsis, *abi_args) - - def as_raw_function(self): - return RawFunctionType(self.args, self.result, self.ellipsis, self.abi) - - -class PointerType(BaseType): - _attrs_ = ('totype', 'quals') - - def __init__(self, totype, quals=0): - self.totype = totype - self.quals = quals - extra = " *&" - if totype.is_array_type: - extra = "(%s)" % (extra.lstrip(),) - extra = qualify(quals, extra) - self.c_name_with_marker = totype.c_name_with_marker.replace('&', extra) - - def build_backend_type(self, ffi, finishlist): - BItem = self.totype.get_cached_btype(ffi, finishlist, can_delay=True) - return global_cache(self, ffi, 'new_pointer_type', BItem) - -voidp_type = PointerType(void_type) - -def ConstPointerType(totype): - return PointerType(totype, Q_CONST) - -const_voidp_type = ConstPointerType(void_type) - - -class NamedPointerType(PointerType): - _attrs_ = ('totype', 'name') - - def __init__(self, totype, name, quals=0): - PointerType.__init__(self, totype, quals) - self.name = name - self.c_name_with_marker = name + '&' - - -class ArrayType(BaseType): - _attrs_ = ('item', 'length') - is_array_type = True - - def __init__(self, item, length): - self.item = item - self.length = length - # - if length is None: - 
brackets = '&[]' - elif length == '...': - brackets = '&[/*...*/]' - else: - brackets = '&[%s]' % length - self.c_name_with_marker = ( - self.item.c_name_with_marker.replace('&', brackets)) - - def length_is_unknown(self): - return isinstance(self.length, str) - - def resolve_length(self, newlength): - return ArrayType(self.item, newlength) - - def build_backend_type(self, ffi, finishlist): - if self.length_is_unknown(): - raise CDefError("cannot render the type %r: unknown length" % - (self,)) - self.item.get_cached_btype(ffi, finishlist) # force the item BType - BPtrItem = PointerType(self.item).get_cached_btype(ffi, finishlist) - return global_cache(self, ffi, 'new_array_type', BPtrItem, self.length) - -char_array_type = ArrayType(PrimitiveType('char'), None) - - -class StructOrUnionOrEnum(BaseTypeByIdentity): - _attrs_ = ('name',) - forcename = None - - def build_c_name_with_marker(self): - name = self.forcename or '%s %s' % (self.kind, self.name) - self.c_name_with_marker = name + '&' - - def force_the_name(self, forcename): - self.forcename = forcename - self.build_c_name_with_marker() - - def get_official_name(self): - assert self.c_name_with_marker.endswith('&') - return self.c_name_with_marker[:-1] - - -class StructOrUnion(StructOrUnionOrEnum): - fixedlayout = None - completed = 0 - partial = False - packed = 0 - - def __init__(self, name, fldnames, fldtypes, fldbitsize, fldquals=None): - self.name = name - self.fldnames = fldnames - self.fldtypes = fldtypes - self.fldbitsize = fldbitsize - self.fldquals = fldquals - self.build_c_name_with_marker() - - def anonymous_struct_fields(self): - if self.fldtypes is not None: - for name, type in zip(self.fldnames, self.fldtypes): - if name == '' and isinstance(type, StructOrUnion): - yield type - - def enumfields(self, expand_anonymous_struct_union=True): - fldquals = self.fldquals - if fldquals is None: - fldquals = (0,) * len(self.fldnames) - for name, type, bitsize, quals in zip(self.fldnames, self.fldtypes, - self.fldbitsize, fldquals): - if (name == '' and isinstance(type, StructOrUnion) - and expand_anonymous_struct_union): - # nested anonymous struct/union - for result in type.enumfields(): - yield result - else: - yield (name, type, bitsize, quals) - - def force_flatten(self): - # force the struct or union to have a declaration that lists - # directly all fields returned by enumfields(), flattening - # nested anonymous structs/unions. 
- names = [] - types = [] - bitsizes = [] - fldquals = [] - for name, type, bitsize, quals in self.enumfields(): - names.append(name) - types.append(type) - bitsizes.append(bitsize) - fldquals.append(quals) - self.fldnames = tuple(names) - self.fldtypes = tuple(types) - self.fldbitsize = tuple(bitsizes) - self.fldquals = tuple(fldquals) - - def get_cached_btype(self, ffi, finishlist, can_delay=False): - BType = StructOrUnionOrEnum.get_cached_btype(self, ffi, finishlist, - can_delay) - if not can_delay: - self.finish_backend_type(ffi, finishlist) - return BType - - def finish_backend_type(self, ffi, finishlist): - if self.completed: - if self.completed != 2: - raise NotImplementedError("recursive structure declaration " - "for '%s'" % (self.name,)) - return - BType = ffi._cached_btypes[self] - # - self.completed = 1 - # - if self.fldtypes is None: - pass # not completing it: it's an opaque struct - # - elif self.fixedlayout is None: - fldtypes = [tp.get_cached_btype(ffi, finishlist) - for tp in self.fldtypes] - lst = list(zip(self.fldnames, fldtypes, self.fldbitsize)) - extra_flags = () - if self.packed: - if self.packed == 1: - extra_flags = (8,) # SF_PACKED - else: - extra_flags = (0, self.packed) - ffi._backend.complete_struct_or_union(BType, lst, self, - -1, -1, *extra_flags) - # - else: - fldtypes = [] - fieldofs, fieldsize, totalsize, totalalignment = self.fixedlayout - for i in range(len(self.fldnames)): - fsize = fieldsize[i] - ftype = self.fldtypes[i] - # - if isinstance(ftype, ArrayType) and ftype.length_is_unknown(): - # fix the length to match the total size - BItemType = ftype.item.get_cached_btype(ffi, finishlist) - nlen, nrest = divmod(fsize, ffi.sizeof(BItemType)) - if nrest != 0: - self._verification_error( - "field '%s.%s' has a bogus size?" 
% ( - self.name, self.fldnames[i] or '{}')) - ftype = ftype.resolve_length(nlen) - self.fldtypes = (self.fldtypes[:i] + (ftype,) + - self.fldtypes[i+1:]) - # - BFieldType = ftype.get_cached_btype(ffi, finishlist) - if isinstance(ftype, ArrayType) and ftype.length is None: - assert fsize == 0 - else: - bitemsize = ffi.sizeof(BFieldType) - if bitemsize != fsize: - self._verification_error( - "field '%s.%s' is declared as %d bytes, but is " - "really %d bytes" % (self.name, - self.fldnames[i] or '{}', - bitemsize, fsize)) - fldtypes.append(BFieldType) - # - lst = list(zip(self.fldnames, fldtypes, self.fldbitsize, fieldofs)) - ffi._backend.complete_struct_or_union(BType, lst, self, - totalsize, totalalignment) - self.completed = 2 - - def _verification_error(self, msg): - raise VerificationError(msg) - - def check_not_partial(self): - if self.partial and self.fixedlayout is None: - raise VerificationMissing(self._get_c_name()) - - def build_backend_type(self, ffi, finishlist): - self.check_not_partial() - finishlist.append(self) - # - return global_cache(self, ffi, 'new_%s_type' % self.kind, - self.get_official_name(), key=self) - - -class StructType(StructOrUnion): - kind = 'struct' - - -class UnionType(StructOrUnion): - kind = 'union' - - -class EnumType(StructOrUnionOrEnum): - kind = 'enum' - partial = False - partial_resolved = False - - def __init__(self, name, enumerators, enumvalues, baseinttype=None): - self.name = name - self.enumerators = enumerators - self.enumvalues = enumvalues - self.baseinttype = baseinttype - self.build_c_name_with_marker() - - def force_the_name(self, forcename): - StructOrUnionOrEnum.force_the_name(self, forcename) - if self.forcename is None: - name = self.get_official_name() - self.forcename = '$' + name.replace(' ', '_') - - def check_not_partial(self): - if self.partial and not self.partial_resolved: - raise VerificationMissing(self._get_c_name()) - - def build_backend_type(self, ffi, finishlist): - self.check_not_partial() - base_btype = self.build_baseinttype(ffi, finishlist) - return global_cache(self, ffi, 'new_enum_type', - self.get_official_name(), - self.enumerators, self.enumvalues, - base_btype, key=self) - - def build_baseinttype(self, ffi, finishlist): - if self.baseinttype is not None: - return self.baseinttype.get_cached_btype(ffi, finishlist) - # - if self.enumvalues: - smallest_value = min(self.enumvalues) - largest_value = max(self.enumvalues) - else: - import warnings - try: - # XXX! The goal is to ensure that the warnings.warn() - # will not suppress the warning. We want to get it - # several times if we reach this point several times. 
- __warningregistry__.clear() - except NameError: - pass - warnings.warn("%r has no values explicitly defined; " - "guessing that it is equivalent to 'unsigned int'" - % self._get_c_name()) - smallest_value = largest_value = 0 - if smallest_value < 0: # needs a signed type - sign = 1 - candidate1 = PrimitiveType("int") - candidate2 = PrimitiveType("long") - else: - sign = 0 - candidate1 = PrimitiveType("unsigned int") - candidate2 = PrimitiveType("unsigned long") - btype1 = candidate1.get_cached_btype(ffi, finishlist) - btype2 = candidate2.get_cached_btype(ffi, finishlist) - size1 = ffi.sizeof(btype1) - size2 = ffi.sizeof(btype2) - if (smallest_value >= ((-1) << (8*size1-1)) and - largest_value < (1 << (8*size1-sign))): - return btype1 - if (smallest_value >= ((-1) << (8*size2-1)) and - largest_value < (1 << (8*size2-sign))): - return btype2 - raise CDefError("%s values don't all fit into either 'long' " - "or 'unsigned long'" % self._get_c_name()) - -def unknown_type(name, structname=None): - if structname is None: - structname = '$%s' % name - tp = StructType(structname, None, None, None) - tp.force_the_name(name) - tp.origin = "unknown_type" - return tp - -def unknown_ptr_type(name, structname=None): - if structname is None: - structname = '$$%s' % name - tp = StructType(structname, None, None, None) - return NamedPointerType(tp, name) - - -global_lock = allocate_lock() -_typecache_cffi_backend = weakref.WeakValueDictionary() - -def get_typecache(backend): - # returns _typecache_cffi_backend if backend is the _cffi_backend - # module, or type(backend).__typecache if backend is an instance of - # CTypesBackend (or some FakeBackend class during tests) - if isinstance(backend, types.ModuleType): - return _typecache_cffi_backend - with global_lock: - if not hasattr(type(backend), '__typecache'): - type(backend).__typecache = weakref.WeakValueDictionary() - return type(backend).__typecache - -def global_cache(srctype, ffi, funcname, *args, **kwds): - key = kwds.pop('key', (funcname, args)) - assert not kwds - try: - return ffi._typecache[key] - except KeyError: - pass - try: - res = getattr(ffi._backend, funcname)(*args) - except NotImplementedError as e: - raise NotImplementedError("%s: %r: %s" % (funcname, srctype, e)) - # note that setdefault() on WeakValueDictionary is not atomic - # and contains a rare bug (http://bugs.python.org/issue19542); - # we have to use a lock and do it ourselves - cache = ffi._typecache - with global_lock: - res1 = cache.get(key) - if res1 is None: - cache[key] = res - return res - else: - return res1 - -def pointer_cache(ffi, BType): - return global_cache('?', ffi, 'new_pointer_type', BType) - -def attach_exception_info(e, name): - if e.args and type(e.args[0]) is str: - e.args = ('%s: %s' % (name, e.args[0]),) + e.args[1:] diff --git a/env/lib/python3.10/site-packages/cffi/parse_c_type.h b/env/lib/python3.10/site-packages/cffi/parse_c_type.h deleted file mode 100644 index 84e4ef8..0000000 --- a/env/lib/python3.10/site-packages/cffi/parse_c_type.h +++ /dev/null @@ -1,181 +0,0 @@ - -/* This part is from file 'cffi/parse_c_type.h'. It is copied at the - beginning of C sources generated by CFFI's ffi.set_source(). 
*/ - -typedef void *_cffi_opcode_t; - -#define _CFFI_OP(opcode, arg) (_cffi_opcode_t)(opcode | (((uintptr_t)(arg)) << 8)) -#define _CFFI_GETOP(cffi_opcode) ((unsigned char)(uintptr_t)cffi_opcode) -#define _CFFI_GETARG(cffi_opcode) (((intptr_t)cffi_opcode) >> 8) - -#define _CFFI_OP_PRIMITIVE 1 -#define _CFFI_OP_POINTER 3 -#define _CFFI_OP_ARRAY 5 -#define _CFFI_OP_OPEN_ARRAY 7 -#define _CFFI_OP_STRUCT_UNION 9 -#define _CFFI_OP_ENUM 11 -#define _CFFI_OP_FUNCTION 13 -#define _CFFI_OP_FUNCTION_END 15 -#define _CFFI_OP_NOOP 17 -#define _CFFI_OP_BITFIELD 19 -#define _CFFI_OP_TYPENAME 21 -#define _CFFI_OP_CPYTHON_BLTN_V 23 // varargs -#define _CFFI_OP_CPYTHON_BLTN_N 25 // noargs -#define _CFFI_OP_CPYTHON_BLTN_O 27 // O (i.e. a single arg) -#define _CFFI_OP_CONSTANT 29 -#define _CFFI_OP_CONSTANT_INT 31 -#define _CFFI_OP_GLOBAL_VAR 33 -#define _CFFI_OP_DLOPEN_FUNC 35 -#define _CFFI_OP_DLOPEN_CONST 37 -#define _CFFI_OP_GLOBAL_VAR_F 39 -#define _CFFI_OP_EXTERN_PYTHON 41 - -#define _CFFI_PRIM_VOID 0 -#define _CFFI_PRIM_BOOL 1 -#define _CFFI_PRIM_CHAR 2 -#define _CFFI_PRIM_SCHAR 3 -#define _CFFI_PRIM_UCHAR 4 -#define _CFFI_PRIM_SHORT 5 -#define _CFFI_PRIM_USHORT 6 -#define _CFFI_PRIM_INT 7 -#define _CFFI_PRIM_UINT 8 -#define _CFFI_PRIM_LONG 9 -#define _CFFI_PRIM_ULONG 10 -#define _CFFI_PRIM_LONGLONG 11 -#define _CFFI_PRIM_ULONGLONG 12 -#define _CFFI_PRIM_FLOAT 13 -#define _CFFI_PRIM_DOUBLE 14 -#define _CFFI_PRIM_LONGDOUBLE 15 - -#define _CFFI_PRIM_WCHAR 16 -#define _CFFI_PRIM_INT8 17 -#define _CFFI_PRIM_UINT8 18 -#define _CFFI_PRIM_INT16 19 -#define _CFFI_PRIM_UINT16 20 -#define _CFFI_PRIM_INT32 21 -#define _CFFI_PRIM_UINT32 22 -#define _CFFI_PRIM_INT64 23 -#define _CFFI_PRIM_UINT64 24 -#define _CFFI_PRIM_INTPTR 25 -#define _CFFI_PRIM_UINTPTR 26 -#define _CFFI_PRIM_PTRDIFF 27 -#define _CFFI_PRIM_SIZE 28 -#define _CFFI_PRIM_SSIZE 29 -#define _CFFI_PRIM_INT_LEAST8 30 -#define _CFFI_PRIM_UINT_LEAST8 31 -#define _CFFI_PRIM_INT_LEAST16 32 -#define _CFFI_PRIM_UINT_LEAST16 33 -#define _CFFI_PRIM_INT_LEAST32 34 -#define _CFFI_PRIM_UINT_LEAST32 35 -#define _CFFI_PRIM_INT_LEAST64 36 -#define _CFFI_PRIM_UINT_LEAST64 37 -#define _CFFI_PRIM_INT_FAST8 38 -#define _CFFI_PRIM_UINT_FAST8 39 -#define _CFFI_PRIM_INT_FAST16 40 -#define _CFFI_PRIM_UINT_FAST16 41 -#define _CFFI_PRIM_INT_FAST32 42 -#define _CFFI_PRIM_UINT_FAST32 43 -#define _CFFI_PRIM_INT_FAST64 44 -#define _CFFI_PRIM_UINT_FAST64 45 -#define _CFFI_PRIM_INTMAX 46 -#define _CFFI_PRIM_UINTMAX 47 -#define _CFFI_PRIM_FLOATCOMPLEX 48 -#define _CFFI_PRIM_DOUBLECOMPLEX 49 -#define _CFFI_PRIM_CHAR16 50 -#define _CFFI_PRIM_CHAR32 51 - -#define _CFFI__NUM_PRIM 52 -#define _CFFI__UNKNOWN_PRIM (-1) -#define _CFFI__UNKNOWN_FLOAT_PRIM (-2) -#define _CFFI__UNKNOWN_LONG_DOUBLE (-3) - -#define _CFFI__IO_FILE_STRUCT (-1) - - -struct _cffi_global_s { - const char *name; - void *address; - _cffi_opcode_t type_op; - void *size_or_direct_fn; // OP_GLOBAL_VAR: size, or 0 if unknown - // OP_CPYTHON_BLTN_*: addr of direct function -}; - -struct _cffi_getconst_s { - unsigned long long value; - const struct _cffi_type_context_s *ctx; - int gindex; -}; - -struct _cffi_struct_union_s { - const char *name; - int type_index; // -> _cffi_types, on a OP_STRUCT_UNION - int flags; // _CFFI_F_* flags below - size_t size; - int alignment; - int first_field_index; // -> _cffi_fields array - int num_fields; -}; -#define _CFFI_F_UNION 0x01 // is a union, not a struct -#define _CFFI_F_CHECK_FIELDS 0x02 // complain if fields are not in the - // "standard layout" or if some are missing -#define 
_CFFI_F_PACKED 0x04 // for CHECK_FIELDS, assume a packed struct -#define _CFFI_F_EXTERNAL 0x08 // in some other ffi.include() -#define _CFFI_F_OPAQUE 0x10 // opaque - -struct _cffi_field_s { - const char *name; - size_t field_offset; - size_t field_size; - _cffi_opcode_t field_type_op; -}; - -struct _cffi_enum_s { - const char *name; - int type_index; // -> _cffi_types, on a OP_ENUM - int type_prim; // _CFFI_PRIM_xxx - const char *enumerators; // comma-delimited string -}; - -struct _cffi_typename_s { - const char *name; - int type_index; /* if opaque, points to a possibly artificial - OP_STRUCT which is itself opaque */ -}; - -struct _cffi_type_context_s { - _cffi_opcode_t *types; - const struct _cffi_global_s *globals; - const struct _cffi_field_s *fields; - const struct _cffi_struct_union_s *struct_unions; - const struct _cffi_enum_s *enums; - const struct _cffi_typename_s *typenames; - int num_globals; - int num_struct_unions; - int num_enums; - int num_typenames; - const char *const *includes; - int num_types; - int flags; /* future extension */ -}; - -struct _cffi_parse_info_s { - const struct _cffi_type_context_s *ctx; - _cffi_opcode_t *output; - unsigned int output_size; - size_t error_location; - const char *error_message; -}; - -struct _cffi_externpy_s { - const char *name; - size_t size_of_result; - void *reserved1, *reserved2; -}; - -#ifdef _CFFI_INTERNAL -static int parse_c_type(struct _cffi_parse_info_s *info, const char *input); -static int search_in_globals(const struct _cffi_type_context_s *ctx, - const char *search, size_t search_len); -static int search_in_struct_unions(const struct _cffi_type_context_s *ctx, - const char *search, size_t search_len); -#endif diff --git a/env/lib/python3.10/site-packages/cffi/pkgconfig.py b/env/lib/python3.10/site-packages/cffi/pkgconfig.py deleted file mode 100644 index 5c93f15..0000000 --- a/env/lib/python3.10/site-packages/cffi/pkgconfig.py +++ /dev/null @@ -1,121 +0,0 @@ -# pkg-config, https://www.freedesktop.org/wiki/Software/pkg-config/ integration for cffi -import sys, os, subprocess - -from .error import PkgConfigError - - -def merge_flags(cfg1, cfg2): - """Merge values from cffi config flags cfg2 to cf1 - - Example: - merge_flags({"libraries": ["one"]}, {"libraries": ["two"]}) - {"libraries": ["one", "two"]} - """ - for key, value in cfg2.items(): - if key not in cfg1: - cfg1[key] = value - else: - if not isinstance(cfg1[key], list): - raise TypeError("cfg1[%r] should be a list of strings" % (key,)) - if not isinstance(value, list): - raise TypeError("cfg2[%r] should be a list of strings" % (key,)) - cfg1[key].extend(value) - return cfg1 - - -def call(libname, flag, encoding=sys.getfilesystemencoding()): - """Calls pkg-config and returns the output if found - """ - a = ["pkg-config", "--print-errors"] - a.append(flag) - a.append(libname) - try: - pc = subprocess.Popen(a, stdout=subprocess.PIPE, stderr=subprocess.PIPE) - except EnvironmentError as e: - raise PkgConfigError("cannot run pkg-config: %s" % (str(e).strip(),)) - - bout, berr = pc.communicate() - if pc.returncode != 0: - try: - berr = berr.decode(encoding) - except Exception: - pass - raise PkgConfigError(berr.strip()) - - if sys.version_info >= (3,) and not isinstance(bout, str): # Python 3.x - try: - bout = bout.decode(encoding) - except UnicodeDecodeError: - raise PkgConfigError("pkg-config %s %s returned bytes that cannot " - "be decoded with encoding %r:\n%r" % - (flag, libname, encoding, bout)) - - if os.altsep != '\\' and '\\' in bout: - raise 
PkgConfigError("pkg-config %s %s returned an unsupported " - "backslash-escaped output:\n%r" % - (flag, libname, bout)) - return bout - - -def flags_from_pkgconfig(libs): - r"""Return compiler line flags for FFI.set_source based on pkg-config output - - Usage - ... - ffibuilder.set_source("_foo", pkgconfig = ["libfoo", "libbar >= 1.8.3"]) - - If pkg-config is installed on build machine, then arguments include_dirs, - library_dirs, libraries, define_macros, extra_compile_args and - extra_link_args are extended with an output of pkg-config for libfoo and - libbar. - - Raises PkgConfigError in case the pkg-config call fails. - """ - - def get_include_dirs(string): - return [x[2:] for x in string.split() if x.startswith("-I")] - - def get_library_dirs(string): - return [x[2:] for x in string.split() if x.startswith("-L")] - - def get_libraries(string): - return [x[2:] for x in string.split() if x.startswith("-l")] - - # convert -Dfoo=bar to list of tuples [("foo", "bar")] expected by distutils - def get_macros(string): - def _macro(x): - x = x[2:] # drop "-D" - if '=' in x: - return tuple(x.split("=", 1)) # "-Dfoo=bar" => ("foo", "bar") - else: - return (x, None) # "-Dfoo" => ("foo", None) - return [_macro(x) for x in string.split() if x.startswith("-D")] - - def get_other_cflags(string): - return [x for x in string.split() if not x.startswith("-I") and - not x.startswith("-D")] - - def get_other_libs(string): - return [x for x in string.split() if not x.startswith("-L") and - not x.startswith("-l")] - - # return kwargs for given libname - def kwargs(libname): - fse = sys.getfilesystemencoding() - all_cflags = call(libname, "--cflags") - all_libs = call(libname, "--libs") - return { - "include_dirs": get_include_dirs(all_cflags), - "library_dirs": get_library_dirs(all_libs), - "libraries": get_libraries(all_libs), - "define_macros": get_macros(all_cflags), - "extra_compile_args": get_other_cflags(all_cflags), - "extra_link_args": get_other_libs(all_libs), - } - - # merge all arguments together - ret = {} - for libname in libs: - lib_flags = kwargs(libname) - merge_flags(ret, lib_flags) - return ret diff --git a/env/lib/python3.10/site-packages/cffi/recompiler.py b/env/lib/python3.10/site-packages/cffi/recompiler.py deleted file mode 100644 index 57781a3..0000000 --- a/env/lib/python3.10/site-packages/cffi/recompiler.py +++ /dev/null @@ -1,1598 +0,0 @@ -import os, sys, io -from . 
import ffiplatform, model -from .error import VerificationError -from .cffi_opcode import * - -VERSION_BASE = 0x2601 -VERSION_EMBEDDED = 0x2701 -VERSION_CHAR16CHAR32 = 0x2801 - -USE_LIMITED_API = (sys.platform != 'win32' or sys.version_info < (3, 0) or - sys.version_info >= (3, 5)) - - -class GlobalExpr: - def __init__(self, name, address, type_op, size=0, check_value=0): - self.name = name - self.address = address - self.type_op = type_op - self.size = size - self.check_value = check_value - - def as_c_expr(self): - return ' { "%s", (void *)%s, %s, (void *)%s },' % ( - self.name, self.address, self.type_op.as_c_expr(), self.size) - - def as_python_expr(self): - return "b'%s%s',%d" % (self.type_op.as_python_bytes(), self.name, - self.check_value) - -class FieldExpr: - def __init__(self, name, field_offset, field_size, fbitsize, field_type_op): - self.name = name - self.field_offset = field_offset - self.field_size = field_size - self.fbitsize = fbitsize - self.field_type_op = field_type_op - - def as_c_expr(self): - spaces = " " * len(self.name) - return (' { "%s", %s,\n' % (self.name, self.field_offset) + - ' %s %s,\n' % (spaces, self.field_size) + - ' %s %s },' % (spaces, self.field_type_op.as_c_expr())) - - def as_python_expr(self): - raise NotImplementedError - - def as_field_python_expr(self): - if self.field_type_op.op == OP_NOOP: - size_expr = '' - elif self.field_type_op.op == OP_BITFIELD: - size_expr = format_four_bytes(self.fbitsize) - else: - raise NotImplementedError - return "b'%s%s%s'" % (self.field_type_op.as_python_bytes(), - size_expr, - self.name) - -class StructUnionExpr: - def __init__(self, name, type_index, flags, size, alignment, comment, - first_field_index, c_fields): - self.name = name - self.type_index = type_index - self.flags = flags - self.size = size - self.alignment = alignment - self.comment = comment - self.first_field_index = first_field_index - self.c_fields = c_fields - - def as_c_expr(self): - return (' { "%s", %d, %s,' % (self.name, self.type_index, self.flags) - + '\n %s, %s, ' % (self.size, self.alignment) - + '%d, %d ' % (self.first_field_index, len(self.c_fields)) - + ('/* %s */ ' % self.comment if self.comment else '') - + '},') - - def as_python_expr(self): - flags = eval(self.flags, G_FLAGS) - fields_expr = [c_field.as_field_python_expr() - for c_field in self.c_fields] - return "(b'%s%s%s',%s)" % ( - format_four_bytes(self.type_index), - format_four_bytes(flags), - self.name, - ','.join(fields_expr)) - -class EnumExpr: - def __init__(self, name, type_index, size, signed, allenums): - self.name = name - self.type_index = type_index - self.size = size - self.signed = signed - self.allenums = allenums - - def as_c_expr(self): - return (' { "%s", %d, _cffi_prim_int(%s, %s),\n' - ' "%s" },' % (self.name, self.type_index, - self.size, self.signed, self.allenums)) - - def as_python_expr(self): - prim_index = { - (1, 0): PRIM_UINT8, (1, 1): PRIM_INT8, - (2, 0): PRIM_UINT16, (2, 1): PRIM_INT16, - (4, 0): PRIM_UINT32, (4, 1): PRIM_INT32, - (8, 0): PRIM_UINT64, (8, 1): PRIM_INT64, - }[self.size, self.signed] - return "b'%s%s%s\\x00%s'" % (format_four_bytes(self.type_index), - format_four_bytes(prim_index), - self.name, self.allenums) - -class TypenameExpr: - def __init__(self, name, type_index): - self.name = name - self.type_index = type_index - - def as_c_expr(self): - return ' { "%s", %d },' % (self.name, self.type_index) - - def as_python_expr(self): - return "b'%s%s'" % (format_four_bytes(self.type_index), self.name) - - -# 
____________________________________________________________ - - -class Recompiler: - _num_externpy = 0 - - def __init__(self, ffi, module_name, target_is_python=False): - self.ffi = ffi - self.module_name = module_name - self.target_is_python = target_is_python - self._version = VERSION_BASE - - def needs_version(self, ver): - self._version = max(self._version, ver) - - def collect_type_table(self): - self._typesdict = {} - self._generate("collecttype") - # - all_decls = sorted(self._typesdict, key=str) - # - # prepare all FUNCTION bytecode sequences first - self.cffi_types = [] - for tp in all_decls: - if tp.is_raw_function: - assert self._typesdict[tp] is None - self._typesdict[tp] = len(self.cffi_types) - self.cffi_types.append(tp) # placeholder - for tp1 in tp.args: - assert isinstance(tp1, (model.VoidType, - model.BasePrimitiveType, - model.PointerType, - model.StructOrUnionOrEnum, - model.FunctionPtrType)) - if self._typesdict[tp1] is None: - self._typesdict[tp1] = len(self.cffi_types) - self.cffi_types.append(tp1) # placeholder - self.cffi_types.append('END') # placeholder - # - # prepare all OTHER bytecode sequences - for tp in all_decls: - if not tp.is_raw_function and self._typesdict[tp] is None: - self._typesdict[tp] = len(self.cffi_types) - self.cffi_types.append(tp) # placeholder - if tp.is_array_type and tp.length is not None: - self.cffi_types.append('LEN') # placeholder - assert None not in self._typesdict.values() - # - # collect all structs and unions and enums - self._struct_unions = {} - self._enums = {} - for tp in all_decls: - if isinstance(tp, model.StructOrUnion): - self._struct_unions[tp] = None - elif isinstance(tp, model.EnumType): - self._enums[tp] = None - for i, tp in enumerate(sorted(self._struct_unions, - key=lambda tp: tp.name)): - self._struct_unions[tp] = i - for i, tp in enumerate(sorted(self._enums, - key=lambda tp: tp.name)): - self._enums[tp] = i - # - # emit all bytecode sequences now - for tp in all_decls: - method = getattr(self, '_emit_bytecode_' + tp.__class__.__name__) - method(tp, self._typesdict[tp]) - # - # consistency check - for op in self.cffi_types: - assert isinstance(op, CffiOp) - self.cffi_types = tuple(self.cffi_types) # don't change any more - - def _enum_fields(self, tp): - # When producing C, expand all anonymous struct/union fields. - # That's necessary to have C code checking the offsets of the - # individual fields contained in them. When producing Python, - # don't do it and instead write it like it is, with the - # corresponding fields having an empty name. Empty names are - # recognized at runtime when we import the generated Python - # file. 
- expand_anonymous_struct_union = not self.target_is_python - return tp.enumfields(expand_anonymous_struct_union) - - def _do_collect_type(self, tp): - if not isinstance(tp, model.BaseTypeByIdentity): - if isinstance(tp, tuple): - for x in tp: - self._do_collect_type(x) - return - if tp not in self._typesdict: - self._typesdict[tp] = None - if isinstance(tp, model.FunctionPtrType): - self._do_collect_type(tp.as_raw_function()) - elif isinstance(tp, model.StructOrUnion): - if tp.fldtypes is not None and ( - tp not in self.ffi._parser._included_declarations): - for name1, tp1, _, _ in self._enum_fields(tp): - self._do_collect_type(self._field_type(tp, name1, tp1)) - else: - for _, x in tp._get_items(): - self._do_collect_type(x) - - def _generate(self, step_name): - lst = self.ffi._parser._declarations.items() - for name, (tp, quals) in sorted(lst): - kind, realname = name.split(' ', 1) - try: - method = getattr(self, '_generate_cpy_%s_%s' % (kind, - step_name)) - except AttributeError: - raise VerificationError( - "not implemented in recompile(): %r" % name) - try: - self._current_quals = quals - method(tp, realname) - except Exception as e: - model.attach_exception_info(e, name) - raise - - # ---------- - - ALL_STEPS = ["global", "field", "struct_union", "enum", "typename"] - - def collect_step_tables(self): - # collect the declarations for '_cffi_globals', '_cffi_typenames', etc. - self._lsts = {} - for step_name in self.ALL_STEPS: - self._lsts[step_name] = [] - self._seen_struct_unions = set() - self._generate("ctx") - self._add_missing_struct_unions() - # - for step_name in self.ALL_STEPS: - lst = self._lsts[step_name] - if step_name != "field": - lst.sort(key=lambda entry: entry.name) - self._lsts[step_name] = tuple(lst) # don't change any more - # - # check for a possible internal inconsistency: _cffi_struct_unions - # should have been generated with exactly self._struct_unions - lst = self._lsts["struct_union"] - for tp, i in self._struct_unions.items(): - assert i < len(lst) - assert lst[i].name == tp.name - assert len(lst) == len(self._struct_unions) - # same with enums - lst = self._lsts["enum"] - for tp, i in self._enums.items(): - assert i < len(lst) - assert lst[i].name == tp.name - assert len(lst) == len(self._enums) - - # ---------- - - def _prnt(self, what=''): - self._f.write(what + '\n') - - def write_source_to_f(self, f, preamble): - if self.target_is_python: - assert preamble is None - self.write_py_source_to_f(f) - else: - assert preamble is not None - self.write_c_source_to_f(f, preamble) - - def _rel_readlines(self, filename): - g = open(os.path.join(os.path.dirname(__file__), filename), 'r') - lines = g.readlines() - g.close() - return lines - - def write_c_source_to_f(self, f, preamble): - self._f = f - prnt = self._prnt - if self.ffi._embedding is not None: - prnt('#define _CFFI_USE_EMBEDDING') - if not USE_LIMITED_API: - prnt('#define _CFFI_NO_LIMITED_API') - # - # first the '#include' (actually done by inlining the file's content) - lines = self._rel_readlines('_cffi_include.h') - i = lines.index('#include "parse_c_type.h"\n') - lines[i:i+1] = self._rel_readlines('parse_c_type.h') - prnt(''.join(lines)) - # - # if we have ffi._embedding != None, we give it here as a macro - # and include an extra file - base_module_name = self.module_name.split('.')[-1] - if self.ffi._embedding is not None: - prnt('#define _CFFI_MODULE_NAME "%s"' % (self.module_name,)) - prnt('static const char _CFFI_PYTHON_STARTUP_CODE[] = {') - 
self._print_string_literal_in_array(self.ffi._embedding) - prnt('0 };') - prnt('#ifdef PYPY_VERSION') - prnt('# define _CFFI_PYTHON_STARTUP_FUNC _cffi_pypyinit_%s' % ( - base_module_name,)) - prnt('#elif PY_MAJOR_VERSION >= 3') - prnt('# define _CFFI_PYTHON_STARTUP_FUNC PyInit_%s' % ( - base_module_name,)) - prnt('#else') - prnt('# define _CFFI_PYTHON_STARTUP_FUNC init%s' % ( - base_module_name,)) - prnt('#endif') - lines = self._rel_readlines('_embedding.h') - i = lines.index('#include "_cffi_errors.h"\n') - lines[i:i+1] = self._rel_readlines('_cffi_errors.h') - prnt(''.join(lines)) - self.needs_version(VERSION_EMBEDDED) - # - # then paste the C source given by the user, verbatim. - prnt('/************************************************************/') - prnt() - prnt(preamble) - prnt() - prnt('/************************************************************/') - prnt() - # - # the declaration of '_cffi_types' - prnt('static void *_cffi_types[] = {') - typeindex2type = dict([(i, tp) for (tp, i) in self._typesdict.items()]) - for i, op in enumerate(self.cffi_types): - comment = '' - if i in typeindex2type: - comment = ' // ' + typeindex2type[i]._get_c_name() - prnt('/* %2d */ %s,%s' % (i, op.as_c_expr(), comment)) - if not self.cffi_types: - prnt(' 0') - prnt('};') - prnt() - # - # call generate_cpy_xxx_decl(), for every xxx found from - # ffi._parser._declarations. This generates all the functions. - self._seen_constants = set() - self._generate("decl") - # - # the declaration of '_cffi_globals' and '_cffi_typenames' - nums = {} - for step_name in self.ALL_STEPS: - lst = self._lsts[step_name] - nums[step_name] = len(lst) - if nums[step_name] > 0: - prnt('static const struct _cffi_%s_s _cffi_%ss[] = {' % ( - step_name, step_name)) - for entry in lst: - prnt(entry.as_c_expr()) - prnt('};') - prnt() - # - # the declaration of '_cffi_includes' - if self.ffi._included_ffis: - prnt('static const char * const _cffi_includes[] = {') - for ffi_to_include in self.ffi._included_ffis: - try: - included_module_name, included_source = ( - ffi_to_include._assigned_source[:2]) - except AttributeError: - raise VerificationError( - "ffi object %r includes %r, but the latter has not " - "been prepared with set_source()" % ( - self.ffi, ffi_to_include,)) - if included_source is None: - raise VerificationError( - "not implemented yet: ffi.include() of a Python-based " - "ffi inside a C-based ffi") - prnt(' "%s",' % (included_module_name,)) - prnt(' NULL') - prnt('};') - prnt() - # - # the declaration of '_cffi_type_context' - prnt('static const struct _cffi_type_context_s _cffi_type_context = {') - prnt(' _cffi_types,') - for step_name in self.ALL_STEPS: - if nums[step_name] > 0: - prnt(' _cffi_%ss,' % step_name) - else: - prnt(' NULL, /* no %ss */' % step_name) - for step_name in self.ALL_STEPS: - if step_name != "field": - prnt(' %d, /* num_%ss */' % (nums[step_name], step_name)) - if self.ffi._included_ffis: - prnt(' _cffi_includes,') - else: - prnt(' NULL, /* no includes */') - prnt(' %d, /* num_types */' % (len(self.cffi_types),)) - flags = 0 - if self._num_externpy > 0 or self.ffi._embedding is not None: - flags |= 1 # set to mean that we use extern "Python" - prnt(' %d, /* flags */' % flags) - prnt('};') - prnt() - # - # the init function - prnt('#ifdef __GNUC__') - prnt('# pragma GCC visibility push(default) /* for -fvisibility= */') - prnt('#endif') - prnt() - prnt('#ifdef PYPY_VERSION') - prnt('PyMODINIT_FUNC') - prnt('_cffi_pypyinit_%s(const void *p[])' % (base_module_name,)) - prnt('{') - if flags & 
1: - prnt(' if (((intptr_t)p[0]) >= 0x0A03) {') - prnt(' _cffi_call_python_org = ' - '(void(*)(struct _cffi_externpy_s *, char *))p[1];') - prnt(' }') - prnt(' p[0] = (const void *)0x%x;' % self._version) - prnt(' p[1] = &_cffi_type_context;') - prnt('#if PY_MAJOR_VERSION >= 3') - prnt(' return NULL;') - prnt('#endif') - prnt('}') - # on Windows, distutils insists on putting init_cffi_xyz in - # 'export_symbols', so instead of fighting it, just give up and - # give it one - prnt('# ifdef _MSC_VER') - prnt(' PyMODINIT_FUNC') - prnt('# if PY_MAJOR_VERSION >= 3') - prnt(' PyInit_%s(void) { return NULL; }' % (base_module_name,)) - prnt('# else') - prnt(' init%s(void) { }' % (base_module_name,)) - prnt('# endif') - prnt('# endif') - prnt('#elif PY_MAJOR_VERSION >= 3') - prnt('PyMODINIT_FUNC') - prnt('PyInit_%s(void)' % (base_module_name,)) - prnt('{') - prnt(' return _cffi_init("%s", 0x%x, &_cffi_type_context);' % ( - self.module_name, self._version)) - prnt('}') - prnt('#else') - prnt('PyMODINIT_FUNC') - prnt('init%s(void)' % (base_module_name,)) - prnt('{') - prnt(' _cffi_init("%s", 0x%x, &_cffi_type_context);' % ( - self.module_name, self._version)) - prnt('}') - prnt('#endif') - prnt() - prnt('#ifdef __GNUC__') - prnt('# pragma GCC visibility pop') - prnt('#endif') - self._version = None - - def _to_py(self, x): - if isinstance(x, str): - return "b'%s'" % (x,) - if isinstance(x, (list, tuple)): - rep = [self._to_py(item) for item in x] - if len(rep) == 1: - rep.append('') - return "(%s)" % (','.join(rep),) - return x.as_python_expr() # Py2: unicode unexpected; Py3: bytes unexp. - - def write_py_source_to_f(self, f): - self._f = f - prnt = self._prnt - # - # header - prnt("# auto-generated file") - prnt("import _cffi_backend") - # - # the 'import' of the included ffis - num_includes = len(self.ffi._included_ffis or ()) - for i in range(num_includes): - ffi_to_include = self.ffi._included_ffis[i] - try: - included_module_name, included_source = ( - ffi_to_include._assigned_source[:2]) - except AttributeError: - raise VerificationError( - "ffi object %r includes %r, but the latter has not " - "been prepared with set_source()" % ( - self.ffi, ffi_to_include,)) - if included_source is not None: - raise VerificationError( - "not implemented yet: ffi.include() of a C-based " - "ffi inside a Python-based ffi") - prnt('from %s import ffi as _ffi%d' % (included_module_name, i)) - prnt() - prnt("ffi = _cffi_backend.FFI('%s'," % (self.module_name,)) - prnt(" _version = 0x%x," % (self._version,)) - self._version = None - # - # the '_types' keyword argument - self.cffi_types = tuple(self.cffi_types) # don't change any more - types_lst = [op.as_python_bytes() for op in self.cffi_types] - prnt(' _types = %s,' % (self._to_py(''.join(types_lst)),)) - typeindex2type = dict([(i, tp) for (tp, i) in self._typesdict.items()]) - # - # the keyword arguments from ALL_STEPS - for step_name in self.ALL_STEPS: - lst = self._lsts[step_name] - if len(lst) > 0 and step_name != "field": - prnt(' _%ss = %s,' % (step_name, self._to_py(lst))) - # - # the '_includes' keyword argument - if num_includes > 0: - prnt(' _includes = (%s,),' % ( - ', '.join(['_ffi%d' % i for i in range(num_includes)]),)) - # - # the footer - prnt(')') - - # ---------- - - def _gettypenum(self, type): - # a KeyError here is a bug. please report it! 
:-) - return self._typesdict[type] - - def _convert_funcarg_to_c(self, tp, fromvar, tovar, errcode): - extraarg = '' - if isinstance(tp, model.BasePrimitiveType) and not tp.is_complex_type(): - if tp.is_integer_type() and tp.name != '_Bool': - converter = '_cffi_to_c_int' - extraarg = ', %s' % tp.name - elif isinstance(tp, model.UnknownFloatType): - # don't check with is_float_type(): it may be a 'long - # double' here, and _cffi_to_c_double would loose precision - converter = '(%s)_cffi_to_c_double' % (tp.get_c_name(''),) - else: - cname = tp.get_c_name('') - converter = '(%s)_cffi_to_c_%s' % (cname, - tp.name.replace(' ', '_')) - if cname in ('char16_t', 'char32_t'): - self.needs_version(VERSION_CHAR16CHAR32) - errvalue = '-1' - # - elif isinstance(tp, model.PointerType): - self._convert_funcarg_to_c_ptr_or_array(tp, fromvar, - tovar, errcode) - return - # - elif (isinstance(tp, model.StructOrUnionOrEnum) or - isinstance(tp, model.BasePrimitiveType)): - # a struct (not a struct pointer) as a function argument; - # or, a complex (the same code works) - self._prnt(' if (_cffi_to_c((char *)&%s, _cffi_type(%d), %s) < 0)' - % (tovar, self._gettypenum(tp), fromvar)) - self._prnt(' %s;' % errcode) - return - # - elif isinstance(tp, model.FunctionPtrType): - converter = '(%s)_cffi_to_c_pointer' % tp.get_c_name('') - extraarg = ', _cffi_type(%d)' % self._gettypenum(tp) - errvalue = 'NULL' - # - else: - raise NotImplementedError(tp) - # - self._prnt(' %s = %s(%s%s);' % (tovar, converter, fromvar, extraarg)) - self._prnt(' if (%s == (%s)%s && PyErr_Occurred())' % ( - tovar, tp.get_c_name(''), errvalue)) - self._prnt(' %s;' % errcode) - - def _extra_local_variables(self, tp, localvars, freelines): - if isinstance(tp, model.PointerType): - localvars.add('Py_ssize_t datasize') - localvars.add('struct _cffi_freeme_s *large_args_free = NULL') - freelines.add('if (large_args_free != NULL)' - ' _cffi_free_array_arguments(large_args_free);') - - def _convert_funcarg_to_c_ptr_or_array(self, tp, fromvar, tovar, errcode): - self._prnt(' datasize = _cffi_prepare_pointer_call_argument(') - self._prnt(' _cffi_type(%d), %s, (char **)&%s);' % ( - self._gettypenum(tp), fromvar, tovar)) - self._prnt(' if (datasize != 0) {') - self._prnt(' %s = ((size_t)datasize) <= 640 ? 
' - '(%s)alloca((size_t)datasize) : NULL;' % ( - tovar, tp.get_c_name(''))) - self._prnt(' if (_cffi_convert_array_argument(_cffi_type(%d), %s, ' - '(char **)&%s,' % (self._gettypenum(tp), fromvar, tovar)) - self._prnt(' datasize, &large_args_free) < 0)') - self._prnt(' %s;' % errcode) - self._prnt(' }') - - def _convert_expr_from_c(self, tp, var, context): - if isinstance(tp, model.BasePrimitiveType): - if tp.is_integer_type() and tp.name != '_Bool': - return '_cffi_from_c_int(%s, %s)' % (var, tp.name) - elif isinstance(tp, model.UnknownFloatType): - return '_cffi_from_c_double(%s)' % (var,) - elif tp.name != 'long double' and not tp.is_complex_type(): - cname = tp.name.replace(' ', '_') - if cname in ('char16_t', 'char32_t'): - self.needs_version(VERSION_CHAR16CHAR32) - return '_cffi_from_c_%s(%s)' % (cname, var) - else: - return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % ( - var, self._gettypenum(tp)) - elif isinstance(tp, (model.PointerType, model.FunctionPtrType)): - return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % ( - var, self._gettypenum(tp)) - elif isinstance(tp, model.ArrayType): - return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % ( - var, self._gettypenum(model.PointerType(tp.item))) - elif isinstance(tp, model.StructOrUnion): - if tp.fldnames is None: - raise TypeError("'%s' is used as %s, but is opaque" % ( - tp._get_c_name(), context)) - return '_cffi_from_c_struct((char *)&%s, _cffi_type(%d))' % ( - var, self._gettypenum(tp)) - elif isinstance(tp, model.EnumType): - return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % ( - var, self._gettypenum(tp)) - else: - raise NotImplementedError(tp) - - # ---------- - # typedefs - - def _typedef_type(self, tp, name): - return self._global_type(tp, "(*(%s *)0)" % (name,)) - - def _generate_cpy_typedef_collecttype(self, tp, name): - self._do_collect_type(self._typedef_type(tp, name)) - - def _generate_cpy_typedef_decl(self, tp, name): - pass - - def _typedef_ctx(self, tp, name): - type_index = self._typesdict[tp] - self._lsts["typename"].append(TypenameExpr(name, type_index)) - - def _generate_cpy_typedef_ctx(self, tp, name): - tp = self._typedef_type(tp, name) - self._typedef_ctx(tp, name) - if getattr(tp, "origin", None) == "unknown_type": - self._struct_ctx(tp, tp.name, approxname=None) - elif isinstance(tp, model.NamedPointerType): - self._struct_ctx(tp.totype, tp.totype.name, approxname=tp.name, - named_ptr=tp) - - # ---------- - # function declarations - - def _generate_cpy_function_collecttype(self, tp, name): - self._do_collect_type(tp.as_raw_function()) - if tp.ellipsis and not self.target_is_python: - self._do_collect_type(tp) - - def _generate_cpy_function_decl(self, tp, name): - assert not self.target_is_python - assert isinstance(tp, model.FunctionPtrType) - if tp.ellipsis: - # cannot support vararg functions better than this: check for its - # exact type (including the fixed arguments), and build it as a - # constant function pointer (no CPython wrapper) - self._generate_cpy_constant_decl(tp, name) - return - prnt = self._prnt - numargs = len(tp.args) - if numargs == 0: - argname = 'noarg' - elif numargs == 1: - argname = 'arg0' - else: - argname = 'args' - # - # ------------------------------ - # the 'd' version of the function, only for addressof(lib, 'func') - arguments = [] - call_arguments = [] - context = 'argument of %s' % name - for i, type in enumerate(tp.args): - arguments.append(type.get_c_name(' x%d' % i, context)) - call_arguments.append('x%d' % i) - repr_arguments = ', 
'.join(arguments) - repr_arguments = repr_arguments or 'void' - if tp.abi: - abi = tp.abi + ' ' - else: - abi = '' - name_and_arguments = '%s_cffi_d_%s(%s)' % (abi, name, repr_arguments) - prnt('static %s' % (tp.result.get_c_name(name_and_arguments),)) - prnt('{') - call_arguments = ', '.join(call_arguments) - result_code = 'return ' - if isinstance(tp.result, model.VoidType): - result_code = '' - prnt(' %s%s(%s);' % (result_code, name, call_arguments)) - prnt('}') - # - prnt('#ifndef PYPY_VERSION') # ------------------------------ - # - prnt('static PyObject *') - prnt('_cffi_f_%s(PyObject *self, PyObject *%s)' % (name, argname)) - prnt('{') - # - context = 'argument of %s' % name - for i, type in enumerate(tp.args): - arg = type.get_c_name(' x%d' % i, context) - prnt(' %s;' % arg) - # - localvars = set() - freelines = set() - for type in tp.args: - self._extra_local_variables(type, localvars, freelines) - for decl in sorted(localvars): - prnt(' %s;' % (decl,)) - # - if not isinstance(tp.result, model.VoidType): - result_code = 'result = ' - context = 'result of %s' % name - result_decl = ' %s;' % tp.result.get_c_name(' result', context) - prnt(result_decl) - prnt(' PyObject *pyresult;') - else: - result_decl = None - result_code = '' - # - if len(tp.args) > 1: - rng = range(len(tp.args)) - for i in rng: - prnt(' PyObject *arg%d;' % i) - prnt() - prnt(' if (!PyArg_UnpackTuple(args, "%s", %d, %d, %s))' % ( - name, len(rng), len(rng), - ', '.join(['&arg%d' % i for i in rng]))) - prnt(' return NULL;') - prnt() - # - for i, type in enumerate(tp.args): - self._convert_funcarg_to_c(type, 'arg%d' % i, 'x%d' % i, - 'return NULL') - prnt() - # - prnt(' Py_BEGIN_ALLOW_THREADS') - prnt(' _cffi_restore_errno();') - call_arguments = ['x%d' % i for i in range(len(tp.args))] - call_arguments = ', '.join(call_arguments) - prnt(' { %s%s(%s); }' % (result_code, name, call_arguments)) - prnt(' _cffi_save_errno();') - prnt(' Py_END_ALLOW_THREADS') - prnt() - # - prnt(' (void)self; /* unused */') - if numargs == 0: - prnt(' (void)noarg; /* unused */') - if result_code: - prnt(' pyresult = %s;' % - self._convert_expr_from_c(tp.result, 'result', 'result type')) - for freeline in freelines: - prnt(' ' + freeline) - prnt(' return pyresult;') - else: - for freeline in freelines: - prnt(' ' + freeline) - prnt(' Py_INCREF(Py_None);') - prnt(' return Py_None;') - prnt('}') - # - prnt('#else') # ------------------------------ - # - # the PyPy version: need to replace struct/union arguments with - # pointers, and if the result is a struct/union, insert a first - # arg that is a pointer to the result. We also do that for - # complex args and return type. 
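From the caller's side the indirection described above is invisible; a by-value struct argument is still passed as a plain cdata. A rough sketch, with hypothetical names:

```python
# Sketch of a by-value struct argument (hypothetical names); the pointer
# rewriting discussed above only affects the generated C for PyPy.
import cffi

ffi = cffi.FFI()
ffi.cdef("""
    typedef struct { double x, y; } point_t;
    double norm2(point_t p);
""")
ffi.set_source("_point_example", """
    typedef struct { double x, y; } point_t;
    static double norm2(point_t p) { return p.x * p.x + p.y * p.y; }
""")

# After ffi.compile():
#   from _point_example import ffi, lib
#   p = ffi.new("point_t *", {"x": 3.0, "y": 4.0})
#   lib.norm2(p[0])          # struct passed by value
```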
- def need_indirection(type): - return (isinstance(type, model.StructOrUnion) or - (isinstance(type, model.PrimitiveType) and - type.is_complex_type())) - difference = False - arguments = [] - call_arguments = [] - context = 'argument of %s' % name - for i, type in enumerate(tp.args): - indirection = '' - if need_indirection(type): - indirection = '*' - difference = True - arg = type.get_c_name(' %sx%d' % (indirection, i), context) - arguments.append(arg) - call_arguments.append('%sx%d' % (indirection, i)) - tp_result = tp.result - if need_indirection(tp_result): - context = 'result of %s' % name - arg = tp_result.get_c_name(' *result', context) - arguments.insert(0, arg) - tp_result = model.void_type - result_decl = None - result_code = '*result = ' - difference = True - if difference: - repr_arguments = ', '.join(arguments) - repr_arguments = repr_arguments or 'void' - name_and_arguments = '%s_cffi_f_%s(%s)' % (abi, name, - repr_arguments) - prnt('static %s' % (tp_result.get_c_name(name_and_arguments),)) - prnt('{') - if result_decl: - prnt(result_decl) - call_arguments = ', '.join(call_arguments) - prnt(' { %s%s(%s); }' % (result_code, name, call_arguments)) - if result_decl: - prnt(' return result;') - prnt('}') - else: - prnt('# define _cffi_f_%s _cffi_d_%s' % (name, name)) - # - prnt('#endif') # ------------------------------ - prnt() - - def _generate_cpy_function_ctx(self, tp, name): - if tp.ellipsis and not self.target_is_python: - self._generate_cpy_constant_ctx(tp, name) - return - type_index = self._typesdict[tp.as_raw_function()] - numargs = len(tp.args) - if self.target_is_python: - meth_kind = OP_DLOPEN_FUNC - elif numargs == 0: - meth_kind = OP_CPYTHON_BLTN_N # 'METH_NOARGS' - elif numargs == 1: - meth_kind = OP_CPYTHON_BLTN_O # 'METH_O' - else: - meth_kind = OP_CPYTHON_BLTN_V # 'METH_VARARGS' - self._lsts["global"].append( - GlobalExpr(name, '_cffi_f_%s' % name, - CffiOp(meth_kind, type_index), - size='_cffi_d_%s' % name)) - - # ---------- - # named structs or unions - - def _field_type(self, tp_struct, field_name, tp_field): - if isinstance(tp_field, model.ArrayType): - actual_length = tp_field.length - if actual_length == '...': - ptr_struct_name = tp_struct.get_c_name('*') - actual_length = '_cffi_array_len(((%s)0)->%s)' % ( - ptr_struct_name, field_name) - tp_item = self._field_type(tp_struct, '%s[0]' % field_name, - tp_field.item) - tp_field = model.ArrayType(tp_item, actual_length) - return tp_field - - def _struct_collecttype(self, tp): - self._do_collect_type(tp) - if self.target_is_python: - # also requires nested anon struct/unions in ABI mode, recursively - for fldtype in tp.anonymous_struct_fields(): - self._struct_collecttype(fldtype) - - def _struct_decl(self, tp, cname, approxname): - if tp.fldtypes is None: - return - prnt = self._prnt - checkfuncname = '_cffi_checkfld_%s' % (approxname,) - prnt('_CFFI_UNUSED_FN') - prnt('static void %s(%s *p)' % (checkfuncname, cname)) - prnt('{') - prnt(' /* only to generate compile-time warnings or errors */') - prnt(' (void)p;') - for fname, ftype, fbitsize, fqual in self._enum_fields(tp): - try: - if ftype.is_integer_type() or fbitsize >= 0: - # accept all integers, but complain on float or double - if fname != '': - prnt(" (void)((p->%s) | 0); /* check that '%s.%s' is " - "an integer */" % (fname, cname, fname)) - continue - # only accept exactly the type declared, except that '[]' - # is interpreted as a '*' and so will match any array length. - # (It would also match '*', but that's harder to detect...) 
- while (isinstance(ftype, model.ArrayType) - and (ftype.length is None or ftype.length == '...')): - ftype = ftype.item - fname = fname + '[0]' - prnt(' { %s = &p->%s; (void)tmp; }' % ( - ftype.get_c_name('*tmp', 'field %r'%fname, quals=fqual), - fname)) - except VerificationError as e: - prnt(' /* %s */' % str(e)) # cannot verify it, ignore - prnt('}') - prnt('struct _cffi_align_%s { char x; %s y; };' % (approxname, cname)) - prnt() - - def _struct_ctx(self, tp, cname, approxname, named_ptr=None): - type_index = self._typesdict[tp] - reason_for_not_expanding = None - flags = [] - if isinstance(tp, model.UnionType): - flags.append("_CFFI_F_UNION") - if tp.fldtypes is None: - flags.append("_CFFI_F_OPAQUE") - reason_for_not_expanding = "opaque" - if (tp not in self.ffi._parser._included_declarations and - (named_ptr is None or - named_ptr not in self.ffi._parser._included_declarations)): - if tp.fldtypes is None: - pass # opaque - elif tp.partial or any(tp.anonymous_struct_fields()): - pass # field layout obtained silently from the C compiler - else: - flags.append("_CFFI_F_CHECK_FIELDS") - if tp.packed: - if tp.packed > 1: - raise NotImplementedError( - "%r is declared with 'pack=%r'; only 0 or 1 are " - "supported in API mode (try to use \"...;\", which " - "does not require a 'pack' declaration)" % - (tp, tp.packed)) - flags.append("_CFFI_F_PACKED") - else: - flags.append("_CFFI_F_EXTERNAL") - reason_for_not_expanding = "external" - flags = '|'.join(flags) or '0' - c_fields = [] - if reason_for_not_expanding is None: - enumfields = list(self._enum_fields(tp)) - for fldname, fldtype, fbitsize, fqual in enumfields: - fldtype = self._field_type(tp, fldname, fldtype) - self._check_not_opaque(fldtype, - "field '%s.%s'" % (tp.name, fldname)) - # cname is None for _add_missing_struct_unions() only - op = OP_NOOP - if fbitsize >= 0: - op = OP_BITFIELD - size = '%d /* bits */' % fbitsize - elif cname is None or ( - isinstance(fldtype, model.ArrayType) and - fldtype.length is None): - size = '(size_t)-1' - else: - size = 'sizeof(((%s)0)->%s)' % ( - tp.get_c_name('*') if named_ptr is None - else named_ptr.name, - fldname) - if cname is None or fbitsize >= 0: - offset = '(size_t)-1' - elif named_ptr is not None: - offset = '((char *)&((%s)4096)->%s) - (char *)4096' % ( - named_ptr.name, fldname) - else: - offset = 'offsetof(%s, %s)' % (tp.get_c_name(''), fldname) - c_fields.append( - FieldExpr(fldname, offset, size, fbitsize, - CffiOp(op, self._typesdict[fldtype]))) - first_field_index = len(self._lsts["field"]) - self._lsts["field"].extend(c_fields) - # - if cname is None: # unknown name, for _add_missing_struct_unions - size = '(size_t)-2' - align = -2 - comment = "unnamed" - else: - if named_ptr is not None: - size = 'sizeof(*(%s)0)' % (named_ptr.name,) - align = '-1 /* unknown alignment */' - else: - size = 'sizeof(%s)' % (cname,) - align = 'offsetof(struct _cffi_align_%s, y)' % (approxname,) - comment = None - else: - size = '(size_t)-1' - align = -1 - first_field_index = -1 - comment = reason_for_not_expanding - self._lsts["struct_union"].append( - StructUnionExpr(tp.name, type_index, flags, size, align, comment, - first_field_index, c_fields)) - self._seen_struct_unions.add(tp) - - def _check_not_opaque(self, tp, location): - while isinstance(tp, model.ArrayType): - tp = tp.item - if isinstance(tp, model.StructOrUnion) and tp.fldtypes is None: - raise TypeError( - "%s is of an opaque type (not declared in cdef())" % location) - - def _add_missing_struct_unions(self): - # not very nice, but 
some struct declarations might be missing - # because they don't have any known C name. Check that they are - # not partial (we can't complete or verify them!) and emit them - # anonymously. - lst = list(self._struct_unions.items()) - lst.sort(key=lambda tp_order: tp_order[1]) - for tp, order in lst: - if tp not in self._seen_struct_unions: - if tp.partial: - raise NotImplementedError("internal inconsistency: %r is " - "partial but was not seen at " - "this point" % (tp,)) - if tp.name.startswith('$') and tp.name[1:].isdigit(): - approxname = tp.name[1:] - elif tp.name == '_IO_FILE' and tp.forcename == 'FILE': - approxname = 'FILE' - self._typedef_ctx(tp, 'FILE') - else: - raise NotImplementedError("internal inconsistency: %r" % - (tp,)) - self._struct_ctx(tp, None, approxname) - - def _generate_cpy_struct_collecttype(self, tp, name): - self._struct_collecttype(tp) - _generate_cpy_union_collecttype = _generate_cpy_struct_collecttype - - def _struct_names(self, tp): - cname = tp.get_c_name('') - if ' ' in cname: - return cname, cname.replace(' ', '_') - else: - return cname, '_' + cname - - def _generate_cpy_struct_decl(self, tp, name): - self._struct_decl(tp, *self._struct_names(tp)) - _generate_cpy_union_decl = _generate_cpy_struct_decl - - def _generate_cpy_struct_ctx(self, tp, name): - self._struct_ctx(tp, *self._struct_names(tp)) - _generate_cpy_union_ctx = _generate_cpy_struct_ctx - - # ---------- - # 'anonymous' declarations. These are produced for anonymous structs - # or unions; the 'name' is obtained by a typedef. - - def _generate_cpy_anonymous_collecttype(self, tp, name): - if isinstance(tp, model.EnumType): - self._generate_cpy_enum_collecttype(tp, name) - else: - self._struct_collecttype(tp) - - def _generate_cpy_anonymous_decl(self, tp, name): - if isinstance(tp, model.EnumType): - self._generate_cpy_enum_decl(tp) - else: - self._struct_decl(tp, name, 'typedef_' + name) - - def _generate_cpy_anonymous_ctx(self, tp, name): - if isinstance(tp, model.EnumType): - self._enum_ctx(tp, name) - else: - self._struct_ctx(tp, name, 'typedef_' + name) - - # ---------- - # constants, declared with "static const ..." 
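The constant and macro handling that follows corresponds to declarations like these on the user side; a small sketch with invented names:

```python
# Sketch of the user-side declarations handled by the constant machinery
# below; module and constant names are invented.
import cffi

ffi = cffi.FFI()
ffi.cdef("""
    #define MAX_RETRIES ...          /* integer macro, value taken from C */
    static const int DEFAULT_PORT;   /* "static const ..." constant */
""")
ffi.set_source("_const_example", """
    #define MAX_RETRIES 5
    static const int DEFAULT_PORT = 8080;
""")

# After ffi.compile(), both appear as attributes of the compiled module:
#   from _const_example import lib
#   lib.MAX_RETRIES, lib.DEFAULT_PORT
```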
- - def _generate_cpy_const(self, is_int, name, tp=None, category='const', - check_value=None): - if (category, name) in self._seen_constants: - raise VerificationError( - "duplicate declaration of %s '%s'" % (category, name)) - self._seen_constants.add((category, name)) - # - prnt = self._prnt - funcname = '_cffi_%s_%s' % (category, name) - if is_int: - prnt('static int %s(unsigned long long *o)' % funcname) - prnt('{') - prnt(' int n = (%s) <= 0;' % (name,)) - prnt(' *o = (unsigned long long)((%s) | 0);' - ' /* check that %s is an integer */' % (name, name)) - if check_value is not None: - if check_value > 0: - check_value = '%dU' % (check_value,) - prnt(' if (!_cffi_check_int(*o, n, %s))' % (check_value,)) - prnt(' n |= 2;') - prnt(' return n;') - prnt('}') - else: - assert check_value is None - prnt('static void %s(char *o)' % funcname) - prnt('{') - prnt(' *(%s)o = %s;' % (tp.get_c_name('*'), name)) - prnt('}') - prnt() - - def _generate_cpy_constant_collecttype(self, tp, name): - is_int = tp.is_integer_type() - if not is_int or self.target_is_python: - self._do_collect_type(tp) - - def _generate_cpy_constant_decl(self, tp, name): - is_int = tp.is_integer_type() - self._generate_cpy_const(is_int, name, tp) - - def _generate_cpy_constant_ctx(self, tp, name): - if not self.target_is_python and tp.is_integer_type(): - type_op = CffiOp(OP_CONSTANT_INT, -1) - else: - if self.target_is_python: - const_kind = OP_DLOPEN_CONST - else: - const_kind = OP_CONSTANT - type_index = self._typesdict[tp] - type_op = CffiOp(const_kind, type_index) - self._lsts["global"].append( - GlobalExpr(name, '_cffi_const_%s' % name, type_op)) - - # ---------- - # enums - - def _generate_cpy_enum_collecttype(self, tp, name): - self._do_collect_type(tp) - - def _generate_cpy_enum_decl(self, tp, name=None): - for enumerator in tp.enumerators: - self._generate_cpy_const(True, enumerator) - - def _enum_ctx(self, tp, cname): - type_index = self._typesdict[tp] - type_op = CffiOp(OP_ENUM, -1) - if self.target_is_python: - tp.check_not_partial() - for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): - self._lsts["global"].append( - GlobalExpr(enumerator, '_cffi_const_%s' % enumerator, type_op, - check_value=enumvalue)) - # - if cname is not None and '$' not in cname and not self.target_is_python: - size = "sizeof(%s)" % cname - signed = "((%s)-1) <= 0" % cname - else: - basetp = tp.build_baseinttype(self.ffi, []) - size = self.ffi.sizeof(basetp) - signed = int(int(self.ffi.cast(basetp, -1)) < 0) - allenums = ",".join(tp.enumerators) - self._lsts["enum"].append( - EnumExpr(tp.name, type_index, size, signed, allenums)) - - def _generate_cpy_enum_ctx(self, tp, name): - self._enum_ctx(tp, tp._get_c_name()) - - # ---------- - # macros: for now only for integers - - def _generate_cpy_macro_collecttype(self, tp, name): - pass - - def _generate_cpy_macro_decl(self, tp, name): - if tp == '...': - check_value = None - else: - check_value = tp # an integer - self._generate_cpy_const(True, name, check_value=check_value) - - def _generate_cpy_macro_ctx(self, tp, name): - if tp == '...': - if self.target_is_python: - raise VerificationError( - "cannot use the syntax '...' in '#define %s ...' 
when " - "using the ABI mode" % (name,)) - check_value = None - else: - check_value = tp # an integer - type_op = CffiOp(OP_CONSTANT_INT, -1) - self._lsts["global"].append( - GlobalExpr(name, '_cffi_const_%s' % name, type_op, - check_value=check_value)) - - # ---------- - # global variables - - def _global_type(self, tp, global_name): - if isinstance(tp, model.ArrayType): - actual_length = tp.length - if actual_length == '...': - actual_length = '_cffi_array_len(%s)' % (global_name,) - tp_item = self._global_type(tp.item, '%s[0]' % global_name) - tp = model.ArrayType(tp_item, actual_length) - return tp - - def _generate_cpy_variable_collecttype(self, tp, name): - self._do_collect_type(self._global_type(tp, name)) - - def _generate_cpy_variable_decl(self, tp, name): - prnt = self._prnt - tp = self._global_type(tp, name) - if isinstance(tp, model.ArrayType) and tp.length is None: - tp = tp.item - ampersand = '' - else: - ampersand = '&' - # This code assumes that casts from "tp *" to "void *" is a - # no-op, i.e. a function that returns a "tp *" can be called - # as if it returned a "void *". This should be generally true - # on any modern machine. The only exception to that rule (on - # uncommon architectures, and as far as I can tell) might be - # if 'tp' were a function type, but that is not possible here. - # (If 'tp' is a function _pointer_ type, then casts from "fn_t - # **" to "void *" are again no-ops, as far as I can tell.) - decl = '*_cffi_var_%s(void)' % (name,) - prnt('static ' + tp.get_c_name(decl, quals=self._current_quals)) - prnt('{') - prnt(' return %s(%s);' % (ampersand, name)) - prnt('}') - prnt() - - def _generate_cpy_variable_ctx(self, tp, name): - tp = self._global_type(tp, name) - type_index = self._typesdict[tp] - if self.target_is_python: - op = OP_GLOBAL_VAR - else: - op = OP_GLOBAL_VAR_F - self._lsts["global"].append( - GlobalExpr(name, '_cffi_var_%s' % name, CffiOp(op, type_index))) - - # ---------- - # extern "Python" - - def _generate_cpy_extern_python_collecttype(self, tp, name): - assert isinstance(tp, model.FunctionPtrType) - self._do_collect_type(tp) - _generate_cpy_dllexport_python_collecttype = \ - _generate_cpy_extern_python_plus_c_collecttype = \ - _generate_cpy_extern_python_collecttype - - def _extern_python_decl(self, tp, name, tag_and_space): - prnt = self._prnt - if isinstance(tp.result, model.VoidType): - size_of_result = '0' - else: - context = 'result of %s' % name - size_of_result = '(int)sizeof(%s)' % ( - tp.result.get_c_name('', context),) - prnt('static struct _cffi_externpy_s _cffi_externpy__%s =' % name) - prnt(' { "%s.%s", %s, 0, 0 };' % ( - self.module_name, name, size_of_result)) - prnt() - # - arguments = [] - context = 'argument of %s' % name - for i, type in enumerate(tp.args): - arg = type.get_c_name(' a%d' % i, context) - arguments.append(arg) - # - repr_arguments = ', '.join(arguments) - repr_arguments = repr_arguments or 'void' - name_and_arguments = '%s(%s)' % (name, repr_arguments) - if tp.abi == "__stdcall": - name_and_arguments = '_cffi_stdcall ' + name_and_arguments - # - def may_need_128_bits(tp): - return (isinstance(tp, model.PrimitiveType) and - tp.name == 'long double') - # - size_of_a = max(len(tp.args)*8, 8) - if may_need_128_bits(tp.result): - size_of_a = max(size_of_a, 16) - if isinstance(tp.result, model.StructOrUnion): - size_of_a = 'sizeof(%s) > %d ? 
sizeof(%s) : %d' % ( - tp.result.get_c_name(''), size_of_a, - tp.result.get_c_name(''), size_of_a) - prnt('%s%s' % (tag_and_space, tp.result.get_c_name(name_and_arguments))) - prnt('{') - prnt(' char a[%s];' % size_of_a) - prnt(' char *p = a;') - for i, type in enumerate(tp.args): - arg = 'a%d' % i - if (isinstance(type, model.StructOrUnion) or - may_need_128_bits(type)): - arg = '&' + arg - type = model.PointerType(type) - prnt(' *(%s)(p + %d) = %s;' % (type.get_c_name('*'), i*8, arg)) - prnt(' _cffi_call_python(&_cffi_externpy__%s, p);' % name) - if not isinstance(tp.result, model.VoidType): - prnt(' return *(%s)p;' % (tp.result.get_c_name('*'),)) - prnt('}') - prnt() - self._num_externpy += 1 - - def _generate_cpy_extern_python_decl(self, tp, name): - self._extern_python_decl(tp, name, 'static ') - - def _generate_cpy_dllexport_python_decl(self, tp, name): - self._extern_python_decl(tp, name, 'CFFI_DLLEXPORT ') - - def _generate_cpy_extern_python_plus_c_decl(self, tp, name): - self._extern_python_decl(tp, name, '') - - def _generate_cpy_extern_python_ctx(self, tp, name): - if self.target_is_python: - raise VerificationError( - "cannot use 'extern \"Python\"' in the ABI mode") - if tp.ellipsis: - raise NotImplementedError("a vararg function is extern \"Python\"") - type_index = self._typesdict[tp] - type_op = CffiOp(OP_EXTERN_PYTHON, type_index) - self._lsts["global"].append( - GlobalExpr(name, '&_cffi_externpy__%s' % name, type_op, name)) - - _generate_cpy_dllexport_python_ctx = \ - _generate_cpy_extern_python_plus_c_ctx = \ - _generate_cpy_extern_python_ctx - - def _print_string_literal_in_array(self, s): - prnt = self._prnt - prnt('// # NB. this is not a string because of a size limit in MSVC') - if not isinstance(s, bytes): # unicode - s = s.encode('utf-8') # -> bytes - else: - s.decode('utf-8') # got bytes, check for valid utf-8 - try: - s.decode('ascii') - except UnicodeDecodeError: - s = b'# -*- encoding: utf8 -*-\n' + s - for line in s.splitlines(True): - comment = line - if type('//') is bytes: # python2 - line = map(ord, line) # make a list of integers - else: # python3 - # type(line) is bytes, which enumerates like a list of integers - comment = ascii(comment)[1:-1] - prnt(('// ' + comment).rstrip()) - printed_line = '' - for c in line: - if len(printed_line) >= 76: - prnt(printed_line) - printed_line = '' - printed_line += '%d,' % (c,) - prnt(printed_line) - - # ---------- - # emitting the opcodes for individual types - - def _emit_bytecode_VoidType(self, tp, index): - self.cffi_types[index] = CffiOp(OP_PRIMITIVE, PRIM_VOID) - - def _emit_bytecode_PrimitiveType(self, tp, index): - prim_index = PRIMITIVE_TO_INDEX[tp.name] - self.cffi_types[index] = CffiOp(OP_PRIMITIVE, prim_index) - - def _emit_bytecode_UnknownIntegerType(self, tp, index): - s = ('_cffi_prim_int(sizeof(%s), (\n' - ' ((%s)-1) | 0 /* check that %s is an integer type */\n' - ' ) <= 0)' % (tp.name, tp.name, tp.name)) - self.cffi_types[index] = CffiOp(OP_PRIMITIVE, s) - - def _emit_bytecode_UnknownFloatType(self, tp, index): - s = ('_cffi_prim_float(sizeof(%s) *\n' - ' (((%s)1) / 2) * 2 /* integer => 0, float => 1 */\n' - ' )' % (tp.name, tp.name)) - self.cffi_types[index] = CffiOp(OP_PRIMITIVE, s) - - def _emit_bytecode_RawFunctionType(self, tp, index): - self.cffi_types[index] = CffiOp(OP_FUNCTION, self._typesdict[tp.result]) - index += 1 - for tp1 in tp.args: - realindex = self._typesdict[tp1] - if index != realindex: - if isinstance(tp1, model.PrimitiveType): - self._emit_bytecode_PrimitiveType(tp1, index) - 
else: - self.cffi_types[index] = CffiOp(OP_NOOP, realindex) - index += 1 - flags = int(tp.ellipsis) - if tp.abi is not None: - if tp.abi == '__stdcall': - flags |= 2 - else: - raise NotImplementedError("abi=%r" % (tp.abi,)) - self.cffi_types[index] = CffiOp(OP_FUNCTION_END, flags) - - def _emit_bytecode_PointerType(self, tp, index): - self.cffi_types[index] = CffiOp(OP_POINTER, self._typesdict[tp.totype]) - - _emit_bytecode_ConstPointerType = _emit_bytecode_PointerType - _emit_bytecode_NamedPointerType = _emit_bytecode_PointerType - - def _emit_bytecode_FunctionPtrType(self, tp, index): - raw = tp.as_raw_function() - self.cffi_types[index] = CffiOp(OP_POINTER, self._typesdict[raw]) - - def _emit_bytecode_ArrayType(self, tp, index): - item_index = self._typesdict[tp.item] - if tp.length is None: - self.cffi_types[index] = CffiOp(OP_OPEN_ARRAY, item_index) - elif tp.length == '...': - raise VerificationError( - "type %s badly placed: the '...' array length can only be " - "used on global arrays or on fields of structures" % ( - str(tp).replace('/*...*/', '...'),)) - else: - assert self.cffi_types[index + 1] == 'LEN' - self.cffi_types[index] = CffiOp(OP_ARRAY, item_index) - self.cffi_types[index + 1] = CffiOp(None, str(tp.length)) - - def _emit_bytecode_StructType(self, tp, index): - struct_index = self._struct_unions[tp] - self.cffi_types[index] = CffiOp(OP_STRUCT_UNION, struct_index) - _emit_bytecode_UnionType = _emit_bytecode_StructType - - def _emit_bytecode_EnumType(self, tp, index): - enum_index = self._enums[tp] - self.cffi_types[index] = CffiOp(OP_ENUM, enum_index) - - -if sys.version_info >= (3,): - NativeIO = io.StringIO -else: - class NativeIO(io.BytesIO): - def write(self, s): - if isinstance(s, unicode): - s = s.encode('ascii') - super(NativeIO, self).write(s) - -def _is_file_like(maybefile): - # compare to xml.etree.ElementTree._get_writer - return hasattr(maybefile, 'write') - -def _make_c_or_py_source(ffi, module_name, preamble, target_file, verbose): - if verbose: - print("generating %s" % (target_file,)) - recompiler = Recompiler(ffi, module_name, - target_is_python=(preamble is None)) - recompiler.collect_type_table() - recompiler.collect_step_tables() - if _is_file_like(target_file): - recompiler.write_source_to_f(target_file, preamble) - return True - f = NativeIO() - recompiler.write_source_to_f(f, preamble) - output = f.getvalue() - try: - with open(target_file, 'r') as f1: - if f1.read(len(output) + 1) != output: - raise IOError - if verbose: - print("(already up-to-date)") - return False # already up-to-date - except IOError: - tmp_file = '%s.~%d' % (target_file, os.getpid()) - with open(tmp_file, 'w') as f1: - f1.write(output) - try: - os.rename(tmp_file, target_file) - except OSError: - os.unlink(target_file) - os.rename(tmp_file, target_file) - return True - -def make_c_source(ffi, module_name, preamble, target_c_file, verbose=False): - assert preamble is not None - return _make_c_or_py_source(ffi, module_name, preamble, target_c_file, - verbose) - -def make_py_source(ffi, module_name, target_py_file, verbose=False): - return _make_c_or_py_source(ffi, module_name, None, target_py_file, - verbose) - -def _modname_to_file(outputdir, modname, extension): - parts = modname.split('.') - try: - os.makedirs(os.path.join(outputdir, *parts[:-1])) - except OSError: - pass - parts[-1] += extension - return os.path.join(outputdir, *parts), parts - - -# Aaargh. Distutils is not tested at all for the purpose of compiling -# DLLs that are not extension modules. 
Here are some hacks to work -# around that, in the _patch_for_*() functions... - -def _patch_meth(patchlist, cls, name, new_meth): - old = getattr(cls, name) - patchlist.append((cls, name, old)) - setattr(cls, name, new_meth) - return old - -def _unpatch_meths(patchlist): - for cls, name, old_meth in reversed(patchlist): - setattr(cls, name, old_meth) - -def _patch_for_embedding(patchlist): - if sys.platform == 'win32': - # we must not remove the manifest when building for embedding! - # FUTURE: this module was removed in setuptools 74; this is likely dead code and should be removed, - # since the toolchain it supports (VS2005-2008) is also long dead. - from cffi._shimmed_dist_utils import MSVCCompiler - if MSVCCompiler is not None: - _patch_meth(patchlist, MSVCCompiler, '_remove_visual_c_ref', - lambda self, manifest_file: manifest_file) - - if sys.platform == 'darwin': - # we must not make a '-bundle', but a '-dynamiclib' instead - from cffi._shimmed_dist_utils import CCompiler - def my_link_shared_object(self, *args, **kwds): - if '-bundle' in self.linker_so: - self.linker_so = list(self.linker_so) - i = self.linker_so.index('-bundle') - self.linker_so[i] = '-dynamiclib' - return old_link_shared_object(self, *args, **kwds) - old_link_shared_object = _patch_meth(patchlist, CCompiler, - 'link_shared_object', - my_link_shared_object) - -def _patch_for_target(patchlist, target): - from cffi._shimmed_dist_utils import build_ext - # if 'target' is different from '*', we need to patch some internal - # method to just return this 'target' value, instead of having it - # built from module_name - if target.endswith('.*'): - target = target[:-2] - if sys.platform == 'win32': - target += '.dll' - elif sys.platform == 'darwin': - target += '.dylib' - else: - target += '.so' - _patch_meth(patchlist, build_ext, 'get_ext_filename', - lambda self, ext_name: target) - - -def recompile(ffi, module_name, preamble, tmpdir='.', call_c_compiler=True, - c_file=None, source_extension='.c', extradir=None, - compiler_verbose=1, target=None, debug=None, - uses_ffiplatform=True, **kwds): - if not isinstance(module_name, str): - module_name = module_name.encode('ascii') - if ffi._windows_unicode: - ffi._apply_windows_unicode(kwds) - if preamble is not None: - if call_c_compiler and _is_file_like(c_file): - raise TypeError("Writing to file-like objects is not supported " - "with call_c_compiler=True") - embedding = (ffi._embedding is not None) - if embedding: - ffi._apply_embedding_fix(kwds) - if c_file is None: - c_file, parts = _modname_to_file(tmpdir, module_name, - source_extension) - if extradir: - parts = [extradir] + parts - ext_c_file = os.path.join(*parts) - else: - ext_c_file = c_file - # - if target is None: - if embedding: - target = '%s.*' % module_name - else: - target = '*' - # - if uses_ffiplatform: - ext = ffiplatform.get_extension(ext_c_file, module_name, **kwds) - else: - ext = None - updated = make_c_source(ffi, module_name, preamble, c_file, - verbose=compiler_verbose) - if call_c_compiler: - patchlist = [] - cwd = os.getcwd() - try: - if embedding: - _patch_for_embedding(patchlist) - if target != '*': - _patch_for_target(patchlist, target) - if compiler_verbose: - if tmpdir == '.': - msg = 'the current directory is' - else: - msg = 'setting the current directory to' - print('%s %r' % (msg, os.path.abspath(tmpdir))) - os.chdir(tmpdir) - outputfilename = ffiplatform.compile('.', ext, - compiler_verbose, debug) - finally: - os.chdir(cwd) - _unpatch_meths(patchlist) - return outputfilename - else: - 
return ext, updated - else: - if c_file is None: - c_file, _ = _modname_to_file(tmpdir, module_name, '.py') - updated = make_py_source(ffi, module_name, c_file, - verbose=compiler_verbose) - if call_c_compiler: - return c_file - else: - return None, updated - diff --git a/env/lib/python3.10/site-packages/cffi/setuptools_ext.py b/env/lib/python3.10/site-packages/cffi/setuptools_ext.py deleted file mode 100644 index 681b49d..0000000 --- a/env/lib/python3.10/site-packages/cffi/setuptools_ext.py +++ /dev/null @@ -1,216 +0,0 @@ -import os -import sys - -try: - basestring -except NameError: - # Python 3.x - basestring = str - -def error(msg): - from cffi._shimmed_dist_utils import DistutilsSetupError - raise DistutilsSetupError(msg) - - -def execfile(filename, glob): - # We use execfile() (here rewritten for Python 3) instead of - # __import__() to load the build script. The problem with - # a normal import is that in some packages, the intermediate - # __init__.py files may already try to import the file that - # we are generating. - with open(filename) as f: - src = f.read() - src += '\n' # Python 2.6 compatibility - code = compile(src, filename, 'exec') - exec(code, glob, glob) - - -def add_cffi_module(dist, mod_spec): - from cffi.api import FFI - - if not isinstance(mod_spec, basestring): - error("argument to 'cffi_modules=...' must be a str or a list of str," - " not %r" % (type(mod_spec).__name__,)) - mod_spec = str(mod_spec) - try: - build_file_name, ffi_var_name = mod_spec.split(':') - except ValueError: - error("%r must be of the form 'path/build.py:ffi_variable'" % - (mod_spec,)) - if not os.path.exists(build_file_name): - ext = '' - rewritten = build_file_name.replace('.', '/') + '.py' - if os.path.exists(rewritten): - ext = ' (rewrite cffi_modules to [%r])' % ( - rewritten + ':' + ffi_var_name,) - error("%r does not name an existing file%s" % (build_file_name, ext)) - - mod_vars = {'__name__': '__cffi__', '__file__': build_file_name} - execfile(build_file_name, mod_vars) - - try: - ffi = mod_vars[ffi_var_name] - except KeyError: - error("%r: object %r not found in module" % (mod_spec, - ffi_var_name)) - if not isinstance(ffi, FFI): - ffi = ffi() # maybe it's a function instead of directly an ffi - if not isinstance(ffi, FFI): - error("%r is not an FFI instance (got %r)" % (mod_spec, - type(ffi).__name__)) - if not hasattr(ffi, '_assigned_source'): - error("%r: the set_source() method was not called" % (mod_spec,)) - module_name, source, source_extension, kwds = ffi._assigned_source - if ffi._windows_unicode: - kwds = kwds.copy() - ffi._apply_windows_unicode(kwds) - - if source is None: - _add_py_module(dist, ffi, module_name) - else: - _add_c_module(dist, ffi, module_name, source, source_extension, kwds) - -def _set_py_limited_api(Extension, kwds): - """ - Add py_limited_api to kwds if setuptools >= 26 is in use. - Do not alter the setting if it already exists. - Setuptools takes care of ignoring the flag on Python 2 and PyPy. - - CPython itself should ignore the flag in a debugging version - (by not listing .abi3.so in the extensions it supports), but - it doesn't so far, creating troubles. That's why we check - for "not hasattr(sys, 'gettotalrefcount')" (the 2.7 compatible equivalent - of 'd' not in sys.abiflags). (http://bugs.python.org/issue28401) - - On Windows, with CPython <= 3.4, it's better not to use py_limited_api - because virtualenv *still* doesn't copy PYTHON3.DLL on these versions. - Recently (2020) we started shipping only >= 3.5 wheels, though. 
So - we'll give it another try and set py_limited_api on Windows >= 3.5. - """ - from cffi import recompiler - - if ('py_limited_api' not in kwds and not hasattr(sys, 'gettotalrefcount') - and recompiler.USE_LIMITED_API): - import setuptools - try: - setuptools_major_version = int(setuptools.__version__.partition('.')[0]) - if setuptools_major_version >= 26: - kwds['py_limited_api'] = True - except ValueError: # certain development versions of setuptools - # If we don't know the version number of setuptools, we - # try to set 'py_limited_api' anyway. At worst, we get a - # warning. - kwds['py_limited_api'] = True - return kwds - -def _add_c_module(dist, ffi, module_name, source, source_extension, kwds): - # We are a setuptools extension. Need this build_ext for py_limited_api. - from setuptools.command.build_ext import build_ext - from cffi._shimmed_dist_utils import Extension, log, mkpath - from cffi import recompiler - - allsources = ['$PLACEHOLDER'] - allsources.extend(kwds.pop('sources', [])) - kwds = _set_py_limited_api(Extension, kwds) - ext = Extension(name=module_name, sources=allsources, **kwds) - - def make_mod(tmpdir, pre_run=None): - c_file = os.path.join(tmpdir, module_name + source_extension) - log.info("generating cffi module %r" % c_file) - mkpath(tmpdir) - # a setuptools-only, API-only hook: called with the "ext" and "ffi" - # arguments just before we turn the ffi into C code. To use it, - # subclass the 'distutils.command.build_ext.build_ext' class and - # add a method 'def pre_run(self, ext, ffi)'. - if pre_run is not None: - pre_run(ext, ffi) - updated = recompiler.make_c_source(ffi, module_name, source, c_file) - if not updated: - log.info("already up-to-date") - return c_file - - if dist.ext_modules is None: - dist.ext_modules = [] - dist.ext_modules.append(ext) - - base_class = dist.cmdclass.get('build_ext', build_ext) - class build_ext_make_mod(base_class): - def run(self): - if ext.sources[0] == '$PLACEHOLDER': - pre_run = getattr(self, 'pre_run', None) - ext.sources[0] = make_mod(self.build_temp, pre_run) - base_class.run(self) - dist.cmdclass['build_ext'] = build_ext_make_mod - # NB. multiple runs here will create multiple 'build_ext_make_mod' - # classes. Even in this case the 'build_ext' command should be - # run once; but just in case, the logic above does nothing if - # called again. - - -def _add_py_module(dist, ffi, module_name): - from setuptools.command.build_py import build_py - from setuptools.command.build_ext import build_ext - from cffi._shimmed_dist_utils import log, mkpath - from cffi import recompiler - - def generate_mod(py_file): - log.info("generating cffi module %r" % py_file) - mkpath(os.path.dirname(py_file)) - updated = recompiler.make_py_source(ffi, module_name, py_file) - if not updated: - log.info("already up-to-date") - - base_class = dist.cmdclass.get('build_py', build_py) - class build_py_make_mod(base_class): - def run(self): - base_class.run(self) - module_path = module_name.split('.') - module_path[-1] += '.py' - generate_mod(os.path.join(self.build_lib, *module_path)) - def get_source_files(self): - # This is called from 'setup.py sdist' only. Exclude - # the generate .py module in this case. 
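For context, the setuptools hook implemented in this file is driven from a project's setup.py roughly as below (a sketch; the package name, build-script path, and ffi variable name are hypothetical):

```python
# setup.py sketch using the cffi_modules hook implemented in this file;
# all names below are hypothetical.
from setuptools import setup

setup(
    name="example-pkg",
    setup_requires=["cffi>=1.0.0"],
    install_requires=["cffi>=1.0.0"],
    # "path/to/build_script.py" : "name of the FFI object defined in it"
    cffi_modules=["example_pkg/_build_ffi.py:ffi"],
)
```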
- saved_py_modules = self.py_modules - try: - if saved_py_modules: - self.py_modules = [m for m in saved_py_modules - if m != module_name] - return base_class.get_source_files(self) - finally: - self.py_modules = saved_py_modules - dist.cmdclass['build_py'] = build_py_make_mod - - # distutils and setuptools have no notion I could find of a - # generated python module. If we don't add module_name to - # dist.py_modules, then things mostly work but there are some - # combination of options (--root and --record) that will miss - # the module. So we add it here, which gives a few apparently - # harmless warnings about not finding the file outside the - # build directory. - # Then we need to hack more in get_source_files(); see above. - if dist.py_modules is None: - dist.py_modules = [] - dist.py_modules.append(module_name) - - # the following is only for "build_ext -i" - base_class_2 = dist.cmdclass.get('build_ext', build_ext) - class build_ext_make_mod(base_class_2): - def run(self): - base_class_2.run(self) - if self.inplace: - # from get_ext_fullpath() in distutils/command/build_ext.py - module_path = module_name.split('.') - package = '.'.join(module_path[:-1]) - build_py = self.get_finalized_command('build_py') - package_dir = build_py.get_package_dir(package) - file_name = module_path[-1] + '.py' - generate_mod(os.path.join(package_dir, file_name)) - dist.cmdclass['build_ext'] = build_ext_make_mod - -def cffi_modules(dist, attr, value): - assert attr == 'cffi_modules' - if isinstance(value, basestring): - value = [value] - - for cffi_module in value: - add_cffi_module(dist, cffi_module) diff --git a/env/lib/python3.10/site-packages/cffi/vengine_cpy.py b/env/lib/python3.10/site-packages/cffi/vengine_cpy.py deleted file mode 100644 index eb0b6f7..0000000 --- a/env/lib/python3.10/site-packages/cffi/vengine_cpy.py +++ /dev/null @@ -1,1084 +0,0 @@ -# -# DEPRECATED: implementation for ffi.verify() -# -import sys -from . import model -from .error import VerificationError -from . import _imp_emulation as imp - - -class VCPythonEngine(object): - _class_key = 'x' - _gen_python_module = True - - def __init__(self, verifier): - self.verifier = verifier - self.ffi = verifier.ffi - self._struct_pending_verification = {} - self._types_of_builtin_functions = {} - - def patch_extension_kwds(self, kwds): - pass - - def find_module(self, module_name, path, so_suffixes): - try: - f, filename, descr = imp.find_module(module_name, path) - except ImportError: - return None - if f is not None: - f.close() - # Note that after a setuptools installation, there are both .py - # and .so files with the same basename. The code here relies on - # imp.find_module() locating the .so in priority. - if descr[0] not in so_suffixes: - return None - return filename - - def collect_types(self): - self._typesdict = {} - self._generate("collecttype") - - def _prnt(self, what=''): - self._f.write(what + '\n') - - def _gettypenum(self, type): - # a KeyError here is a bug. please report it! :-) - return self._typesdict[type] - - def _do_collect_type(self, tp): - if ((not isinstance(tp, model.PrimitiveType) - or tp.name == 'long double') - and tp not in self._typesdict): - num = len(self._typesdict) - self._typesdict[tp] = num - - def write_source_to_f(self): - self.collect_types() - # - # The new module will have a _cffi_setup() function that receives - # objects from the ffi world, and that calls some setup code in - # the module. This setup code is split in several independent - # functions, e.g. one per constant. 
The functions are "chained" - # by ending in a tail call to each other. - # - # This is further split in two chained lists, depending on if we - # can do it at import-time or if we must wait for _cffi_setup() to - # provide us with the objects. This is needed because we - # need the values of the enum constants in order to build the - # that we may have to pass to _cffi_setup(). - # - # The following two 'chained_list_constants' items contains - # the head of these two chained lists, as a string that gives the - # call to do, if any. - self._chained_list_constants = ['((void)lib,0)', '((void)lib,0)'] - # - prnt = self._prnt - # first paste some standard set of lines that are mostly '#define' - prnt(cffimod_header) - prnt() - # then paste the C source given by the user, verbatim. - prnt(self.verifier.preamble) - prnt() - # - # call generate_cpy_xxx_decl(), for every xxx found from - # ffi._parser._declarations. This generates all the functions. - self._generate("decl") - # - # implement the function _cffi_setup_custom() as calling the - # head of the chained list. - self._generate_setup_custom() - prnt() - # - # produce the method table, including the entries for the - # generated Python->C function wrappers, which are done - # by generate_cpy_function_method(). - prnt('static PyMethodDef _cffi_methods[] = {') - self._generate("method") - prnt(' {"_cffi_setup", _cffi_setup, METH_VARARGS, NULL},') - prnt(' {NULL, NULL, 0, NULL} /* Sentinel */') - prnt('};') - prnt() - # - # standard init. - modname = self.verifier.get_module_name() - constants = self._chained_list_constants[False] - prnt('#if PY_MAJOR_VERSION >= 3') - prnt() - prnt('static struct PyModuleDef _cffi_module_def = {') - prnt(' PyModuleDef_HEAD_INIT,') - prnt(' "%s",' % modname) - prnt(' NULL,') - prnt(' -1,') - prnt(' _cffi_methods,') - prnt(' NULL, NULL, NULL, NULL') - prnt('};') - prnt() - prnt('PyMODINIT_FUNC') - prnt('PyInit_%s(void)' % modname) - prnt('{') - prnt(' PyObject *lib;') - prnt(' lib = PyModule_Create(&_cffi_module_def);') - prnt(' if (lib == NULL)') - prnt(' return NULL;') - prnt(' if (%s < 0 || _cffi_init() < 0) {' % (constants,)) - prnt(' Py_DECREF(lib);') - prnt(' return NULL;') - prnt(' }') - prnt(' return lib;') - prnt('}') - prnt() - prnt('#else') - prnt() - prnt('PyMODINIT_FUNC') - prnt('init%s(void)' % modname) - prnt('{') - prnt(' PyObject *lib;') - prnt(' lib = Py_InitModule("%s", _cffi_methods);' % modname) - prnt(' if (lib == NULL)') - prnt(' return;') - prnt(' if (%s < 0 || _cffi_init() < 0)' % (constants,)) - prnt(' return;') - prnt(' return;') - prnt('}') - prnt() - prnt('#endif') - - def load_library(self, flags=None): - # XXX review all usages of 'self' here! - # import it as a new extension module - imp.acquire_lock() - try: - if hasattr(sys, "getdlopenflags"): - previous_flags = sys.getdlopenflags() - try: - if hasattr(sys, "setdlopenflags") and flags is not None: - sys.setdlopenflags(flags) - module = imp.load_dynamic(self.verifier.get_module_name(), - self.verifier.modulefilename) - except ImportError as e: - error = "importing %r: %s" % (self.verifier.modulefilename, e) - raise VerificationError(error) - finally: - if hasattr(sys, "setdlopenflags"): - sys.setdlopenflags(previous_flags) - finally: - imp.release_lock() - # - # call loading_cpy_struct() to get the struct layout inferred by - # the C compiler - self._load(module, 'loading') - # - # the C code will need the objects. Collect them in - # order in a list. 
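This engine backs the long-deprecated ffi.verify() API; for reference, a minimal usage sketch with an invented function name (it requires a C compiler at run time and should not be used in new code):

```python
# Minimal sketch of the deprecated ffi.verify() API that this engine
# implements; the function name is invented, and a C compiler is required.
import cffi

ffi = cffi.FFI()
ffi.cdef("double my_add(double x, double y);")
lib = ffi.verify("""
    static double my_add(double x, double y) { return x + y; }
""")

assert lib.my_add(40.0, 2.0) == 42.0
```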
- revmapping = dict([(value, key) - for (key, value) in self._typesdict.items()]) - lst = [revmapping[i] for i in range(len(revmapping))] - lst = list(map(self.ffi._get_cached_btype, lst)) - # - # build the FFILibrary class and instance and call _cffi_setup(). - # this will set up some fields like '_cffi_types', and only then - # it will invoke the chained list of functions that will really - # build (notably) the constant objects, as if they are - # pointers, and store them as attributes on the 'library' object. - class FFILibrary(object): - _cffi_python_module = module - _cffi_ffi = self.ffi - _cffi_dir = [] - def __dir__(self): - return FFILibrary._cffi_dir + list(self.__dict__) - library = FFILibrary() - if module._cffi_setup(lst, VerificationError, library): - import warnings - warnings.warn("reimporting %r might overwrite older definitions" - % (self.verifier.get_module_name())) - # - # finally, call the loaded_cpy_xxx() functions. This will perform - # the final adjustments, like copying the Python->C wrapper - # functions from the module to the 'library' object, and setting - # up the FFILibrary class with properties for the global C variables. - self._load(module, 'loaded', library=library) - module._cffi_original_ffi = self.ffi - module._cffi_types_of_builtin_funcs = self._types_of_builtin_functions - return library - - def _get_declarations(self): - lst = [(key, tp) for (key, (tp, qual)) in - self.ffi._parser._declarations.items()] - lst.sort() - return lst - - def _generate(self, step_name): - for name, tp in self._get_declarations(): - kind, realname = name.split(' ', 1) - try: - method = getattr(self, '_generate_cpy_%s_%s' % (kind, - step_name)) - except AttributeError: - raise VerificationError( - "not implemented in verify(): %r" % name) - try: - method(tp, realname) - except Exception as e: - model.attach_exception_info(e, name) - raise - - def _load(self, module, step_name, **kwds): - for name, tp in self._get_declarations(): - kind, realname = name.split(' ', 1) - method = getattr(self, '_%s_cpy_%s' % (step_name, kind)) - try: - method(tp, realname, module, **kwds) - except Exception as e: - model.attach_exception_info(e, name) - raise - - def _generate_nothing(self, tp, name): - pass - - def _loaded_noop(self, tp, name, module, **kwds): - pass - - # ---------- - - def _convert_funcarg_to_c(self, tp, fromvar, tovar, errcode): - extraarg = '' - if isinstance(tp, model.PrimitiveType): - if tp.is_integer_type() and tp.name != '_Bool': - converter = '_cffi_to_c_int' - extraarg = ', %s' % tp.name - elif tp.is_complex_type(): - raise VerificationError( - "not implemented in verify(): complex types") - else: - converter = '(%s)_cffi_to_c_%s' % (tp.get_c_name(''), - tp.name.replace(' ', '_')) - errvalue = '-1' - # - elif isinstance(tp, model.PointerType): - self._convert_funcarg_to_c_ptr_or_array(tp, fromvar, - tovar, errcode) - return - # - elif isinstance(tp, (model.StructOrUnion, model.EnumType)): - # a struct (not a struct pointer) as a function argument - self._prnt(' if (_cffi_to_c((char *)&%s, _cffi_type(%d), %s) < 0)' - % (tovar, self._gettypenum(tp), fromvar)) - self._prnt(' %s;' % errcode) - return - # - elif isinstance(tp, model.FunctionPtrType): - converter = '(%s)_cffi_to_c_pointer' % tp.get_c_name('') - extraarg = ', _cffi_type(%d)' % self._gettypenum(tp) - errvalue = 'NULL' - # - else: - raise NotImplementedError(tp) - # - self._prnt(' %s = %s(%s%s);' % (tovar, converter, fromvar, extraarg)) - self._prnt(' if (%s == (%s)%s && PyErr_Occurred())' % ( - tovar, 
tp.get_c_name(''), errvalue)) - self._prnt(' %s;' % errcode) - - def _extra_local_variables(self, tp, localvars, freelines): - if isinstance(tp, model.PointerType): - localvars.add('Py_ssize_t datasize') - localvars.add('struct _cffi_freeme_s *large_args_free = NULL') - freelines.add('if (large_args_free != NULL)' - ' _cffi_free_array_arguments(large_args_free);') - - def _convert_funcarg_to_c_ptr_or_array(self, tp, fromvar, tovar, errcode): - self._prnt(' datasize = _cffi_prepare_pointer_call_argument(') - self._prnt(' _cffi_type(%d), %s, (char **)&%s);' % ( - self._gettypenum(tp), fromvar, tovar)) - self._prnt(' if (datasize != 0) {') - self._prnt(' %s = ((size_t)datasize) <= 640 ? ' - 'alloca((size_t)datasize) : NULL;' % (tovar,)) - self._prnt(' if (_cffi_convert_array_argument(_cffi_type(%d), %s, ' - '(char **)&%s,' % (self._gettypenum(tp), fromvar, tovar)) - self._prnt(' datasize, &large_args_free) < 0)') - self._prnt(' %s;' % errcode) - self._prnt(' }') - - def _convert_expr_from_c(self, tp, var, context): - if isinstance(tp, model.PrimitiveType): - if tp.is_integer_type() and tp.name != '_Bool': - return '_cffi_from_c_int(%s, %s)' % (var, tp.name) - elif tp.name != 'long double': - return '_cffi_from_c_%s(%s)' % (tp.name.replace(' ', '_'), var) - else: - return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % ( - var, self._gettypenum(tp)) - elif isinstance(tp, (model.PointerType, model.FunctionPtrType)): - return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % ( - var, self._gettypenum(tp)) - elif isinstance(tp, model.ArrayType): - return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % ( - var, self._gettypenum(model.PointerType(tp.item))) - elif isinstance(tp, model.StructOrUnion): - if tp.fldnames is None: - raise TypeError("'%s' is used as %s, but is opaque" % ( - tp._get_c_name(), context)) - return '_cffi_from_c_struct((char *)&%s, _cffi_type(%d))' % ( - var, self._gettypenum(tp)) - elif isinstance(tp, model.EnumType): - return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % ( - var, self._gettypenum(tp)) - else: - raise NotImplementedError(tp) - - # ---------- - # typedefs: generates no code so far - - _generate_cpy_typedef_collecttype = _generate_nothing - _generate_cpy_typedef_decl = _generate_nothing - _generate_cpy_typedef_method = _generate_nothing - _loading_cpy_typedef = _loaded_noop - _loaded_cpy_typedef = _loaded_noop - - # ---------- - # function declarations - - def _generate_cpy_function_collecttype(self, tp, name): - assert isinstance(tp, model.FunctionPtrType) - if tp.ellipsis: - self._do_collect_type(tp) - else: - # don't call _do_collect_type(tp) in this common case, - # otherwise test_autofilled_struct_as_argument fails - for type in tp.args: - self._do_collect_type(type) - self._do_collect_type(tp.result) - - def _generate_cpy_function_decl(self, tp, name): - assert isinstance(tp, model.FunctionPtrType) - if tp.ellipsis: - # cannot support vararg functions better than this: check for its - # exact type (including the fixed arguments), and build it as a - # constant function pointer (no CPython wrapper) - self._generate_cpy_const(False, name, tp) - return - prnt = self._prnt - numargs = len(tp.args) - if numargs == 0: - argname = 'noarg' - elif numargs == 1: - argname = 'arg0' - else: - argname = 'args' - prnt('static PyObject *') - prnt('_cffi_f_%s(PyObject *self, PyObject *%s)' % (name, argname)) - prnt('{') - # - context = 'argument of %s' % name - for i, type in enumerate(tp.args): - prnt(' %s;' % type.get_c_name(' x%d' % i, context)) - # 
- localvars = set() - freelines = set() - for type in tp.args: - self._extra_local_variables(type, localvars, freelines) - for decl in sorted(localvars): - prnt(' %s;' % (decl,)) - # - if not isinstance(tp.result, model.VoidType): - result_code = 'result = ' - context = 'result of %s' % name - prnt(' %s;' % tp.result.get_c_name(' result', context)) - prnt(' PyObject *pyresult;') - else: - result_code = '' - # - if len(tp.args) > 1: - rng = range(len(tp.args)) - for i in rng: - prnt(' PyObject *arg%d;' % i) - prnt() - prnt(' if (!PyArg_ParseTuple(args, "%s:%s", %s))' % ( - 'O' * numargs, name, ', '.join(['&arg%d' % i for i in rng]))) - prnt(' return NULL;') - prnt() - # - for i, type in enumerate(tp.args): - self._convert_funcarg_to_c(type, 'arg%d' % i, 'x%d' % i, - 'return NULL') - prnt() - # - prnt(' Py_BEGIN_ALLOW_THREADS') - prnt(' _cffi_restore_errno();') - prnt(' { %s%s(%s); }' % ( - result_code, name, - ', '.join(['x%d' % i for i in range(len(tp.args))]))) - prnt(' _cffi_save_errno();') - prnt(' Py_END_ALLOW_THREADS') - prnt() - # - prnt(' (void)self; /* unused */') - if numargs == 0: - prnt(' (void)noarg; /* unused */') - if result_code: - prnt(' pyresult = %s;' % - self._convert_expr_from_c(tp.result, 'result', 'result type')) - for freeline in freelines: - prnt(' ' + freeline) - prnt(' return pyresult;') - else: - for freeline in freelines: - prnt(' ' + freeline) - prnt(' Py_INCREF(Py_None);') - prnt(' return Py_None;') - prnt('}') - prnt() - - def _generate_cpy_function_method(self, tp, name): - if tp.ellipsis: - return - numargs = len(tp.args) - if numargs == 0: - meth = 'METH_NOARGS' - elif numargs == 1: - meth = 'METH_O' - else: - meth = 'METH_VARARGS' - self._prnt(' {"%s", _cffi_f_%s, %s, NULL},' % (name, name, meth)) - - _loading_cpy_function = _loaded_noop - - def _loaded_cpy_function(self, tp, name, module, library): - if tp.ellipsis: - return - func = getattr(module, name) - setattr(library, name, func) - self._types_of_builtin_functions[func] = tp - - # ---------- - # named structs - - _generate_cpy_struct_collecttype = _generate_nothing - def _generate_cpy_struct_decl(self, tp, name): - assert name == tp.name - self._generate_struct_or_union_decl(tp, 'struct', name) - def _generate_cpy_struct_method(self, tp, name): - self._generate_struct_or_union_method(tp, 'struct', name) - def _loading_cpy_struct(self, tp, name, module): - self._loading_struct_or_union(tp, 'struct', name, module) - def _loaded_cpy_struct(self, tp, name, module, **kwds): - self._loaded_struct_or_union(tp) - - _generate_cpy_union_collecttype = _generate_nothing - def _generate_cpy_union_decl(self, tp, name): - assert name == tp.name - self._generate_struct_or_union_decl(tp, 'union', name) - def _generate_cpy_union_method(self, tp, name): - self._generate_struct_or_union_method(tp, 'union', name) - def _loading_cpy_union(self, tp, name, module): - self._loading_struct_or_union(tp, 'union', name, module) - def _loaded_cpy_union(self, tp, name, module, **kwds): - self._loaded_struct_or_union(tp) - - def _generate_struct_or_union_decl(self, tp, prefix, name): - if tp.fldnames is None: - return # nothing to do with opaque structs - checkfuncname = '_cffi_check_%s_%s' % (prefix, name) - layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) - cname = ('%s %s' % (prefix, name)).strip() - # - prnt = self._prnt - prnt('static void %s(%s *p)' % (checkfuncname, cname)) - prnt('{') - prnt(' /* only to generate compile-time warnings or errors */') - prnt(' (void)p;') - for fname, ftype, fbitsize, fqual in 
tp.enumfields(): - if (isinstance(ftype, model.PrimitiveType) - and ftype.is_integer_type()) or fbitsize >= 0: - # accept all integers, but complain on float or double - prnt(' (void)((p->%s) << 1);' % fname) - else: - # only accept exactly the type declared. - try: - prnt(' { %s = &p->%s; (void)tmp; }' % ( - ftype.get_c_name('*tmp', 'field %r'%fname, quals=fqual), - fname)) - except VerificationError as e: - prnt(' /* %s */' % str(e)) # cannot verify it, ignore - prnt('}') - prnt('static PyObject *') - prnt('%s(PyObject *self, PyObject *noarg)' % (layoutfuncname,)) - prnt('{') - prnt(' struct _cffi_aligncheck { char x; %s y; };' % cname) - prnt(' static Py_ssize_t nums[] = {') - prnt(' sizeof(%s),' % cname) - prnt(' offsetof(struct _cffi_aligncheck, y),') - for fname, ftype, fbitsize, fqual in tp.enumfields(): - if fbitsize >= 0: - continue # xxx ignore fbitsize for now - prnt(' offsetof(%s, %s),' % (cname, fname)) - if isinstance(ftype, model.ArrayType) and ftype.length is None: - prnt(' 0, /* %s */' % ftype._get_c_name()) - else: - prnt(' sizeof(((%s *)0)->%s),' % (cname, fname)) - prnt(' -1') - prnt(' };') - prnt(' (void)self; /* unused */') - prnt(' (void)noarg; /* unused */') - prnt(' return _cffi_get_struct_layout(nums);') - prnt(' /* the next line is not executed, but compiled */') - prnt(' %s(0);' % (checkfuncname,)) - prnt('}') - prnt() - - def _generate_struct_or_union_method(self, tp, prefix, name): - if tp.fldnames is None: - return # nothing to do with opaque structs - layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) - self._prnt(' {"%s", %s, METH_NOARGS, NULL},' % (layoutfuncname, - layoutfuncname)) - - def _loading_struct_or_union(self, tp, prefix, name, module): - if tp.fldnames is None: - return # nothing to do with opaque structs - layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) - # - function = getattr(module, layoutfuncname) - layout = function() - if isinstance(tp, model.StructOrUnion) and tp.partial: - # use the function()'s sizes and offsets to guide the - # layout of the struct - totalsize = layout[0] - totalalignment = layout[1] - fieldofs = layout[2::2] - fieldsize = layout[3::2] - tp.force_flatten() - assert len(fieldofs) == len(fieldsize) == len(tp.fldnames) - tp.fixedlayout = fieldofs, fieldsize, totalsize, totalalignment - else: - cname = ('%s %s' % (prefix, name)).strip() - self._struct_pending_verification[tp] = layout, cname - - def _loaded_struct_or_union(self, tp): - if tp.fldnames is None: - return # nothing to do with opaque structs - self.ffi._get_cached_btype(tp) # force 'fixedlayout' to be considered - - if tp in self._struct_pending_verification: - # check that the layout sizes and offsets match the real ones - def check(realvalue, expectedvalue, msg): - if realvalue != expectedvalue: - raise VerificationError( - "%s (we have %d, but C compiler says %d)" - % (msg, expectedvalue, realvalue)) - ffi = self.ffi - BStruct = ffi._get_cached_btype(tp) - layout, cname = self._struct_pending_verification.pop(tp) - check(layout[0], ffi.sizeof(BStruct), "wrong total size") - check(layout[1], ffi.alignof(BStruct), "wrong total alignment") - i = 2 - for fname, ftype, fbitsize, fqual in tp.enumfields(): - if fbitsize >= 0: - continue # xxx ignore fbitsize for now - check(layout[i], ffi.offsetof(BStruct, fname), - "wrong offset for field %r" % (fname,)) - if layout[i+1] != 0: - BField = ffi._get_cached_btype(ftype) - check(layout[i+1], ffi.sizeof(BField), - "wrong size for field %r" % (fname,)) - i += 2 - assert i == len(layout) - - # ---------- - # 
'anonymous' declarations. These are produced for anonymous structs - # or unions; the 'name' is obtained by a typedef. - - _generate_cpy_anonymous_collecttype = _generate_nothing - - def _generate_cpy_anonymous_decl(self, tp, name): - if isinstance(tp, model.EnumType): - self._generate_cpy_enum_decl(tp, name, '') - else: - self._generate_struct_or_union_decl(tp, '', name) - - def _generate_cpy_anonymous_method(self, tp, name): - if not isinstance(tp, model.EnumType): - self._generate_struct_or_union_method(tp, '', name) - - def _loading_cpy_anonymous(self, tp, name, module): - if isinstance(tp, model.EnumType): - self._loading_cpy_enum(tp, name, module) - else: - self._loading_struct_or_union(tp, '', name, module) - - def _loaded_cpy_anonymous(self, tp, name, module, **kwds): - if isinstance(tp, model.EnumType): - self._loaded_cpy_enum(tp, name, module, **kwds) - else: - self._loaded_struct_or_union(tp) - - # ---------- - # constants, likely declared with '#define' - - def _generate_cpy_const(self, is_int, name, tp=None, category='const', - vartp=None, delayed=True, size_too=False, - check_value=None): - prnt = self._prnt - funcname = '_cffi_%s_%s' % (category, name) - prnt('static int %s(PyObject *lib)' % funcname) - prnt('{') - prnt(' PyObject *o;') - prnt(' int res;') - if not is_int: - prnt(' %s;' % (vartp or tp).get_c_name(' i', name)) - else: - assert category == 'const' - # - if check_value is not None: - self._check_int_constant_value(name, check_value) - # - if not is_int: - if category == 'var': - realexpr = '&' + name - else: - realexpr = name - prnt(' i = (%s);' % (realexpr,)) - prnt(' o = %s;' % (self._convert_expr_from_c(tp, 'i', - 'variable type'),)) - assert delayed - else: - prnt(' o = _cffi_from_c_int_const(%s);' % name) - prnt(' if (o == NULL)') - prnt(' return -1;') - if size_too: - prnt(' {') - prnt(' PyObject *o1 = o;') - prnt(' o = Py_BuildValue("On", o1, (Py_ssize_t)sizeof(%s));' - % (name,)) - prnt(' Py_DECREF(o1);') - prnt(' if (o == NULL)') - prnt(' return -1;') - prnt(' }') - prnt(' res = PyObject_SetAttrString(lib, "%s", o);' % name) - prnt(' Py_DECREF(o);') - prnt(' if (res < 0)') - prnt(' return -1;') - prnt(' return %s;' % self._chained_list_constants[delayed]) - self._chained_list_constants[delayed] = funcname + '(lib)' - prnt('}') - prnt() - - def _generate_cpy_constant_collecttype(self, tp, name): - is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type() - if not is_int: - self._do_collect_type(tp) - - def _generate_cpy_constant_decl(self, tp, name): - is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type() - self._generate_cpy_const(is_int, name, tp) - - _generate_cpy_constant_method = _generate_nothing - _loading_cpy_constant = _loaded_noop - _loaded_cpy_constant = _loaded_noop - - # ---------- - # enums - - def _check_int_constant_value(self, name, value, err_prefix=''): - prnt = self._prnt - if value <= 0: - prnt(' if ((%s) > 0 || (long)(%s) != %dL) {' % ( - name, name, value)) - else: - prnt(' if ((%s) <= 0 || (unsigned long)(%s) != %dUL) {' % ( - name, name, value)) - prnt(' char buf[64];') - prnt(' if ((%s) <= 0)' % name) - prnt(' snprintf(buf, 63, "%%ld", (long)(%s));' % name) - prnt(' else') - prnt(' snprintf(buf, 63, "%%lu", (unsigned long)(%s));' % - name) - prnt(' PyErr_Format(_cffi_VerificationError,') - prnt(' "%s%s has the real value %s, not %s",') - prnt(' "%s", "%s", buf, "%d");' % ( - err_prefix, name, value)) - prnt(' return -1;') - prnt(' }') - - def _enum_funcname(self, prefix, name): - # "$enum_$1" => 
"___D_enum____D_1" - name = name.replace('$', '___D_') - return '_cffi_e_%s_%s' % (prefix, name) - - def _generate_cpy_enum_decl(self, tp, name, prefix='enum'): - if tp.partial: - for enumerator in tp.enumerators: - self._generate_cpy_const(True, enumerator, delayed=False) - return - # - funcname = self._enum_funcname(prefix, name) - prnt = self._prnt - prnt('static int %s(PyObject *lib)' % funcname) - prnt('{') - for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): - self._check_int_constant_value(enumerator, enumvalue, - "enum %s: " % name) - prnt(' return %s;' % self._chained_list_constants[True]) - self._chained_list_constants[True] = funcname + '(lib)' - prnt('}') - prnt() - - _generate_cpy_enum_collecttype = _generate_nothing - _generate_cpy_enum_method = _generate_nothing - - def _loading_cpy_enum(self, tp, name, module): - if tp.partial: - enumvalues = [getattr(module, enumerator) - for enumerator in tp.enumerators] - tp.enumvalues = tuple(enumvalues) - tp.partial_resolved = True - - def _loaded_cpy_enum(self, tp, name, module, library): - for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): - setattr(library, enumerator, enumvalue) - - # ---------- - # macros: for now only for integers - - def _generate_cpy_macro_decl(self, tp, name): - if tp == '...': - check_value = None - else: - check_value = tp # an integer - self._generate_cpy_const(True, name, check_value=check_value) - - _generate_cpy_macro_collecttype = _generate_nothing - _generate_cpy_macro_method = _generate_nothing - _loading_cpy_macro = _loaded_noop - _loaded_cpy_macro = _loaded_noop - - # ---------- - # global variables - - def _generate_cpy_variable_collecttype(self, tp, name): - if isinstance(tp, model.ArrayType): - tp_ptr = model.PointerType(tp.item) - else: - tp_ptr = model.PointerType(tp) - self._do_collect_type(tp_ptr) - - def _generate_cpy_variable_decl(self, tp, name): - if isinstance(tp, model.ArrayType): - tp_ptr = model.PointerType(tp.item) - self._generate_cpy_const(False, name, tp, vartp=tp_ptr, - size_too = tp.length_is_unknown()) - else: - tp_ptr = model.PointerType(tp) - self._generate_cpy_const(False, name, tp_ptr, category='var') - - _generate_cpy_variable_method = _generate_nothing - _loading_cpy_variable = _loaded_noop - - def _loaded_cpy_variable(self, tp, name, module, library): - value = getattr(library, name) - if isinstance(tp, model.ArrayType): # int a[5] is "constant" in the - # sense that "a=..." is forbidden - if tp.length_is_unknown(): - assert isinstance(value, tuple) - (value, size) = value - BItemType = self.ffi._get_cached_btype(tp.item) - length, rest = divmod(size, self.ffi.sizeof(BItemType)) - if rest != 0: - raise VerificationError( - "bad size: %r does not seem to be an array of %s" % - (name, tp.item)) - tp = tp.resolve_length(length) - # 'value' is a which we have to replace with - # a if the N is actually known - if tp.length is not None: - BArray = self.ffi._get_cached_btype(tp) - value = self.ffi.cast(BArray, value) - setattr(library, name, value) - return - # remove ptr= from the library instance, and replace - # it by a property on the class, which reads/writes into ptr[0]. 
- ptr = value - delattr(library, name) - def getter(library): - return ptr[0] - def setter(library, value): - ptr[0] = value - setattr(type(library), name, property(getter, setter)) - type(library)._cffi_dir.append(name) - - # ---------- - - def _generate_setup_custom(self): - prnt = self._prnt - prnt('static int _cffi_setup_custom(PyObject *lib)') - prnt('{') - prnt(' return %s;' % self._chained_list_constants[True]) - prnt('}') - -cffimod_header = r''' -#include -#include - -/* this block of #ifs should be kept exactly identical between - c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py - and cffi/_cffi_include.h */ -#if defined(_MSC_VER) -# include /* for alloca() */ -# if _MSC_VER < 1600 /* MSVC < 2010 */ - typedef __int8 int8_t; - typedef __int16 int16_t; - typedef __int32 int32_t; - typedef __int64 int64_t; - typedef unsigned __int8 uint8_t; - typedef unsigned __int16 uint16_t; - typedef unsigned __int32 uint32_t; - typedef unsigned __int64 uint64_t; - typedef __int8 int_least8_t; - typedef __int16 int_least16_t; - typedef __int32 int_least32_t; - typedef __int64 int_least64_t; - typedef unsigned __int8 uint_least8_t; - typedef unsigned __int16 uint_least16_t; - typedef unsigned __int32 uint_least32_t; - typedef unsigned __int64 uint_least64_t; - typedef __int8 int_fast8_t; - typedef __int16 int_fast16_t; - typedef __int32 int_fast32_t; - typedef __int64 int_fast64_t; - typedef unsigned __int8 uint_fast8_t; - typedef unsigned __int16 uint_fast16_t; - typedef unsigned __int32 uint_fast32_t; - typedef unsigned __int64 uint_fast64_t; - typedef __int64 intmax_t; - typedef unsigned __int64 uintmax_t; -# else -# include -# endif -# if _MSC_VER < 1800 /* MSVC < 2013 */ -# ifndef __cplusplus - typedef unsigned char _Bool; -# endif -# endif -# define _cffi_float_complex_t _Fcomplex /* include for it */ -# define _cffi_double_complex_t _Dcomplex /* include for it */ -#else -# include -# if (defined (__SVR4) && defined (__sun)) || defined(_AIX) || defined(__hpux) -# include -# endif -# define _cffi_float_complex_t float _Complex -# define _cffi_double_complex_t double _Complex -#endif - -#if PY_MAJOR_VERSION < 3 -# undef PyCapsule_CheckExact -# undef PyCapsule_GetPointer -# define PyCapsule_CheckExact(capsule) (PyCObject_Check(capsule)) -# define PyCapsule_GetPointer(capsule, name) \ - (PyCObject_AsVoidPtr(capsule)) -#endif - -#if PY_MAJOR_VERSION >= 3 -# define PyInt_FromLong PyLong_FromLong -#endif - -#define _cffi_from_c_double PyFloat_FromDouble -#define _cffi_from_c_float PyFloat_FromDouble -#define _cffi_from_c_long PyInt_FromLong -#define _cffi_from_c_ulong PyLong_FromUnsignedLong -#define _cffi_from_c_longlong PyLong_FromLongLong -#define _cffi_from_c_ulonglong PyLong_FromUnsignedLongLong -#define _cffi_from_c__Bool PyBool_FromLong - -#define _cffi_to_c_double PyFloat_AsDouble -#define _cffi_to_c_float PyFloat_AsDouble - -#define _cffi_from_c_int_const(x) \ - (((x) > 0) ? \ - ((unsigned long long)(x) <= (unsigned long long)LONG_MAX) ? \ - PyInt_FromLong((long)(x)) : \ - PyLong_FromUnsignedLongLong((unsigned long long)(x)) : \ - ((long long)(x) >= (long long)LONG_MIN) ? \ - PyInt_FromLong((long)(x)) : \ - PyLong_FromLongLong((long long)(x))) - -#define _cffi_from_c_int(x, type) \ - (((type)-1) > 0 ? /* unsigned */ \ - (sizeof(type) < sizeof(long) ? \ - PyInt_FromLong((long)x) : \ - sizeof(type) == sizeof(long) ? \ - PyLong_FromUnsignedLong((unsigned long)x) : \ - PyLong_FromUnsignedLongLong((unsigned long long)x)) : \ - (sizeof(type) <= sizeof(long) ? 
\ - PyInt_FromLong((long)x) : \ - PyLong_FromLongLong((long long)x))) - -#define _cffi_to_c_int(o, type) \ - ((type)( \ - sizeof(type) == 1 ? (((type)-1) > 0 ? (type)_cffi_to_c_u8(o) \ - : (type)_cffi_to_c_i8(o)) : \ - sizeof(type) == 2 ? (((type)-1) > 0 ? (type)_cffi_to_c_u16(o) \ - : (type)_cffi_to_c_i16(o)) : \ - sizeof(type) == 4 ? (((type)-1) > 0 ? (type)_cffi_to_c_u32(o) \ - : (type)_cffi_to_c_i32(o)) : \ - sizeof(type) == 8 ? (((type)-1) > 0 ? (type)_cffi_to_c_u64(o) \ - : (type)_cffi_to_c_i64(o)) : \ - (Py_FatalError("unsupported size for type " #type), (type)0))) - -#define _cffi_to_c_i8 \ - ((int(*)(PyObject *))_cffi_exports[1]) -#define _cffi_to_c_u8 \ - ((int(*)(PyObject *))_cffi_exports[2]) -#define _cffi_to_c_i16 \ - ((int(*)(PyObject *))_cffi_exports[3]) -#define _cffi_to_c_u16 \ - ((int(*)(PyObject *))_cffi_exports[4]) -#define _cffi_to_c_i32 \ - ((int(*)(PyObject *))_cffi_exports[5]) -#define _cffi_to_c_u32 \ - ((unsigned int(*)(PyObject *))_cffi_exports[6]) -#define _cffi_to_c_i64 \ - ((long long(*)(PyObject *))_cffi_exports[7]) -#define _cffi_to_c_u64 \ - ((unsigned long long(*)(PyObject *))_cffi_exports[8]) -#define _cffi_to_c_char \ - ((int(*)(PyObject *))_cffi_exports[9]) -#define _cffi_from_c_pointer \ - ((PyObject *(*)(char *, CTypeDescrObject *))_cffi_exports[10]) -#define _cffi_to_c_pointer \ - ((char *(*)(PyObject *, CTypeDescrObject *))_cffi_exports[11]) -#define _cffi_get_struct_layout \ - ((PyObject *(*)(Py_ssize_t[]))_cffi_exports[12]) -#define _cffi_restore_errno \ - ((void(*)(void))_cffi_exports[13]) -#define _cffi_save_errno \ - ((void(*)(void))_cffi_exports[14]) -#define _cffi_from_c_char \ - ((PyObject *(*)(char))_cffi_exports[15]) -#define _cffi_from_c_deref \ - ((PyObject *(*)(char *, CTypeDescrObject *))_cffi_exports[16]) -#define _cffi_to_c \ - ((int(*)(char *, CTypeDescrObject *, PyObject *))_cffi_exports[17]) -#define _cffi_from_c_struct \ - ((PyObject *(*)(char *, CTypeDescrObject *))_cffi_exports[18]) -#define _cffi_to_c_wchar_t \ - ((wchar_t(*)(PyObject *))_cffi_exports[19]) -#define _cffi_from_c_wchar_t \ - ((PyObject *(*)(wchar_t))_cffi_exports[20]) -#define _cffi_to_c_long_double \ - ((long double(*)(PyObject *))_cffi_exports[21]) -#define _cffi_to_c__Bool \ - ((_Bool(*)(PyObject *))_cffi_exports[22]) -#define _cffi_prepare_pointer_call_argument \ - ((Py_ssize_t(*)(CTypeDescrObject *, PyObject *, char **))_cffi_exports[23]) -#define _cffi_convert_array_from_object \ - ((int(*)(char *, CTypeDescrObject *, PyObject *))_cffi_exports[24]) -#define _CFFI_NUM_EXPORTS 25 - -typedef struct _ctypedescr CTypeDescrObject; - -static void *_cffi_exports[_CFFI_NUM_EXPORTS]; -static PyObject *_cffi_types, *_cffi_VerificationError; - -static int _cffi_setup_custom(PyObject *lib); /* forward */ - -static PyObject *_cffi_setup(PyObject *self, PyObject *args) -{ - PyObject *library; - int was_alive = (_cffi_types != NULL); - (void)self; /* unused */ - if (!PyArg_ParseTuple(args, "OOO", &_cffi_types, &_cffi_VerificationError, - &library)) - return NULL; - Py_INCREF(_cffi_types); - Py_INCREF(_cffi_VerificationError); - if (_cffi_setup_custom(library) < 0) - return NULL; - return PyBool_FromLong(was_alive); -} - -union _cffi_union_alignment_u { - unsigned char m_char; - unsigned short m_short; - unsigned int m_int; - unsigned long m_long; - unsigned long long m_longlong; - float m_float; - double m_double; - long double m_longdouble; -}; - -struct _cffi_freeme_s { - struct _cffi_freeme_s *next; - union _cffi_union_alignment_u alignment; -}; - -#ifdef __GNUC__ - 
__attribute__((unused)) -#endif -static int _cffi_convert_array_argument(CTypeDescrObject *ctptr, PyObject *arg, - char **output_data, Py_ssize_t datasize, - struct _cffi_freeme_s **freeme) -{ - char *p; - if (datasize < 0) - return -1; - - p = *output_data; - if (p == NULL) { - struct _cffi_freeme_s *fp = (struct _cffi_freeme_s *)PyObject_Malloc( - offsetof(struct _cffi_freeme_s, alignment) + (size_t)datasize); - if (fp == NULL) - return -1; - fp->next = *freeme; - *freeme = fp; - p = *output_data = (char *)&fp->alignment; - } - memset((void *)p, 0, (size_t)datasize); - return _cffi_convert_array_from_object(p, ctptr, arg); -} - -#ifdef __GNUC__ - __attribute__((unused)) -#endif -static void _cffi_free_array_arguments(struct _cffi_freeme_s *freeme) -{ - do { - void *p = (void *)freeme; - freeme = freeme->next; - PyObject_Free(p); - } while (freeme != NULL); -} - -static int _cffi_init(void) -{ - PyObject *module, *c_api_object = NULL; - - module = PyImport_ImportModule("_cffi_backend"); - if (module == NULL) - goto failure; - - c_api_object = PyObject_GetAttrString(module, "_C_API"); - if (c_api_object == NULL) - goto failure; - if (!PyCapsule_CheckExact(c_api_object)) { - PyErr_SetNone(PyExc_ImportError); - goto failure; - } - memcpy(_cffi_exports, PyCapsule_GetPointer(c_api_object, "cffi"), - _CFFI_NUM_EXPORTS * sizeof(void *)); - - Py_DECREF(module); - Py_DECREF(c_api_object); - return 0; - - failure: - Py_XDECREF(module); - Py_XDECREF(c_api_object); - return -1; -} - -#define _cffi_type(num) ((CTypeDescrObject *)PyList_GET_ITEM(_cffi_types, num)) - -/**********/ -''' diff --git a/env/lib/python3.10/site-packages/cffi/vengine_gen.py b/env/lib/python3.10/site-packages/cffi/vengine_gen.py deleted file mode 100644 index bffc821..0000000 --- a/env/lib/python3.10/site-packages/cffi/vengine_gen.py +++ /dev/null @@ -1,679 +0,0 @@ -# -# DEPRECATED: implementation for ffi.verify() -# -import sys, os -import types - -from . import model -from .error import VerificationError - - -class VGenericEngine(object): - _class_key = 'g' - _gen_python_module = False - - def __init__(self, verifier): - self.verifier = verifier - self.ffi = verifier.ffi - self.export_symbols = [] - self._struct_pending_verification = {} - - def patch_extension_kwds(self, kwds): - # add 'export_symbols' to the dictionary. Note that we add the - # list before filling it. When we fill it, it will thus also show - # up in kwds['export_symbols']. - kwds.setdefault('export_symbols', self.export_symbols) - - def find_module(self, module_name, path, so_suffixes): - for so_suffix in so_suffixes: - basename = module_name + so_suffix - if path is None: - path = sys.path - for dirname in path: - filename = os.path.join(dirname, basename) - if os.path.isfile(filename): - return filename - - def collect_types(self): - pass # not needed in the generic engine - - def _prnt(self, what=''): - self._f.write(what + '\n') - - def write_source_to_f(self): - prnt = self._prnt - # first paste some standard set of lines that are mostly '#include' - prnt(cffimod_header) - # then paste the C source given by the user, verbatim. - prnt(self.verifier.preamble) - # - # call generate_gen_xxx_decl(), for every xxx found from - # ffi._parser._declarations. This generates all the functions. 
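Both deleted engines drive generation by splitting each declaration key (e.g. `"function foo"`, `"struct bar"`) into a kind and a real name, then dispatching to a method looked up by name for the current step. A minimal sketch of that dispatch loop, with hypothetical handler names:

```python
# Illustrative sketch only, not part of the deleted cffi sources.
class TinyEngine:
    def _generate(self, step_name, declarations):
        for name, tp in sorted(declarations.items()):
            kind, realname = name.split(' ', 1)
            try:
                # e.g. '_generate_function_decl' for kind='function', step='decl'
                method = getattr(self, '_generate_%s_%s' % (kind, step_name))
            except AttributeError:
                raise NotImplementedError('no handler for %r' % name)
            method(tp, realname)

    # hypothetical handlers for the 'decl' step
    def _generate_function_decl(self, tp, realname):
        print('declaring function', realname)

    def _generate_struct_decl(self, tp, realname):
        print('declaring struct', realname)

TinyEngine()._generate('decl', {'function foo': None, 'struct bar': None})
```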
- self._generate('decl') - # - # on Windows, distutils insists on putting init_cffi_xyz in - # 'export_symbols', so instead of fighting it, just give up and - # give it one - if sys.platform == 'win32': - if sys.version_info >= (3,): - prefix = 'PyInit_' - else: - prefix = 'init' - modname = self.verifier.get_module_name() - prnt("void %s%s(void) { }\n" % (prefix, modname)) - - def load_library(self, flags=0): - # import it with the CFFI backend - backend = self.ffi._backend - # needs to make a path that contains '/', on Posix - filename = os.path.join(os.curdir, self.verifier.modulefilename) - module = backend.load_library(filename, flags) - # - # call loading_gen_struct() to get the struct layout inferred by - # the C compiler - self._load(module, 'loading') - - # build the FFILibrary class and instance, this is a module subclass - # because modules are expected to have usually-constant-attributes and - # in PyPy this means the JIT is able to treat attributes as constant, - # which we want. - class FFILibrary(types.ModuleType): - _cffi_generic_module = module - _cffi_ffi = self.ffi - _cffi_dir = [] - def __dir__(self): - return FFILibrary._cffi_dir - library = FFILibrary("") - # - # finally, call the loaded_gen_xxx() functions. This will set - # up the 'library' object. - self._load(module, 'loaded', library=library) - return library - - def _get_declarations(self): - lst = [(key, tp) for (key, (tp, qual)) in - self.ffi._parser._declarations.items()] - lst.sort() - return lst - - def _generate(self, step_name): - for name, tp in self._get_declarations(): - kind, realname = name.split(' ', 1) - try: - method = getattr(self, '_generate_gen_%s_%s' % (kind, - step_name)) - except AttributeError: - raise VerificationError( - "not implemented in verify(): %r" % name) - try: - method(tp, realname) - except Exception as e: - model.attach_exception_info(e, name) - raise - - def _load(self, module, step_name, **kwds): - for name, tp in self._get_declarations(): - kind, realname = name.split(' ', 1) - method = getattr(self, '_%s_gen_%s' % (step_name, kind)) - try: - method(tp, realname, module, **kwds) - except Exception as e: - model.attach_exception_info(e, name) - raise - - def _generate_nothing(self, tp, name): - pass - - def _loaded_noop(self, tp, name, module, **kwds): - pass - - # ---------- - # typedefs: generates no code so far - - _generate_gen_typedef_decl = _generate_nothing - _loading_gen_typedef = _loaded_noop - _loaded_gen_typedef = _loaded_noop - - # ---------- - # function declarations - - def _generate_gen_function_decl(self, tp, name): - assert isinstance(tp, model.FunctionPtrType) - if tp.ellipsis: - # cannot support vararg functions better than this: check for its - # exact type (including the fixed arguments), and build it as a - # constant function pointer (no _cffi_f_%s wrapper) - self._generate_gen_const(False, name, tp) - return - prnt = self._prnt - numargs = len(tp.args) - argnames = [] - for i, type in enumerate(tp.args): - indirection = '' - if isinstance(type, model.StructOrUnion): - indirection = '*' - argnames.append('%sx%d' % (indirection, i)) - context = 'argument of %s' % name - arglist = [type.get_c_name(' %s' % arg, context) - for type, arg in zip(tp.args, argnames)] - tpresult = tp.result - if isinstance(tpresult, model.StructOrUnion): - arglist.insert(0, tpresult.get_c_name(' *r', context)) - tpresult = model.void_type - arglist = ', '.join(arglist) or 'void' - wrappername = '_cffi_f_%s' % name - self.export_symbols.append(wrappername) - if tp.abi: - abi = 
tp.abi + ' ' - else: - abi = '' - funcdecl = ' %s%s(%s)' % (abi, wrappername, arglist) - context = 'result of %s' % name - prnt(tpresult.get_c_name(funcdecl, context)) - prnt('{') - # - if isinstance(tp.result, model.StructOrUnion): - result_code = '*r = ' - elif not isinstance(tp.result, model.VoidType): - result_code = 'return ' - else: - result_code = '' - prnt(' %s%s(%s);' % (result_code, name, ', '.join(argnames))) - prnt('}') - prnt() - - _loading_gen_function = _loaded_noop - - def _loaded_gen_function(self, tp, name, module, library): - assert isinstance(tp, model.FunctionPtrType) - if tp.ellipsis: - newfunction = self._load_constant(False, tp, name, module) - else: - indirections = [] - base_tp = tp - if (any(isinstance(typ, model.StructOrUnion) for typ in tp.args) - or isinstance(tp.result, model.StructOrUnion)): - indirect_args = [] - for i, typ in enumerate(tp.args): - if isinstance(typ, model.StructOrUnion): - typ = model.PointerType(typ) - indirections.append((i, typ)) - indirect_args.append(typ) - indirect_result = tp.result - if isinstance(indirect_result, model.StructOrUnion): - if indirect_result.fldtypes is None: - raise TypeError("'%s' is used as result type, " - "but is opaque" % ( - indirect_result._get_c_name(),)) - indirect_result = model.PointerType(indirect_result) - indirect_args.insert(0, indirect_result) - indirections.insert(0, ("result", indirect_result)) - indirect_result = model.void_type - tp = model.FunctionPtrType(tuple(indirect_args), - indirect_result, tp.ellipsis) - BFunc = self.ffi._get_cached_btype(tp) - wrappername = '_cffi_f_%s' % name - newfunction = module.load_function(BFunc, wrappername) - for i, typ in indirections: - newfunction = self._make_struct_wrapper(newfunction, i, typ, - base_tp) - setattr(library, name, newfunction) - type(library)._cffi_dir.append(name) - - def _make_struct_wrapper(self, oldfunc, i, tp, base_tp): - backend = self.ffi._backend - BType = self.ffi._get_cached_btype(tp) - if i == "result": - ffi = self.ffi - def newfunc(*args): - res = ffi.new(BType) - oldfunc(res, *args) - return res[0] - else: - def newfunc(*args): - args = args[:i] + (backend.newp(BType, args[i]),) + args[i+1:] - return oldfunc(*args) - newfunc._cffi_base_type = base_tp - return newfunc - - # ---------- - # named structs - - def _generate_gen_struct_decl(self, tp, name): - assert name == tp.name - self._generate_struct_or_union_decl(tp, 'struct', name) - - def _loading_gen_struct(self, tp, name, module): - self._loading_struct_or_union(tp, 'struct', name, module) - - def _loaded_gen_struct(self, tp, name, module, **kwds): - self._loaded_struct_or_union(tp) - - def _generate_gen_union_decl(self, tp, name): - assert name == tp.name - self._generate_struct_or_union_decl(tp, 'union', name) - - def _loading_gen_union(self, tp, name, module): - self._loading_struct_or_union(tp, 'union', name, module) - - def _loaded_gen_union(self, tp, name, module, **kwds): - self._loaded_struct_or_union(tp) - - def _generate_struct_or_union_decl(self, tp, prefix, name): - if tp.fldnames is None: - return # nothing to do with opaque structs - checkfuncname = '_cffi_check_%s_%s' % (prefix, name) - layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) - cname = ('%s %s' % (prefix, name)).strip() - # - prnt = self._prnt - prnt('static void %s(%s *p)' % (checkfuncname, cname)) - prnt('{') - prnt(' /* only to generate compile-time warnings or errors */') - prnt(' (void)p;') - for fname, ftype, fbitsize, fqual in tp.enumfields(): - if (isinstance(ftype, model.PrimitiveType) - 
and ftype.is_integer_type()) or fbitsize >= 0: - # accept all integers, but complain on float or double - prnt(' (void)((p->%s) << 1);' % fname) - else: - # only accept exactly the type declared. - try: - prnt(' { %s = &p->%s; (void)tmp; }' % ( - ftype.get_c_name('*tmp', 'field %r'%fname, quals=fqual), - fname)) - except VerificationError as e: - prnt(' /* %s */' % str(e)) # cannot verify it, ignore - prnt('}') - self.export_symbols.append(layoutfuncname) - prnt('intptr_t %s(intptr_t i)' % (layoutfuncname,)) - prnt('{') - prnt(' struct _cffi_aligncheck { char x; %s y; };' % cname) - prnt(' static intptr_t nums[] = {') - prnt(' sizeof(%s),' % cname) - prnt(' offsetof(struct _cffi_aligncheck, y),') - for fname, ftype, fbitsize, fqual in tp.enumfields(): - if fbitsize >= 0: - continue # xxx ignore fbitsize for now - prnt(' offsetof(%s, %s),' % (cname, fname)) - if isinstance(ftype, model.ArrayType) and ftype.length is None: - prnt(' 0, /* %s */' % ftype._get_c_name()) - else: - prnt(' sizeof(((%s *)0)->%s),' % (cname, fname)) - prnt(' -1') - prnt(' };') - prnt(' return nums[i];') - prnt(' /* the next line is not executed, but compiled */') - prnt(' %s(0);' % (checkfuncname,)) - prnt('}') - prnt() - - def _loading_struct_or_union(self, tp, prefix, name, module): - if tp.fldnames is None: - return # nothing to do with opaque structs - layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) - # - BFunc = self.ffi._typeof_locked("intptr_t(*)(intptr_t)")[0] - function = module.load_function(BFunc, layoutfuncname) - layout = [] - num = 0 - while True: - x = function(num) - if x < 0: break - layout.append(x) - num += 1 - if isinstance(tp, model.StructOrUnion) and tp.partial: - # use the function()'s sizes and offsets to guide the - # layout of the struct - totalsize = layout[0] - totalalignment = layout[1] - fieldofs = layout[2::2] - fieldsize = layout[3::2] - tp.force_flatten() - assert len(fieldofs) == len(fieldsize) == len(tp.fldnames) - tp.fixedlayout = fieldofs, fieldsize, totalsize, totalalignment - else: - cname = ('%s %s' % (prefix, name)).strip() - self._struct_pending_verification[tp] = layout, cname - - def _loaded_struct_or_union(self, tp): - if tp.fldnames is None: - return # nothing to do with opaque structs - self.ffi._get_cached_btype(tp) # force 'fixedlayout' to be considered - - if tp in self._struct_pending_verification: - # check that the layout sizes and offsets match the real ones - def check(realvalue, expectedvalue, msg): - if realvalue != expectedvalue: - raise VerificationError( - "%s (we have %d, but C compiler says %d)" - % (msg, expectedvalue, realvalue)) - ffi = self.ffi - BStruct = ffi._get_cached_btype(tp) - layout, cname = self._struct_pending_verification.pop(tp) - check(layout[0], ffi.sizeof(BStruct), "wrong total size") - check(layout[1], ffi.alignof(BStruct), "wrong total alignment") - i = 2 - for fname, ftype, fbitsize, fqual in tp.enumfields(): - if fbitsize >= 0: - continue # xxx ignore fbitsize for now - check(layout[i], ffi.offsetof(BStruct, fname), - "wrong offset for field %r" % (fname,)) - if layout[i+1] != 0: - BField = ffi._get_cached_btype(ftype) - check(layout[i+1], ffi.sizeof(BField), - "wrong size for field %r" % (fname,)) - i += 2 - assert i == len(layout) - - # ---------- - # 'anonymous' declarations. These are produced for anonymous structs - # or unions; the 'name' is obtained by a typedef. 
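The layout checks above compare compiler-reported sizes and offsets against what the FFI computed. The same kind of cross-check can be sketched with `ctypes` and a hypothetical `Point` struct; here the `layout` list merely stands in for the numbers the generated layout function would return:

```python
# Illustrative sketch only, not part of the deleted cffi sources.
import ctypes

class Point(ctypes.Structure):                      # hypothetical struct
    _fields_ = [("x", ctypes.c_int), ("y", ctypes.c_double)]

def check(realvalue, expectedvalue, msg):
    if realvalue != expectedvalue:
        raise ValueError("%s (we have %d, but C compiler says %d)"
                         % (msg, expectedvalue, realvalue))

# stand-ins for the values read back from the generated layout function:
# total size followed by per-field offsets
layout = [ctypes.sizeof(Point), Point.x.offset, Point.y.offset]

check(layout[0], ctypes.sizeof(Point), "wrong total size")
check(layout[1], Point.x.offset, "wrong offset for field 'x'")
check(layout[2], Point.y.offset, "wrong offset for field 'y'")
```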
- - def _generate_gen_anonymous_decl(self, tp, name): - if isinstance(tp, model.EnumType): - self._generate_gen_enum_decl(tp, name, '') - else: - self._generate_struct_or_union_decl(tp, '', name) - - def _loading_gen_anonymous(self, tp, name, module): - if isinstance(tp, model.EnumType): - self._loading_gen_enum(tp, name, module, '') - else: - self._loading_struct_or_union(tp, '', name, module) - - def _loaded_gen_anonymous(self, tp, name, module, **kwds): - if isinstance(tp, model.EnumType): - self._loaded_gen_enum(tp, name, module, **kwds) - else: - self._loaded_struct_or_union(tp) - - # ---------- - # constants, likely declared with '#define' - - def _generate_gen_const(self, is_int, name, tp=None, category='const', - check_value=None): - prnt = self._prnt - funcname = '_cffi_%s_%s' % (category, name) - self.export_symbols.append(funcname) - if check_value is not None: - assert is_int - assert category == 'const' - prnt('int %s(char *out_error)' % funcname) - prnt('{') - self._check_int_constant_value(name, check_value) - prnt(' return 0;') - prnt('}') - elif is_int: - assert category == 'const' - prnt('int %s(long long *out_value)' % funcname) - prnt('{') - prnt(' *out_value = (long long)(%s);' % (name,)) - prnt(' return (%s) <= 0;' % (name,)) - prnt('}') - else: - assert tp is not None - assert check_value is None - if category == 'var': - ampersand = '&' - else: - ampersand = '' - extra = '' - if category == 'const' and isinstance(tp, model.StructOrUnion): - extra = 'const *' - ampersand = '&' - prnt(tp.get_c_name(' %s%s(void)' % (extra, funcname), name)) - prnt('{') - prnt(' return (%s%s);' % (ampersand, name)) - prnt('}') - prnt() - - def _generate_gen_constant_decl(self, tp, name): - is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type() - self._generate_gen_const(is_int, name, tp) - - _loading_gen_constant = _loaded_noop - - def _load_constant(self, is_int, tp, name, module, check_value=None): - funcname = '_cffi_const_%s' % name - if check_value is not None: - assert is_int - self._load_known_int_constant(module, funcname) - value = check_value - elif is_int: - BType = self.ffi._typeof_locked("long long*")[0] - BFunc = self.ffi._typeof_locked("int(*)(long long*)")[0] - function = module.load_function(BFunc, funcname) - p = self.ffi.new(BType) - negative = function(p) - value = int(p[0]) - if value < 0 and not negative: - BLongLong = self.ffi._typeof_locked("long long")[0] - value += (1 << (8*self.ffi.sizeof(BLongLong))) - else: - assert check_value is None - fntypeextra = '(*)(void)' - if isinstance(tp, model.StructOrUnion): - fntypeextra = '*' + fntypeextra - BFunc = self.ffi._typeof_locked(tp.get_c_name(fntypeextra, name))[0] - function = module.load_function(BFunc, funcname) - value = function() - if isinstance(tp, model.StructOrUnion): - value = value[0] - return value - - def _loaded_gen_constant(self, tp, name, module, library): - is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type() - value = self._load_constant(is_int, tp, name, module) - setattr(library, name, value) - type(library)._cffi_dir.append(name) - - # ---------- - # enums - - def _check_int_constant_value(self, name, value): - prnt = self._prnt - if value <= 0: - prnt(' if ((%s) > 0 || (long)(%s) != %dL) {' % ( - name, name, value)) - else: - prnt(' if ((%s) <= 0 || (unsigned long)(%s) != %dUL) {' % ( - name, name, value)) - prnt(' char buf[64];') - prnt(' if ((%s) <= 0)' % name) - prnt(' sprintf(buf, "%%ld", (long)(%s));' % name) - prnt(' else') - prnt(' sprintf(buf, "%%lu", 
(unsigned long)(%s));' % - name) - prnt(' sprintf(out_error, "%s has the real value %s, not %s",') - prnt(' "%s", buf, "%d");' % (name[:100], value)) - prnt(' return -1;') - prnt(' }') - - def _load_known_int_constant(self, module, funcname): - BType = self.ffi._typeof_locked("char[]")[0] - BFunc = self.ffi._typeof_locked("int(*)(char*)")[0] - function = module.load_function(BFunc, funcname) - p = self.ffi.new(BType, 256) - if function(p) < 0: - error = self.ffi.string(p) - if sys.version_info >= (3,): - error = str(error, 'utf-8') - raise VerificationError(error) - - def _enum_funcname(self, prefix, name): - # "$enum_$1" => "___D_enum____D_1" - name = name.replace('$', '___D_') - return '_cffi_e_%s_%s' % (prefix, name) - - def _generate_gen_enum_decl(self, tp, name, prefix='enum'): - if tp.partial: - for enumerator in tp.enumerators: - self._generate_gen_const(True, enumerator) - return - # - funcname = self._enum_funcname(prefix, name) - self.export_symbols.append(funcname) - prnt = self._prnt - prnt('int %s(char *out_error)' % funcname) - prnt('{') - for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): - self._check_int_constant_value(enumerator, enumvalue) - prnt(' return 0;') - prnt('}') - prnt() - - def _loading_gen_enum(self, tp, name, module, prefix='enum'): - if tp.partial: - enumvalues = [self._load_constant(True, tp, enumerator, module) - for enumerator in tp.enumerators] - tp.enumvalues = tuple(enumvalues) - tp.partial_resolved = True - else: - funcname = self._enum_funcname(prefix, name) - self._load_known_int_constant(module, funcname) - - def _loaded_gen_enum(self, tp, name, module, library): - for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): - setattr(library, enumerator, enumvalue) - type(library)._cffi_dir.append(enumerator) - - # ---------- - # macros: for now only for integers - - def _generate_gen_macro_decl(self, tp, name): - if tp == '...': - check_value = None - else: - check_value = tp # an integer - self._generate_gen_const(True, name, check_value=check_value) - - _loading_gen_macro = _loaded_noop - - def _loaded_gen_macro(self, tp, name, module, library): - if tp == '...': - check_value = None - else: - check_value = tp # an integer - value = self._load_constant(True, tp, name, module, - check_value=check_value) - setattr(library, name, value) - type(library)._cffi_dir.append(name) - - # ---------- - # global variables - - def _generate_gen_variable_decl(self, tp, name): - if isinstance(tp, model.ArrayType): - if tp.length_is_unknown(): - prnt = self._prnt - funcname = '_cffi_sizeof_%s' % (name,) - self.export_symbols.append(funcname) - prnt("size_t %s(void)" % funcname) - prnt("{") - prnt(" return sizeof(%s);" % (name,)) - prnt("}") - tp_ptr = model.PointerType(tp.item) - self._generate_gen_const(False, name, tp_ptr) - else: - tp_ptr = model.PointerType(tp) - self._generate_gen_const(False, name, tp_ptr, category='var') - - _loading_gen_variable = _loaded_noop - - def _loaded_gen_variable(self, tp, name, module, library): - if isinstance(tp, model.ArrayType): # int a[5] is "constant" in the - # sense that "a=..." 
is forbidden - if tp.length_is_unknown(): - funcname = '_cffi_sizeof_%s' % (name,) - BFunc = self.ffi._typeof_locked('size_t(*)(void)')[0] - function = module.load_function(BFunc, funcname) - size = function() - BItemType = self.ffi._get_cached_btype(tp.item) - length, rest = divmod(size, self.ffi.sizeof(BItemType)) - if rest != 0: - raise VerificationError( - "bad size: %r does not seem to be an array of %s" % - (name, tp.item)) - tp = tp.resolve_length(length) - tp_ptr = model.PointerType(tp.item) - value = self._load_constant(False, tp_ptr, name, module) - # 'value' is a which we have to replace with - # a if the N is actually known - if tp.length is not None: - BArray = self.ffi._get_cached_btype(tp) - value = self.ffi.cast(BArray, value) - setattr(library, name, value) - type(library)._cffi_dir.append(name) - return - # remove ptr= from the library instance, and replace - # it by a property on the class, which reads/writes into ptr[0]. - funcname = '_cffi_var_%s' % name - BFunc = self.ffi._typeof_locked(tp.get_c_name('*(*)(void)', name))[0] - function = module.load_function(BFunc, funcname) - ptr = function() - def getter(library): - return ptr[0] - def setter(library, value): - ptr[0] = value - setattr(type(library), name, property(getter, setter)) - type(library)._cffi_dir.append(name) - -cffimod_header = r''' -#include -#include -#include -#include -#include /* XXX for ssize_t on some platforms */ - -/* this block of #ifs should be kept exactly identical between - c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py - and cffi/_cffi_include.h */ -#if defined(_MSC_VER) -# include /* for alloca() */ -# if _MSC_VER < 1600 /* MSVC < 2010 */ - typedef __int8 int8_t; - typedef __int16 int16_t; - typedef __int32 int32_t; - typedef __int64 int64_t; - typedef unsigned __int8 uint8_t; - typedef unsigned __int16 uint16_t; - typedef unsigned __int32 uint32_t; - typedef unsigned __int64 uint64_t; - typedef __int8 int_least8_t; - typedef __int16 int_least16_t; - typedef __int32 int_least32_t; - typedef __int64 int_least64_t; - typedef unsigned __int8 uint_least8_t; - typedef unsigned __int16 uint_least16_t; - typedef unsigned __int32 uint_least32_t; - typedef unsigned __int64 uint_least64_t; - typedef __int8 int_fast8_t; - typedef __int16 int_fast16_t; - typedef __int32 int_fast32_t; - typedef __int64 int_fast64_t; - typedef unsigned __int8 uint_fast8_t; - typedef unsigned __int16 uint_fast16_t; - typedef unsigned __int32 uint_fast32_t; - typedef unsigned __int64 uint_fast64_t; - typedef __int64 intmax_t; - typedef unsigned __int64 uintmax_t; -# else -# include -# endif -# if _MSC_VER < 1800 /* MSVC < 2013 */ -# ifndef __cplusplus - typedef unsigned char _Bool; -# endif -# endif -# define _cffi_float_complex_t _Fcomplex /* include for it */ -# define _cffi_double_complex_t _Dcomplex /* include for it */ -#else -# include -# if (defined (__SVR4) && defined (__sun)) || defined(_AIX) || defined(__hpux) -# include -# endif -# define _cffi_float_complex_t float _Complex -# define _cffi_double_complex_t double _Complex -#endif -''' diff --git a/env/lib/python3.10/site-packages/cffi/verifier.py b/env/lib/python3.10/site-packages/cffi/verifier.py deleted file mode 100644 index e392a2b..0000000 --- a/env/lib/python3.10/site-packages/cffi/verifier.py +++ /dev/null @@ -1,306 +0,0 @@ -# -# DEPRECATED: implementation for ffi.verify() -# -import sys, os, binascii, shutil, io -from . import __version_verifier_modules__ -from . 
import ffiplatform -from .error import VerificationError - -if sys.version_info >= (3, 3): - import importlib.machinery - def _extension_suffixes(): - return importlib.machinery.EXTENSION_SUFFIXES[:] -else: - import imp - def _extension_suffixes(): - return [suffix for suffix, _, type in imp.get_suffixes() - if type == imp.C_EXTENSION] - - -if sys.version_info >= (3,): - NativeIO = io.StringIO -else: - class NativeIO(io.BytesIO): - def write(self, s): - if isinstance(s, unicode): - s = s.encode('ascii') - super(NativeIO, self).write(s) - - -class Verifier(object): - - def __init__(self, ffi, preamble, tmpdir=None, modulename=None, - ext_package=None, tag='', force_generic_engine=False, - source_extension='.c', flags=None, relative_to=None, **kwds): - if ffi._parser._uses_new_feature: - raise VerificationError( - "feature not supported with ffi.verify(), but only " - "with ffi.set_source(): %s" % (ffi._parser._uses_new_feature,)) - self.ffi = ffi - self.preamble = preamble - if not modulename: - flattened_kwds = ffiplatform.flatten(kwds) - vengine_class = _locate_engine_class(ffi, force_generic_engine) - self._vengine = vengine_class(self) - self._vengine.patch_extension_kwds(kwds) - self.flags = flags - self.kwds = self.make_relative_to(kwds, relative_to) - # - if modulename: - if tag: - raise TypeError("can't specify both 'modulename' and 'tag'") - else: - key = '\x00'.join(['%d.%d' % sys.version_info[:2], - __version_verifier_modules__, - preamble, flattened_kwds] + - ffi._cdefsources) - if sys.version_info >= (3,): - key = key.encode('utf-8') - k1 = hex(binascii.crc32(key[0::2]) & 0xffffffff) - k1 = k1.lstrip('0x').rstrip('L') - k2 = hex(binascii.crc32(key[1::2]) & 0xffffffff) - k2 = k2.lstrip('0').rstrip('L') - modulename = '_cffi_%s_%s%s%s' % (tag, self._vengine._class_key, - k1, k2) - suffix = _get_so_suffixes()[0] - self.tmpdir = tmpdir or _caller_dir_pycache() - self.sourcefilename = os.path.join(self.tmpdir, modulename + source_extension) - self.modulefilename = os.path.join(self.tmpdir, modulename + suffix) - self.ext_package = ext_package - self._has_source = False - self._has_module = False - - def write_source(self, file=None): - """Write the C source code. It is produced in 'self.sourcefilename', - which can be tweaked beforehand.""" - with self.ffi._lock: - if self._has_source and file is None: - raise VerificationError( - "source code already written") - self._write_source(file) - - def compile_module(self): - """Write the C source code (if not done already) and compile it. - This produces a dynamic link library in 'self.modulefilename'.""" - with self.ffi._lock: - if self._has_module: - raise VerificationError("module already compiled") - if not self._has_source: - self._write_source() - self._compile_module() - - def load_library(self): - """Get a C module from this Verifier instance. - Returns an instance of a FFILibrary class that behaves like the - objects returned by ffi.dlopen(), but that delegates all - operations to the C module. If necessary, the C code is written - and compiled first. 
- """ - with self.ffi._lock: - if not self._has_module: - self._locate_module() - if not self._has_module: - if not self._has_source: - self._write_source() - self._compile_module() - return self._load_library() - - def get_module_name(self): - basename = os.path.basename(self.modulefilename) - # kill both the .so extension and the other .'s, as introduced - # by Python 3: 'basename.cpython-33m.so' - basename = basename.split('.', 1)[0] - # and the _d added in Python 2 debug builds --- but try to be - # conservative and not kill a legitimate _d - if basename.endswith('_d') and hasattr(sys, 'gettotalrefcount'): - basename = basename[:-2] - return basename - - def get_extension(self): - if not self._has_source: - with self.ffi._lock: - if not self._has_source: - self._write_source() - sourcename = ffiplatform.maybe_relative_path(self.sourcefilename) - modname = self.get_module_name() - return ffiplatform.get_extension(sourcename, modname, **self.kwds) - - def generates_python_module(self): - return self._vengine._gen_python_module - - def make_relative_to(self, kwds, relative_to): - if relative_to and os.path.dirname(relative_to): - dirname = os.path.dirname(relative_to) - kwds = kwds.copy() - for key in ffiplatform.LIST_OF_FILE_NAMES: - if key in kwds: - lst = kwds[key] - if not isinstance(lst, (list, tuple)): - raise TypeError("keyword '%s' should be a list or tuple" - % (key,)) - lst = [os.path.join(dirname, fn) for fn in lst] - kwds[key] = lst - return kwds - - # ---------- - - def _locate_module(self): - if not os.path.isfile(self.modulefilename): - if self.ext_package: - try: - pkg = __import__(self.ext_package, None, None, ['__doc__']) - except ImportError: - return # cannot import the package itself, give up - # (e.g. it might be called differently before installation) - path = pkg.__path__ - else: - path = None - filename = self._vengine.find_module(self.get_module_name(), path, - _get_so_suffixes()) - if filename is None: - return - self.modulefilename = filename - self._vengine.collect_types() - self._has_module = True - - def _write_source_to(self, file): - self._vengine._f = file - try: - self._vengine.write_source_to_f() - finally: - del self._vengine._f - - def _write_source(self, file=None): - if file is not None: - self._write_source_to(file) - else: - # Write our source file to an in memory file. 
- f = NativeIO() - self._write_source_to(f) - source_data = f.getvalue() - - # Determine if this matches the current file - if os.path.exists(self.sourcefilename): - with open(self.sourcefilename, "r") as fp: - needs_written = not (fp.read() == source_data) - else: - needs_written = True - - # Actually write the file out if it doesn't match - if needs_written: - _ensure_dir(self.sourcefilename) - with open(self.sourcefilename, "w") as fp: - fp.write(source_data) - - # Set this flag - self._has_source = True - - def _compile_module(self): - # compile this C source - tmpdir = os.path.dirname(self.sourcefilename) - outputfilename = ffiplatform.compile(tmpdir, self.get_extension()) - try: - same = ffiplatform.samefile(outputfilename, self.modulefilename) - except OSError: - same = False - if not same: - _ensure_dir(self.modulefilename) - shutil.move(outputfilename, self.modulefilename) - self._has_module = True - - def _load_library(self): - assert self._has_module - if self.flags is not None: - return self._vengine.load_library(self.flags) - else: - return self._vengine.load_library() - -# ____________________________________________________________ - -_FORCE_GENERIC_ENGINE = False # for tests - -def _locate_engine_class(ffi, force_generic_engine): - if _FORCE_GENERIC_ENGINE: - force_generic_engine = True - if not force_generic_engine: - if '__pypy__' in sys.builtin_module_names: - force_generic_engine = True - else: - try: - import _cffi_backend - except ImportError: - _cffi_backend = '?' - if ffi._backend is not _cffi_backend: - force_generic_engine = True - if force_generic_engine: - from . import vengine_gen - return vengine_gen.VGenericEngine - else: - from . import vengine_cpy - return vengine_cpy.VCPythonEngine - -# ____________________________________________________________ - -_TMPDIR = None - -def _caller_dir_pycache(): - if _TMPDIR: - return _TMPDIR - result = os.environ.get('CFFI_TMPDIR') - if result: - return result - filename = sys._getframe(2).f_code.co_filename - return os.path.abspath(os.path.join(os.path.dirname(filename), - '__pycache__')) - -def set_tmpdir(dirname): - """Set the temporary directory to use instead of __pycache__.""" - global _TMPDIR - _TMPDIR = dirname - -def cleanup_tmpdir(tmpdir=None, keep_so=False): - """Clean up the temporary directory by removing all files in it - called `_cffi_*.{c,so}` as well as the `build` subdirectory.""" - tmpdir = tmpdir or _caller_dir_pycache() - try: - filelist = os.listdir(tmpdir) - except OSError: - return - if keep_so: - suffix = '.c' # only remove .c files - else: - suffix = _get_so_suffixes()[0].lower() - for fn in filelist: - if fn.lower().startswith('_cffi_') and ( - fn.lower().endswith(suffix) or fn.lower().endswith('.c')): - try: - os.unlink(os.path.join(tmpdir, fn)) - except OSError: - pass - clean_dir = [os.path.join(tmpdir, 'build')] - for dir in clean_dir: - try: - for fn in os.listdir(dir): - fn = os.path.join(dir, fn) - if os.path.isdir(fn): - clean_dir.append(fn) - else: - os.unlink(fn) - except OSError: - pass - -def _get_so_suffixes(): - suffixes = _extension_suffixes() - if not suffixes: - # bah, no C_EXTENSION available. 
Occurs on pypy without cpyext - if sys.platform == 'win32': - suffixes = [".pyd"] - else: - suffixes = [".so"] - - return suffixes - -def _ensure_dir(filename): - dirname = os.path.dirname(filename) - if dirname and not os.path.isdir(dirname): - os.makedirs(dirname) diff --git a/env/lib/python3.10/site-packages/charset_normalizer-3.4.0.dist-info/INSTALLER b/env/lib/python3.10/site-packages/charset_normalizer-3.4.0.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/env/lib/python3.10/site-packages/charset_normalizer-3.4.0.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python3.10/site-packages/charset_normalizer-3.4.0.dist-info/LICENSE b/env/lib/python3.10/site-packages/charset_normalizer-3.4.0.dist-info/LICENSE deleted file mode 100644 index ad82355..0000000 --- a/env/lib/python3.10/site-packages/charset_normalizer-3.4.0.dist-info/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2019 TAHRI Ahmed R. - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. \ No newline at end of file diff --git a/env/lib/python3.10/site-packages/charset_normalizer-3.4.0.dist-info/METADATA b/env/lib/python3.10/site-packages/charset_normalizer-3.4.0.dist-info/METADATA deleted file mode 100644 index b19096b..0000000 --- a/env/lib/python3.10/site-packages/charset_normalizer-3.4.0.dist-info/METADATA +++ /dev/null @@ -1,695 +0,0 @@ -Metadata-Version: 2.1 -Name: charset-normalizer -Version: 3.4.0 -Summary: The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet. 
-Home-page: https://github.com/Ousret/charset_normalizer -Author: Ahmed TAHRI -Author-email: tahri.ahmed@proton.me -License: MIT -Project-URL: Bug Reports, https://github.com/Ousret/charset_normalizer/issues -Project-URL: Documentation, https://charset-normalizer.readthedocs.io/en/latest -Keywords: encoding,charset,charset-detector,detector,normalization,unicode,chardet,detect -Classifier: Development Status :: 5 - Production/Stable -Classifier: License :: OSI Approved :: MIT License -Classifier: Intended Audience :: Developers -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Classifier: Programming Language :: Python :: 3.12 -Classifier: Programming Language :: Python :: 3.13 -Classifier: Programming Language :: Python :: Implementation :: PyPy -Classifier: Topic :: Text Processing :: Linguistic -Classifier: Topic :: Utilities -Classifier: Typing :: Typed -Requires-Python: >=3.7.0 -Description-Content-Type: text/markdown -License-File: LICENSE -Provides-Extra: unicode_backport - -

-# Charset Detection, for Everyone 👋
-
-*The Real First Universal Charset Detector*
-

-
-> A library that helps you read text from an unknown charset encoding. Motivated by `chardet`,
-> I'm trying to resolve the issue by taking a new approach.
-> All IANA character set names for which the Python core library provides codecs are supported.
-
-
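For quick orientation while reviewing this removal, here is a minimal sketch of the package's basic detection flow. It only uses names that appear elsewhere in this diff (`from_bytes`, `best()`, `.encoding`, `str()` on a match, from the package's own docstring and examples); the byte payload is illustrative only.

```python
# Minimal sketch (reviewer's note, not part of the package): detect the encoding
# of an unknown byte payload using the documented public API.
from charset_normalizer import from_bytes

payload = "Bсеки човек има право на образование.".encode("utf_8")  # illustrative bytes only

results = from_bytes(payload)    # CharsetMatches container of plausible decodings
best_guess = results.best()      # most plausible CharsetMatch, or None

if best_guess is not None:
    print(best_guess.encoding)   # e.g. "utf_8"
    print(str(best_guess))       # the decoded, readable text
else:
    print("No plausible charset found; the payload is likely binary.")
```

Per the `from_bytes` docstring further down in this diff, an empty result set is a strong indicator that the source is binary rather than text.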

- >>>>> 👉 Try Me Online Now, Then Adopt Me 👈 <<<<< -

-
-This project offers you an alternative to **Universal Charset Encoding Detector**, also known as **Chardet**.
-
-| Feature | [Chardet](https://github.com/chardet/chardet) | Charset Normalizer | [cChardet](https://github.com/PyYoshi/cChardet) |
-|--------------------------------------------------|:---------------------------------------------:|:------------------:|:-----------------------------------------------:|
-| `Fast` | ❌ | ✅ | ✅ |
-| `Universal**` | ❌ | ✅ | ❌ |
-| `Reliable` **without** distinguishable standards | ❌ | ✅ | ✅ |
-| `Reliable` **with** distinguishable standards | ✅ | ✅ | ✅ |
-| `License` | LGPL-2.1 (_restrictive_) | MIT | MPL-1.1 (_restrictive_) |
-| `Native Python` | ✅ | ✅ | ❌ |
-| `Detect spoken language` | ❌ | ✅ | N/A |
-| `UnicodeDecodeError Safety` | ❌ | ✅ | ❌ |
-| `Whl Size (min)` | 193.6 kB | 42 kB | ~200 kB |
-| `Supported Encoding` | 33 | 🎉 [99](https://charset-normalizer.readthedocs.io/en/latest/user/support.html#supported-encodings) | 40 |
-
-

-

-
-*\*\* : They clearly use encoding-specific code, even if it covers most of the encodings in common use.*
-Did you got there because of the logs? See [https://charset-normalizer.readthedocs.io/en/latest/user/miscellaneous.html](https://charset-normalizer.readthedocs.io/en/latest/user/miscellaneous.html) - -## ⚡ Performance - -This package offer better performance than its counterpart Chardet. Here are some numbers. - -| Package | Accuracy | Mean per file (ms) | File per sec (est) | -|-----------------------------------------------|:--------:|:------------------:|:------------------:| -| [chardet](https://github.com/chardet/chardet) | 86 % | 200 ms | 5 file/sec | -| charset-normalizer | **98 %** | **10 ms** | 100 file/sec | - -| Package | 99th percentile | 95th percentile | 50th percentile | -|-----------------------------------------------|:---------------:|:---------------:|:---------------:| -| [chardet](https://github.com/chardet/chardet) | 1200 ms | 287 ms | 23 ms | -| charset-normalizer | 100 ms | 50 ms | 5 ms | - -Chardet's performance on larger file (1MB+) are very poor. Expect huge difference on large payload. - -> Stats are generated using 400+ files using default parameters. More details on used files, see GHA workflows. -> And yes, these results might change at any time. The dataset can be updated to include more files. -> The actual delays heavily depends on your CPU capabilities. The factors should remain the same. -> Keep in mind that the stats are generous and that Chardet accuracy vs our is measured using Chardet initial capability -> (eg. Supported Encoding) Challenge-them if you want. - -## ✨ Installation - -Using pip: - -```sh -pip install charset-normalizer -U -``` - -## 🚀 Basic Usage - -### CLI -This package comes with a CLI. - -``` -usage: normalizer [-h] [-v] [-a] [-n] [-m] [-r] [-f] [-t THRESHOLD] - file [file ...] - -The Real First Universal Charset Detector. Discover originating encoding used -on text file. Normalize text to unicode. - -positional arguments: - files File(s) to be analysed - -optional arguments: - -h, --help show this help message and exit - -v, --verbose Display complementary information about file if any. - Stdout will contain logs about the detection process. - -a, --with-alternative - Output complementary possibilities if any. Top-level - JSON WILL be a list. - -n, --normalize Permit to normalize input file. If not set, program - does not write anything. - -m, --minimal Only output the charset detected to STDOUT. Disabling - JSON output. - -r, --replace Replace file when trying to normalize it instead of - creating a new one. - -f, --force Replace file without asking if you are sure, use this - flag with caution. - -t THRESHOLD, --threshold THRESHOLD - Define a custom maximum amount of chaos allowed in - decoded content. 0. <= chaos <= 1. - --version Show version information and exit. -``` - -```bash -normalizer ./data/sample.1.fr.srt -``` - -or - -```bash -python -m charset_normalizer ./data/sample.1.fr.srt -``` - -🎉 Since version 1.4.0 the CLI produce easily usable stdout result in JSON format. 
- -```json -{ - "path": "/home/default/projects/charset_normalizer/data/sample.1.fr.srt", - "encoding": "cp1252", - "encoding_aliases": [ - "1252", - "windows_1252" - ], - "alternative_encodings": [ - "cp1254", - "cp1256", - "cp1258", - "iso8859_14", - "iso8859_15", - "iso8859_16", - "iso8859_3", - "iso8859_9", - "latin_1", - "mbcs" - ], - "language": "French", - "alphabets": [ - "Basic Latin", - "Latin-1 Supplement" - ], - "has_sig_or_bom": false, - "chaos": 0.149, - "coherence": 97.152, - "unicode_path": null, - "is_preferred": true -} -``` - -### Python -*Just print out normalized text* -```python -from charset_normalizer import from_path - -results = from_path('./my_subtitle.srt') - -print(str(results.best())) -``` - -*Upgrade your code without effort* -```python -from charset_normalizer import detect -``` - -The above code will behave the same as **chardet**. We ensure that we offer the best (reasonable) BC result possible. - -See the docs for advanced usage : [readthedocs.io](https://charset-normalizer.readthedocs.io/en/latest/) - -## 😇 Why - -When I started using Chardet, I noticed that it was not suited to my expectations, and I wanted to propose a -reliable alternative using a completely different method. Also! I never back down on a good challenge! - -I **don't care** about the **originating charset** encoding, because **two different tables** can -produce **two identical rendered string.** -What I want is to get readable text, the best I can. - -In a way, **I'm brute forcing text decoding.** How cool is that ? 😎 - -Don't confuse package **ftfy** with charset-normalizer or chardet. ftfy goal is to repair unicode string whereas charset-normalizer to convert raw file in unknown encoding to unicode. - -## 🍰 How - - - Discard all charset encoding table that could not fit the binary content. - - Measure noise, or the mess once opened (by chunks) with a corresponding charset encoding. - - Extract matches with the lowest mess detected. - - Additionally, we measure coherence / probe for a language. - -**Wait a minute**, what is noise/mess and coherence according to **YOU ?** - -*Noise :* I opened hundred of text files, **written by humans**, with the wrong encoding table. **I observed**, then -**I established** some ground rules about **what is obvious** when **it seems like** a mess. - I know that my interpretation of what is noise is probably incomplete, feel free to contribute in order to - improve or rewrite it. - -*Coherence :* For each language there is on earth, we have computed ranked letter appearance occurrences (the best we can). So I thought -that intel is worth something here. So I use those records against decoded text to check if I can detect intelligent design. - -## ⚡ Known limitations - - - Language detection is unreliable when text contains two or more languages sharing identical letters. (eg. HTML (english tags) + Turkish content (Sharing Latin characters)) - - Every charset detector heavily depends on sufficient content. In common cases, do not bother run detection on very tiny content. - -## ⚠️ About Python EOLs - -**If you are running:** - -- Python >=2.7,<3.5: Unsupported -- Python 3.5: charset-normalizer < 2.1 -- Python 3.6: charset-normalizer < 3.1 -- Python 3.7: charset-normalizer < 4.0 - -Upgrade your Python interpreter as soon as possible. - -## 👤 Contributing - -Contributions, issues and feature requests are very much welcome.
-Feel free to check [issues page](https://github.com/ousret/charset_normalizer/issues) if you want to contribute. - -## 📝 License - -Copyright © [Ahmed TAHRI @Ousret](https://github.com/Ousret).
-This project is [MIT](https://github.com/Ousret/charset_normalizer/blob/master/LICENSE) licensed. - -Characters frequencies used in this project © 2012 [Denny Vrandečić](http://simia.net/letters/) - -## 💼 For Enterprise - -Professional support for charset-normalizer is available as part of the [Tidelift -Subscription][1]. Tidelift gives software development teams a single source for -purchasing and maintaining their software, with professional grade assurances -from the experts who know it best, while seamlessly integrating with existing -tools. - -[1]: https://tidelift.com/subscription/pkg/pypi-charset-normalizer?utm_source=pypi-charset-normalizer&utm_medium=readme - -# Changelog -All notable changes to charset-normalizer will be documented in this file. This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). -The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). - -## [3.4.0](https://github.com/Ousret/charset_normalizer/compare/3.3.2...3.4.0) (2024-10-08) - -### Added -- Argument `--no-preemptive` in the CLI to prevent the detector to search for hints. -- Support for Python 3.13 (#512) - -### Fixed -- Relax the TypeError exception thrown when trying to compare a CharsetMatch with anything else than a CharsetMatch. -- Improved the general reliability of the detector based on user feedbacks. (#520) (#509) (#498) (#407) (#537) -- Declared charset in content (preemptive detection) not changed when converting to utf-8 bytes. (#381) - -## [3.3.2](https://github.com/Ousret/charset_normalizer/compare/3.3.1...3.3.2) (2023-10-31) - -### Fixed -- Unintentional memory usage regression when using large payload that match several encoding (#376) -- Regression on some detection case showcased in the documentation (#371) - -### Added -- Noise (md) probe that identify malformed arabic representation due to the presence of letters in isolated form (credit to my wife) - -## [3.3.1](https://github.com/Ousret/charset_normalizer/compare/3.3.0...3.3.1) (2023-10-22) - -### Changed -- Optional mypyc compilation upgraded to version 1.6.1 for Python >= 3.8 -- Improved the general detection reliability based on reports from the community - -## [3.3.0](https://github.com/Ousret/charset_normalizer/compare/3.2.0...3.3.0) (2023-09-30) - -### Added -- Allow to execute the CLI (e.g. 
normalizer) through `python -m charset_normalizer.cli` or `python -m charset_normalizer` -- Support for 9 forgotten encoding that are supported by Python but unlisted in `encoding.aliases` as they have no alias (#323) - -### Removed -- (internal) Redundant utils.is_ascii function and unused function is_private_use_only -- (internal) charset_normalizer.assets is moved inside charset_normalizer.constant - -### Changed -- (internal) Unicode code blocks in constants are updated using the latest v15.0.0 definition to improve detection -- Optional mypyc compilation upgraded to version 1.5.1 for Python >= 3.8 - -### Fixed -- Unable to properly sort CharsetMatch when both chaos/noise and coherence were close due to an unreachable condition in \_\_lt\_\_ (#350) - -## [3.2.0](https://github.com/Ousret/charset_normalizer/compare/3.1.0...3.2.0) (2023-06-07) - -### Changed -- Typehint for function `from_path` no longer enforce `PathLike` as its first argument -- Minor improvement over the global detection reliability - -### Added -- Introduce function `is_binary` that relies on main capabilities, and optimized to detect binaries -- Propagate `enable_fallback` argument throughout `from_bytes`, `from_path`, and `from_fp` that allow a deeper control over the detection (default True) -- Explicit support for Python 3.12 - -### Fixed -- Edge case detection failure where a file would contain 'very-long' camel cased word (Issue #289) - -## [3.1.0](https://github.com/Ousret/charset_normalizer/compare/3.0.1...3.1.0) (2023-03-06) - -### Added -- Argument `should_rename_legacy` for legacy function `detect` and disregard any new arguments without errors (PR #262) - -### Removed -- Support for Python 3.6 (PR #260) - -### Changed -- Optional speedup provided by mypy/c 1.0.1 - -## [3.0.1](https://github.com/Ousret/charset_normalizer/compare/3.0.0...3.0.1) (2022-11-18) - -### Fixed -- Multi-bytes cutter/chunk generator did not always cut correctly (PR #233) - -### Changed -- Speedup provided by mypy/c 0.990 on Python >= 3.7 - -## [3.0.0](https://github.com/Ousret/charset_normalizer/compare/2.1.1...3.0.0) (2022-10-20) - -### Added -- Extend the capability of explain=True when cp_isolation contains at most two entries (min one), will log in details of the Mess-detector results -- Support for alternative language frequency set in charset_normalizer.assets.FREQUENCIES -- Add parameter `language_threshold` in `from_bytes`, `from_path` and `from_fp` to adjust the minimum expected coherence ratio -- `normalizer --version` now specify if current version provide extra speedup (meaning mypyc compilation whl) - -### Changed -- Build with static metadata using 'build' frontend -- Make the language detection stricter -- Optional: Module `md.py` can be compiled using Mypyc to provide an extra speedup up to 4x faster than v2.1 - -### Fixed -- CLI with opt --normalize fail when using full path for files -- TooManyAccentuatedPlugin induce false positive on the mess detection when too few alpha character have been fed to it -- Sphinx warnings when generating the documentation - -### Removed -- Coherence detector no longer return 'Simple English' instead return 'English' -- Coherence detector no longer return 'Classical Chinese' instead return 'Chinese' -- Breaking: Method `first()` and `best()` from CharsetMatch -- UTF-7 will no longer appear as "detected" without a recognized SIG/mark (is unreliable/conflict with ASCII) -- Breaking: Class aliases CharsetDetector, CharsetDoctor, CharsetNormalizerMatch and CharsetNormalizerMatches -- 
Breaking: Top-level function `normalize` -- Breaking: Properties `chaos_secondary_pass`, `coherence_non_latin` and `w_counter` from CharsetMatch -- Support for the backport `unicodedata2` - -## [3.0.0rc1](https://github.com/Ousret/charset_normalizer/compare/3.0.0b2...3.0.0rc1) (2022-10-18) - -### Added -- Extend the capability of explain=True when cp_isolation contains at most two entries (min one), will log in details of the Mess-detector results -- Support for alternative language frequency set in charset_normalizer.assets.FREQUENCIES -- Add parameter `language_threshold` in `from_bytes`, `from_path` and `from_fp` to adjust the minimum expected coherence ratio - -### Changed -- Build with static metadata using 'build' frontend -- Make the language detection stricter - -### Fixed -- CLI with opt --normalize fail when using full path for files -- TooManyAccentuatedPlugin induce false positive on the mess detection when too few alpha character have been fed to it - -### Removed -- Coherence detector no longer return 'Simple English' instead return 'English' -- Coherence detector no longer return 'Classical Chinese' instead return 'Chinese' - -## [3.0.0b2](https://github.com/Ousret/charset_normalizer/compare/3.0.0b1...3.0.0b2) (2022-08-21) - -### Added -- `normalizer --version` now specify if current version provide extra speedup (meaning mypyc compilation whl) - -### Removed -- Breaking: Method `first()` and `best()` from CharsetMatch -- UTF-7 will no longer appear as "detected" without a recognized SIG/mark (is unreliable/conflict with ASCII) - -### Fixed -- Sphinx warnings when generating the documentation - -## [3.0.0b1](https://github.com/Ousret/charset_normalizer/compare/2.1.0...3.0.0b1) (2022-08-15) - -### Changed -- Optional: Module `md.py` can be compiled using Mypyc to provide an extra speedup up to 4x faster than v2.1 - -### Removed -- Breaking: Class aliases CharsetDetector, CharsetDoctor, CharsetNormalizerMatch and CharsetNormalizerMatches -- Breaking: Top-level function `normalize` -- Breaking: Properties `chaos_secondary_pass`, `coherence_non_latin` and `w_counter` from CharsetMatch -- Support for the backport `unicodedata2` - -## [2.1.1](https://github.com/Ousret/charset_normalizer/compare/2.1.0...2.1.1) (2022-08-19) - -### Deprecated -- Function `normalize` scheduled for removal in 3.0 - -### Changed -- Removed useless call to decode in fn is_unprintable (#206) - -### Fixed -- Third-party library (i18n xgettext) crashing not recognizing utf_8 (PEP 263) with underscore from [@aleksandernovikov](https://github.com/aleksandernovikov) (#204) - -## [2.1.0](https://github.com/Ousret/charset_normalizer/compare/2.0.12...2.1.0) (2022-06-19) - -### Added -- Output the Unicode table version when running the CLI with `--version` (PR #194) - -### Changed -- Re-use decoded buffer for single byte character sets from [@nijel](https://github.com/nijel) (PR #175) -- Fixing some performance bottlenecks from [@deedy5](https://github.com/deedy5) (PR #183) - -### Fixed -- Workaround potential bug in cpython with Zero Width No-Break Space located in Arabic Presentation Forms-B, Unicode 1.1 not acknowledged as space (PR #175) -- CLI default threshold aligned with the API threshold from [@oleksandr-kuzmenko](https://github.com/oleksandr-kuzmenko) (PR #181) - -### Removed -- Support for Python 3.5 (PR #192) - -### Deprecated -- Use of backport unicodedata from `unicodedata2` as Python is quickly catching up, scheduled for removal in 3.0 (PR #194) - -## 
[2.0.12](https://github.com/Ousret/charset_normalizer/compare/2.0.11...2.0.12) (2022-02-12) - -### Fixed -- ASCII miss-detection on rare cases (PR #170) - -## [2.0.11](https://github.com/Ousret/charset_normalizer/compare/2.0.10...2.0.11) (2022-01-30) - -### Added -- Explicit support for Python 3.11 (PR #164) - -### Changed -- The logging behavior have been completely reviewed, now using only TRACE and DEBUG levels (PR #163 #165) - -## [2.0.10](https://github.com/Ousret/charset_normalizer/compare/2.0.9...2.0.10) (2022-01-04) - -### Fixed -- Fallback match entries might lead to UnicodeDecodeError for large bytes sequence (PR #154) - -### Changed -- Skipping the language-detection (CD) on ASCII (PR #155) - -## [2.0.9](https://github.com/Ousret/charset_normalizer/compare/2.0.8...2.0.9) (2021-12-03) - -### Changed -- Moderating the logging impact (since 2.0.8) for specific environments (PR #147) - -### Fixed -- Wrong logging level applied when setting kwarg `explain` to True (PR #146) - -## [2.0.8](https://github.com/Ousret/charset_normalizer/compare/2.0.7...2.0.8) (2021-11-24) -### Changed -- Improvement over Vietnamese detection (PR #126) -- MD improvement on trailing data and long foreign (non-pure latin) data (PR #124) -- Efficiency improvements in cd/alphabet_languages from [@adbar](https://github.com/adbar) (PR #122) -- call sum() without an intermediary list following PEP 289 recommendations from [@adbar](https://github.com/adbar) (PR #129) -- Code style as refactored by Sourcery-AI (PR #131) -- Minor adjustment on the MD around european words (PR #133) -- Remove and replace SRTs from assets / tests (PR #139) -- Initialize the library logger with a `NullHandler` by default from [@nmaynes](https://github.com/nmaynes) (PR #135) -- Setting kwarg `explain` to True will add provisionally (bounded to function lifespan) a specific stream handler (PR #135) - -### Fixed -- Fix large (misleading) sequence giving UnicodeDecodeError (PR #137) -- Avoid using too insignificant chunk (PR #137) - -### Added -- Add and expose function `set_logging_handler` to configure a specific StreamHandler from [@nmaynes](https://github.com/nmaynes) (PR #135) -- Add `CHANGELOG.md` entries, format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) (PR #141) - -## [2.0.7](https://github.com/Ousret/charset_normalizer/compare/2.0.6...2.0.7) (2021-10-11) -### Added -- Add support for Kazakh (Cyrillic) language detection (PR #109) - -### Changed -- Further, improve inferring the language from a given single-byte code page (PR #112) -- Vainly trying to leverage PEP263 when PEP3120 is not supported (PR #116) -- Refactoring for potential performance improvements in loops from [@adbar](https://github.com/adbar) (PR #113) -- Various detection improvement (MD+CD) (PR #117) - -### Removed -- Remove redundant logging entry about detected language(s) (PR #115) - -### Fixed -- Fix a minor inconsistency between Python 3.5 and other versions regarding language detection (PR #117 #102) - -## [2.0.6](https://github.com/Ousret/charset_normalizer/compare/2.0.5...2.0.6) (2021-09-18) -### Fixed -- Unforeseen regression with the loss of the backward-compatibility with some older minor of Python 3.5.x (PR #100) -- Fix CLI crash when using --minimal output in certain cases (PR #103) - -### Changed -- Minor improvement to the detection efficiency (less than 1%) (PR #106 #101) - -## [2.0.5](https://github.com/Ousret/charset_normalizer/compare/2.0.4...2.0.5) (2021-09-14) -### Changed -- The project now comply with: flake8, mypy, 
isort and black to ensure a better overall quality (PR #81) -- The BC-support with v1.x was improved, the old staticmethods are restored (PR #82) -- The Unicode detection is slightly improved (PR #93) -- Add syntax sugar \_\_bool\_\_ for results CharsetMatches list-container (PR #91) - -### Removed -- The project no longer raise warning on tiny content given for detection, will be simply logged as warning instead (PR #92) - -### Fixed -- In some rare case, the chunks extractor could cut in the middle of a multi-byte character and could mislead the mess detection (PR #95) -- Some rare 'space' characters could trip up the UnprintablePlugin/Mess detection (PR #96) -- The MANIFEST.in was not exhaustive (PR #78) - -## [2.0.4](https://github.com/Ousret/charset_normalizer/compare/2.0.3...2.0.4) (2021-07-30) -### Fixed -- The CLI no longer raise an unexpected exception when no encoding has been found (PR #70) -- Fix accessing the 'alphabets' property when the payload contains surrogate characters (PR #68) -- The logger could mislead (explain=True) on detected languages and the impact of one MBCS match (PR #72) -- Submatch factoring could be wrong in rare edge cases (PR #72) -- Multiple files given to the CLI were ignored when publishing results to STDOUT. (After the first path) (PR #72) -- Fix line endings from CRLF to LF for certain project files (PR #67) - -### Changed -- Adjust the MD to lower the sensitivity, thus improving the global detection reliability (PR #69 #76) -- Allow fallback on specified encoding if any (PR #71) - -## [2.0.3](https://github.com/Ousret/charset_normalizer/compare/2.0.2...2.0.3) (2021-07-16) -### Changed -- Part of the detection mechanism has been improved to be less sensitive, resulting in more accurate detection results. Especially ASCII. (PR #63) -- According to the community wishes, the detection will fall back on ASCII or UTF-8 in a last-resort case. (PR #64) - -## [2.0.2](https://github.com/Ousret/charset_normalizer/compare/2.0.1...2.0.2) (2021-07-15) -### Fixed -- Empty/Too small JSON payload miss-detection fixed. Report from [@tseaver](https://github.com/tseaver) (PR #59) - -### Changed -- Don't inject unicodedata2 into sys.modules from [@akx](https://github.com/akx) (PR #57) - -## [2.0.1](https://github.com/Ousret/charset_normalizer/compare/2.0.0...2.0.1) (2021-07-13) -### Fixed -- Make it work where there isn't a filesystem available, dropping assets frequencies.json. Report from [@sethmlarson](https://github.com/sethmlarson). (PR #55) -- Using explain=False permanently disable the verbose output in the current runtime (PR #47) -- One log entry (language target preemptive) was not show in logs when using explain=True (PR #47) -- Fix undesired exception (ValueError) on getitem of instance CharsetMatches (PR #52) - -### Changed -- Public function normalize default args values were not aligned with from_bytes (PR #53) - -### Added -- You may now use charset aliases in cp_isolation and cp_exclusion arguments (PR #47) - -## [2.0.0](https://github.com/Ousret/charset_normalizer/compare/1.4.1...2.0.0) (2021-07-02) -### Changed -- 4x to 5 times faster than the previous 1.4.0 release. At least 2x faster than Chardet. -- Accent has been made on UTF-8 detection, should perform rather instantaneous. -- The backward compatibility with Chardet has been greatly improved. The legacy detect function returns an identical charset name whenever possible. 
-- The detection mechanism has been slightly improved, now Turkish content is detected correctly (most of the time) -- The program has been rewritten to ease the readability and maintainability. (+Using static typing)+ -- utf_7 detection has been reinstated. - -### Removed -- This package no longer require anything when used with Python 3.5 (Dropped cached_property) -- Removed support for these languages: Catalan, Esperanto, Kazakh, Baque, Volapük, Azeri, Galician, Nynorsk, Macedonian, and Serbocroatian. -- The exception hook on UnicodeDecodeError has been removed. - -### Deprecated -- Methods coherence_non_latin, w_counter, chaos_secondary_pass of the class CharsetMatch are now deprecated and scheduled for removal in v3.0 - -### Fixed -- The CLI output used the relative path of the file(s). Should be absolute. - -## [1.4.1](https://github.com/Ousret/charset_normalizer/compare/1.4.0...1.4.1) (2021-05-28) -### Fixed -- Logger configuration/usage no longer conflict with others (PR #44) - -## [1.4.0](https://github.com/Ousret/charset_normalizer/compare/1.3.9...1.4.0) (2021-05-21) -### Removed -- Using standard logging instead of using the package loguru. -- Dropping nose test framework in favor of the maintained pytest. -- Choose to not use dragonmapper package to help with gibberish Chinese/CJK text. -- Require cached_property only for Python 3.5 due to constraint. Dropping for every other interpreter version. -- Stop support for UTF-7 that does not contain a SIG. -- Dropping PrettyTable, replaced with pure JSON output in CLI. - -### Fixed -- BOM marker in a CharsetNormalizerMatch instance could be False in rare cases even if obviously present. Due to the sub-match factoring process. -- Not searching properly for the BOM when trying utf32/16 parent codec. - -### Changed -- Improving the package final size by compressing frequencies.json. -- Huge improvement over the larges payload. - -### Added -- CLI now produces JSON consumable output. -- Return ASCII if given sequences fit. Given reasonable confidence. - -## [1.3.9](https://github.com/Ousret/charset_normalizer/compare/1.3.8...1.3.9) (2021-05-13) - -### Fixed -- In some very rare cases, you may end up getting encode/decode errors due to a bad bytes payload (PR #40) - -## [1.3.8](https://github.com/Ousret/charset_normalizer/compare/1.3.7...1.3.8) (2021-05-12) - -### Fixed -- Empty given payload for detection may cause an exception if trying to access the `alphabets` property. (PR #39) - -## [1.3.7](https://github.com/Ousret/charset_normalizer/compare/1.3.6...1.3.7) (2021-05-12) - -### Fixed -- The legacy detect function should return UTF-8-SIG if sig is present in the payload. (PR #38) - -## [1.3.6](https://github.com/Ousret/charset_normalizer/compare/1.3.5...1.3.6) (2021-02-09) - -### Changed -- Amend the previous release to allow prettytable 2.0 (PR #35) - -## [1.3.5](https://github.com/Ousret/charset_normalizer/compare/1.3.4...1.3.5) (2021-02-08) - -### Fixed -- Fix error while using the package with a python pre-release interpreter (PR #33) - -### Changed -- Dependencies refactoring, constraints revised. - -### Added -- Add python 3.9 and 3.10 to the supported interpreters - -MIT License - -Copyright (c) 2019 TAHRI Ahmed R. 
- -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/env/lib/python3.10/site-packages/charset_normalizer-3.4.0.dist-info/RECORD b/env/lib/python3.10/site-packages/charset_normalizer-3.4.0.dist-info/RECORD deleted file mode 100644 index c6fcf04..0000000 --- a/env/lib/python3.10/site-packages/charset_normalizer-3.4.0.dist-info/RECORD +++ /dev/null @@ -1,35 +0,0 @@ -../../../bin/normalizer,sha256=k9XpFXENxZk-EkOKUzYb0y3ObcpZrZFiiyJ1qcO9zSc,279 -charset_normalizer-3.4.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -charset_normalizer-3.4.0.dist-info/LICENSE,sha256=6zGgxaT7Cbik4yBV0lweX5w1iidS_vPNcgIT0cz-4kE,1070 -charset_normalizer-3.4.0.dist-info/METADATA,sha256=WGbEW9ehh2spNJxo1M6sEGGZWmsQ-oj2DsMjV29zoms,34159 -charset_normalizer-3.4.0.dist-info/RECORD,, -charset_normalizer-3.4.0.dist-info/WHEEL,sha256=VXRyidHovicsPXAYYBPK-lnsPgFrrhXkyzySBEhHzcg,151 -charset_normalizer-3.4.0.dist-info/entry_points.txt,sha256=ADSTKrkXZ3hhdOVFi6DcUEHQRS0xfxDIE_pEz4wLIXA,65 -charset_normalizer-3.4.0.dist-info/top_level.txt,sha256=7ASyzePr8_xuZWJsnqJjIBtyV8vhEo0wBCv1MPRRi3Q,19 -charset_normalizer/__init__.py,sha256=UzI3xC8PhmcLRMzSgPb6minTmRq0kWznnCBJ8ZCc2XI,1577 -charset_normalizer/__main__.py,sha256=JxY8bleaENOFlLRb9HfoeZCzAMnn2A1oGR5Xm2eyqg0,73 -charset_normalizer/__pycache__/__init__.cpython-310.pyc,, -charset_normalizer/__pycache__/__main__.cpython-310.pyc,, -charset_normalizer/__pycache__/api.cpython-310.pyc,, -charset_normalizer/__pycache__/cd.cpython-310.pyc,, -charset_normalizer/__pycache__/constant.cpython-310.pyc,, -charset_normalizer/__pycache__/legacy.cpython-310.pyc,, -charset_normalizer/__pycache__/md.cpython-310.pyc,, -charset_normalizer/__pycache__/models.cpython-310.pyc,, -charset_normalizer/__pycache__/utils.cpython-310.pyc,, -charset_normalizer/__pycache__/version.cpython-310.pyc,, -charset_normalizer/api.py,sha256=kMyNUqrfBZU22PP0pYKrSldtYUGA24wsGlXGLAKra7c,22559 -charset_normalizer/cd.py,sha256=xwZliZcTQFA3jU0c00PRiu9MNxXTFxQkFLWmMW24ZzI,12560 -charset_normalizer/cli/__init__.py,sha256=D5ERp8P62llm2FuoMzydZ7d9rs8cvvLXqE-1_6oViPc,100 -charset_normalizer/cli/__main__.py,sha256=zX9sV_ApU1d96Wb0cS04vulstdB4F0Eh7kLn-gevfw4,10411 -charset_normalizer/cli/__pycache__/__init__.cpython-310.pyc,, -charset_normalizer/cli/__pycache__/__main__.cpython-310.pyc,, -charset_normalizer/constant.py,sha256=uwoW87NicWZDTLviX7le0wdoYBbhBQDA4n1JtJo77ts,40499 -charset_normalizer/legacy.py,sha256=XJjkT0hejMH8qfAKz1ts8OUiBT18t2FJP3tJgLwUWwc,2327 
-charset_normalizer/md.cpython-310-x86_64-linux-gnu.so,sha256=Y7QSLD5QLoSFAWys0-tL7R6QB7oi5864zM6zr7RWek4,16064 -charset_normalizer/md.py,sha256=SIIZcENrslI7h3v4GigbFN61fRyE_wiCN1z9Ii3fBRo,20138 -charset_normalizer/md__mypyc.cpython-310-x86_64-linux-gnu.so,sha256=8k3u8X3VqAfE2x1upAeiW8N3GT72CsqJVtHUIqFwXZE,276808 -charset_normalizer/models.py,sha256=oAMAcBSEY7CngbUXJp34Wc4Rl9NKJJjGmUwW3EPtk6g,12425 -charset_normalizer/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -charset_normalizer/utils.py,sha256=teiosMqzKjXyAHXnGdjSBOgnBZwx-SkBbCLrx0UXy8M,11894 -charset_normalizer/version.py,sha256=AX66S4ytQFdd6F5jbVU2OPMqYwFS5M3BkMvyX-3BKF8,79 diff --git a/env/lib/python3.10/site-packages/charset_normalizer-3.4.0.dist-info/WHEEL b/env/lib/python3.10/site-packages/charset_normalizer-3.4.0.dist-info/WHEEL deleted file mode 100644 index 0b1249e..0000000 --- a/env/lib/python3.10/site-packages/charset_normalizer-3.4.0.dist-info/WHEEL +++ /dev/null @@ -1,6 +0,0 @@ -Wheel-Version: 1.0 -Generator: setuptools (75.1.0) -Root-Is-Purelib: false -Tag: cp310-cp310-manylinux_2_17_x86_64 -Tag: cp310-cp310-manylinux2014_x86_64 - diff --git a/env/lib/python3.10/site-packages/charset_normalizer-3.4.0.dist-info/entry_points.txt b/env/lib/python3.10/site-packages/charset_normalizer-3.4.0.dist-info/entry_points.txt deleted file mode 100644 index 65619e7..0000000 --- a/env/lib/python3.10/site-packages/charset_normalizer-3.4.0.dist-info/entry_points.txt +++ /dev/null @@ -1,2 +0,0 @@ -[console_scripts] -normalizer = charset_normalizer.cli:cli_detect diff --git a/env/lib/python3.10/site-packages/charset_normalizer-3.4.0.dist-info/top_level.txt b/env/lib/python3.10/site-packages/charset_normalizer-3.4.0.dist-info/top_level.txt deleted file mode 100644 index 66958f0..0000000 --- a/env/lib/python3.10/site-packages/charset_normalizer-3.4.0.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -charset_normalizer diff --git a/env/lib/python3.10/site-packages/charset_normalizer/__init__.py b/env/lib/python3.10/site-packages/charset_normalizer/__init__.py deleted file mode 100644 index 55991fc..0000000 --- a/env/lib/python3.10/site-packages/charset_normalizer/__init__.py +++ /dev/null @@ -1,46 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Charset-Normalizer -~~~~~~~~~~~~~~ -The Real First Universal Charset Detector. -A library that helps you read text from an unknown charset encoding. -Motivated by chardet, This package is trying to resolve the issue by taking a new approach. -All IANA character set names for which the Python core library provides codecs are supported. - -Basic usage: - >>> from charset_normalizer import from_bytes - >>> results = from_bytes('Bсеки човек има право на образование. Oбразованието!'.encode('utf_8')) - >>> best_guess = results.best() - >>> str(best_guess) - 'Bсеки човек има право на образование. Oбразованието!' - -Others methods and usages are available - see the full documentation -at . -:copyright: (c) 2021 by Ahmed TAHRI -:license: MIT, see LICENSE for more details. 
-""" -import logging - -from .api import from_bytes, from_fp, from_path, is_binary -from .legacy import detect -from .models import CharsetMatch, CharsetMatches -from .utils import set_logging_handler -from .version import VERSION, __version__ - -__all__ = ( - "from_fp", - "from_path", - "from_bytes", - "is_binary", - "detect", - "CharsetMatch", - "CharsetMatches", - "__version__", - "VERSION", - "set_logging_handler", -) - -# Attach a NullHandler to the top level logger by default -# https://docs.python.org/3.3/howto/logging.html#configuring-logging-for-a-library - -logging.getLogger("charset_normalizer").addHandler(logging.NullHandler()) diff --git a/env/lib/python3.10/site-packages/charset_normalizer/__main__.py b/env/lib/python3.10/site-packages/charset_normalizer/__main__.py deleted file mode 100644 index beae2ef..0000000 --- a/env/lib/python3.10/site-packages/charset_normalizer/__main__.py +++ /dev/null @@ -1,4 +0,0 @@ -from .cli import cli_detect - -if __name__ == "__main__": - cli_detect() diff --git a/env/lib/python3.10/site-packages/charset_normalizer/__pycache__/__init__.cpython-310.pyc b/env/lib/python3.10/site-packages/charset_normalizer/__pycache__/__init__.cpython-310.pyc deleted file mode 100644 index 3e03e38..0000000 Binary files a/env/lib/python3.10/site-packages/charset_normalizer/__pycache__/__init__.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/charset_normalizer/__pycache__/__main__.cpython-310.pyc b/env/lib/python3.10/site-packages/charset_normalizer/__pycache__/__main__.cpython-310.pyc deleted file mode 100644 index c066953..0000000 Binary files a/env/lib/python3.10/site-packages/charset_normalizer/__pycache__/__main__.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/charset_normalizer/__pycache__/api.cpython-310.pyc b/env/lib/python3.10/site-packages/charset_normalizer/__pycache__/api.cpython-310.pyc deleted file mode 100644 index bc4bf3f..0000000 Binary files a/env/lib/python3.10/site-packages/charset_normalizer/__pycache__/api.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/charset_normalizer/__pycache__/cd.cpython-310.pyc b/env/lib/python3.10/site-packages/charset_normalizer/__pycache__/cd.cpython-310.pyc deleted file mode 100644 index 17e047f..0000000 Binary files a/env/lib/python3.10/site-packages/charset_normalizer/__pycache__/cd.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/charset_normalizer/__pycache__/constant.cpython-310.pyc b/env/lib/python3.10/site-packages/charset_normalizer/__pycache__/constant.cpython-310.pyc deleted file mode 100644 index b0c5a11..0000000 Binary files a/env/lib/python3.10/site-packages/charset_normalizer/__pycache__/constant.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/charset_normalizer/__pycache__/legacy.cpython-310.pyc b/env/lib/python3.10/site-packages/charset_normalizer/__pycache__/legacy.cpython-310.pyc deleted file mode 100644 index 5653243..0000000 Binary files a/env/lib/python3.10/site-packages/charset_normalizer/__pycache__/legacy.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/charset_normalizer/__pycache__/md.cpython-310.pyc b/env/lib/python3.10/site-packages/charset_normalizer/__pycache__/md.cpython-310.pyc deleted file mode 100644 index 6590dcd..0000000 Binary files a/env/lib/python3.10/site-packages/charset_normalizer/__pycache__/md.cpython-310.pyc and /dev/null differ diff --git 
a/env/lib/python3.10/site-packages/charset_normalizer/__pycache__/models.cpython-310.pyc b/env/lib/python3.10/site-packages/charset_normalizer/__pycache__/models.cpython-310.pyc deleted file mode 100644 index 453e518..0000000 Binary files a/env/lib/python3.10/site-packages/charset_normalizer/__pycache__/models.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/charset_normalizer/__pycache__/utils.cpython-310.pyc b/env/lib/python3.10/site-packages/charset_normalizer/__pycache__/utils.cpython-310.pyc deleted file mode 100644 index 2b35610..0000000 Binary files a/env/lib/python3.10/site-packages/charset_normalizer/__pycache__/utils.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/charset_normalizer/__pycache__/version.cpython-310.pyc b/env/lib/python3.10/site-packages/charset_normalizer/__pycache__/version.cpython-310.pyc deleted file mode 100644 index 5355e84..0000000 Binary files a/env/lib/python3.10/site-packages/charset_normalizer/__pycache__/version.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/charset_normalizer/api.py b/env/lib/python3.10/site-packages/charset_normalizer/api.py deleted file mode 100644 index e3f2283..0000000 --- a/env/lib/python3.10/site-packages/charset_normalizer/api.py +++ /dev/null @@ -1,668 +0,0 @@ -import logging -from os import PathLike -from typing import BinaryIO, List, Optional, Set, Union - -from .cd import ( - coherence_ratio, - encoding_languages, - mb_encoding_languages, - merge_coherence_ratios, -) -from .constant import IANA_SUPPORTED, TOO_BIG_SEQUENCE, TOO_SMALL_SEQUENCE, TRACE -from .md import mess_ratio -from .models import CharsetMatch, CharsetMatches -from .utils import ( - any_specified_encoding, - cut_sequence_chunks, - iana_name, - identify_sig_or_bom, - is_cp_similar, - is_multi_byte_encoding, - should_strip_sig_or_bom, -) - -# Will most likely be controversial -# logging.addLevelName(TRACE, "TRACE") -logger = logging.getLogger("charset_normalizer") -explain_handler = logging.StreamHandler() -explain_handler.setFormatter( - logging.Formatter("%(asctime)s | %(levelname)s | %(message)s") -) - - -def from_bytes( - sequences: Union[bytes, bytearray], - steps: int = 5, - chunk_size: int = 512, - threshold: float = 0.2, - cp_isolation: Optional[List[str]] = None, - cp_exclusion: Optional[List[str]] = None, - preemptive_behaviour: bool = True, - explain: bool = False, - language_threshold: float = 0.1, - enable_fallback: bool = True, -) -> CharsetMatches: - """ - Given a raw bytes sequence, return the best possibles charset usable to render str objects. - If there is no results, it is a strong indicator that the source is binary/not text. - By default, the process will extract 5 blocks of 512o each to assess the mess and coherence of a given sequence. - And will give up a particular code page after 20% of measured mess. Those criteria are customizable at will. - - The preemptive behavior DOES NOT replace the traditional detection workflow, it prioritize a particular code page - but never take it for granted. Can improve the performance. - - You may want to focus your attention to some code page or/and not others, use cp_isolation and cp_exclusion for that - purpose. - - This function will strip the SIG in the payload/sequence every time except on UTF-16, UTF-32. 
- By default the library does not setup any handler other than the NullHandler, if you choose to set the 'explain' - toggle to True it will alter the logger configuration to add a StreamHandler that is suitable for debugging. - Custom logging format and handler can be set manually. - """ - - if not isinstance(sequences, (bytearray, bytes)): - raise TypeError( - "Expected object of type bytes or bytearray, got: {0}".format( - type(sequences) - ) - ) - - if explain: - previous_logger_level: int = logger.level - logger.addHandler(explain_handler) - logger.setLevel(TRACE) - - length: int = len(sequences) - - if length == 0: - logger.debug("Encoding detection on empty bytes, assuming utf_8 intention.") - if explain: - logger.removeHandler(explain_handler) - logger.setLevel(previous_logger_level or logging.WARNING) - return CharsetMatches([CharsetMatch(sequences, "utf_8", 0.0, False, [], "")]) - - if cp_isolation is not None: - logger.log( - TRACE, - "cp_isolation is set. use this flag for debugging purpose. " - "limited list of encoding allowed : %s.", - ", ".join(cp_isolation), - ) - cp_isolation = [iana_name(cp, False) for cp in cp_isolation] - else: - cp_isolation = [] - - if cp_exclusion is not None: - logger.log( - TRACE, - "cp_exclusion is set. use this flag for debugging purpose. " - "limited list of encoding excluded : %s.", - ", ".join(cp_exclusion), - ) - cp_exclusion = [iana_name(cp, False) for cp in cp_exclusion] - else: - cp_exclusion = [] - - if length <= (chunk_size * steps): - logger.log( - TRACE, - "override steps (%i) and chunk_size (%i) as content does not fit (%i byte(s) given) parameters.", - steps, - chunk_size, - length, - ) - steps = 1 - chunk_size = length - - if steps > 1 and length / steps < chunk_size: - chunk_size = int(length / steps) - - is_too_small_sequence: bool = len(sequences) < TOO_SMALL_SEQUENCE - is_too_large_sequence: bool = len(sequences) >= TOO_BIG_SEQUENCE - - if is_too_small_sequence: - logger.log( - TRACE, - "Trying to detect encoding from a tiny portion of ({}) byte(s).".format( - length - ), - ) - elif is_too_large_sequence: - logger.log( - TRACE, - "Using lazy str decoding because the payload is quite large, ({}) byte(s).".format( - length - ), - ) - - prioritized_encodings: List[str] = [] - - specified_encoding: Optional[str] = ( - any_specified_encoding(sequences) if preemptive_behaviour else None - ) - - if specified_encoding is not None: - prioritized_encodings.append(specified_encoding) - logger.log( - TRACE, - "Detected declarative mark in sequence. Priority +1 given for %s.", - specified_encoding, - ) - - tested: Set[str] = set() - tested_but_hard_failure: List[str] = [] - tested_but_soft_failure: List[str] = [] - - fallback_ascii: Optional[CharsetMatch] = None - fallback_u8: Optional[CharsetMatch] = None - fallback_specified: Optional[CharsetMatch] = None - - results: CharsetMatches = CharsetMatches() - - early_stop_results: CharsetMatches = CharsetMatches() - - sig_encoding, sig_payload = identify_sig_or_bom(sequences) - - if sig_encoding is not None: - prioritized_encodings.append(sig_encoding) - logger.log( - TRACE, - "Detected a SIG or BOM mark on first %i byte(s). 
Priority +1 given for %s.", - len(sig_payload), - sig_encoding, - ) - - prioritized_encodings.append("ascii") - - if "utf_8" not in prioritized_encodings: - prioritized_encodings.append("utf_8") - - for encoding_iana in prioritized_encodings + IANA_SUPPORTED: - if cp_isolation and encoding_iana not in cp_isolation: - continue - - if cp_exclusion and encoding_iana in cp_exclusion: - continue - - if encoding_iana in tested: - continue - - tested.add(encoding_iana) - - decoded_payload: Optional[str] = None - bom_or_sig_available: bool = sig_encoding == encoding_iana - strip_sig_or_bom: bool = bom_or_sig_available and should_strip_sig_or_bom( - encoding_iana - ) - - if encoding_iana in {"utf_16", "utf_32"} and not bom_or_sig_available: - logger.log( - TRACE, - "Encoding %s won't be tested as-is because it require a BOM. Will try some sub-encoder LE/BE.", - encoding_iana, - ) - continue - if encoding_iana in {"utf_7"} and not bom_or_sig_available: - logger.log( - TRACE, - "Encoding %s won't be tested as-is because detection is unreliable without BOM/SIG.", - encoding_iana, - ) - continue - - try: - is_multi_byte_decoder: bool = is_multi_byte_encoding(encoding_iana) - except (ModuleNotFoundError, ImportError): - logger.log( - TRACE, - "Encoding %s does not provide an IncrementalDecoder", - encoding_iana, - ) - continue - - try: - if is_too_large_sequence and is_multi_byte_decoder is False: - str( - ( - sequences[: int(50e4)] - if strip_sig_or_bom is False - else sequences[len(sig_payload) : int(50e4)] - ), - encoding=encoding_iana, - ) - else: - decoded_payload = str( - ( - sequences - if strip_sig_or_bom is False - else sequences[len(sig_payload) :] - ), - encoding=encoding_iana, - ) - except (UnicodeDecodeError, LookupError) as e: - if not isinstance(e, LookupError): - logger.log( - TRACE, - "Code page %s does not fit given bytes sequence at ALL. %s", - encoding_iana, - str(e), - ) - tested_but_hard_failure.append(encoding_iana) - continue - - similar_soft_failure_test: bool = False - - for encoding_soft_failed in tested_but_soft_failure: - if is_cp_similar(encoding_iana, encoding_soft_failed): - similar_soft_failure_test = True - break - - if similar_soft_failure_test: - logger.log( - TRACE, - "%s is deemed too similar to code page %s and was consider unsuited already. 
Continuing!", - encoding_iana, - encoding_soft_failed, - ) - continue - - r_ = range( - 0 if not bom_or_sig_available else len(sig_payload), - length, - int(length / steps), - ) - - multi_byte_bonus: bool = ( - is_multi_byte_decoder - and decoded_payload is not None - and len(decoded_payload) < length - ) - - if multi_byte_bonus: - logger.log( - TRACE, - "Code page %s is a multi byte encoding table and it appear that at least one character " - "was encoded using n-bytes.", - encoding_iana, - ) - - max_chunk_gave_up: int = int(len(r_) / 4) - - max_chunk_gave_up = max(max_chunk_gave_up, 2) - early_stop_count: int = 0 - lazy_str_hard_failure = False - - md_chunks: List[str] = [] - md_ratios = [] - - try: - for chunk in cut_sequence_chunks( - sequences, - encoding_iana, - r_, - chunk_size, - bom_or_sig_available, - strip_sig_or_bom, - sig_payload, - is_multi_byte_decoder, - decoded_payload, - ): - md_chunks.append(chunk) - - md_ratios.append( - mess_ratio( - chunk, - threshold, - explain is True and 1 <= len(cp_isolation) <= 2, - ) - ) - - if md_ratios[-1] >= threshold: - early_stop_count += 1 - - if (early_stop_count >= max_chunk_gave_up) or ( - bom_or_sig_available and strip_sig_or_bom is False - ): - break - except ( - UnicodeDecodeError - ) as e: # Lazy str loading may have missed something there - logger.log( - TRACE, - "LazyStr Loading: After MD chunk decode, code page %s does not fit given bytes sequence at ALL. %s", - encoding_iana, - str(e), - ) - early_stop_count = max_chunk_gave_up - lazy_str_hard_failure = True - - # We might want to check the sequence again with the whole content - # Only if initial MD tests passes - if ( - not lazy_str_hard_failure - and is_too_large_sequence - and not is_multi_byte_decoder - ): - try: - sequences[int(50e3) :].decode(encoding_iana, errors="strict") - except UnicodeDecodeError as e: - logger.log( - TRACE, - "LazyStr Loading: After final lookup, code page %s does not fit given bytes sequence at ALL. %s", - encoding_iana, - str(e), - ) - tested_but_hard_failure.append(encoding_iana) - continue - - mean_mess_ratio: float = sum(md_ratios) / len(md_ratios) if md_ratios else 0.0 - if mean_mess_ratio >= threshold or early_stop_count >= max_chunk_gave_up: - tested_but_soft_failure.append(encoding_iana) - logger.log( - TRACE, - "%s was excluded because of initial chaos probing. Gave up %i time(s). " - "Computed mean chaos is %f %%.", - encoding_iana, - early_stop_count, - round(mean_mess_ratio * 100, ndigits=3), - ) - # Preparing those fallbacks in case we got nothing. - if ( - enable_fallback - and encoding_iana in ["ascii", "utf_8", specified_encoding] - and not lazy_str_hard_failure - ): - fallback_entry = CharsetMatch( - sequences, - encoding_iana, - threshold, - False, - [], - decoded_payload, - preemptive_declaration=specified_encoding, - ) - if encoding_iana == specified_encoding: - fallback_specified = fallback_entry - elif encoding_iana == "ascii": - fallback_ascii = fallback_entry - else: - fallback_u8 = fallback_entry - continue - - logger.log( - TRACE, - "%s passed initial chaos probing. 
Mean measured chaos is %f %%", - encoding_iana, - round(mean_mess_ratio * 100, ndigits=3), - ) - - if not is_multi_byte_decoder: - target_languages: List[str] = encoding_languages(encoding_iana) - else: - target_languages = mb_encoding_languages(encoding_iana) - - if target_languages: - logger.log( - TRACE, - "{} should target any language(s) of {}".format( - encoding_iana, str(target_languages) - ), - ) - - cd_ratios = [] - - # We shall skip the CD when its about ASCII - # Most of the time its not relevant to run "language-detection" on it. - if encoding_iana != "ascii": - for chunk in md_chunks: - chunk_languages = coherence_ratio( - chunk, - language_threshold, - ",".join(target_languages) if target_languages else None, - ) - - cd_ratios.append(chunk_languages) - - cd_ratios_merged = merge_coherence_ratios(cd_ratios) - - if cd_ratios_merged: - logger.log( - TRACE, - "We detected language {} using {}".format( - cd_ratios_merged, encoding_iana - ), - ) - - current_match = CharsetMatch( - sequences, - encoding_iana, - mean_mess_ratio, - bom_or_sig_available, - cd_ratios_merged, - ( - decoded_payload - if ( - is_too_large_sequence is False - or encoding_iana in [specified_encoding, "ascii", "utf_8"] - ) - else None - ), - preemptive_declaration=specified_encoding, - ) - - results.append(current_match) - - if ( - encoding_iana in [specified_encoding, "ascii", "utf_8"] - and mean_mess_ratio < 0.1 - ): - # If md says nothing to worry about, then... stop immediately! - if mean_mess_ratio == 0.0: - logger.debug( - "Encoding detection: %s is most likely the one.", - current_match.encoding, - ) - if explain: - logger.removeHandler(explain_handler) - logger.setLevel(previous_logger_level) - return CharsetMatches([current_match]) - - early_stop_results.append(current_match) - - if ( - len(early_stop_results) - and (specified_encoding is None or specified_encoding in tested) - and "ascii" in tested - and "utf_8" in tested - ): - probable_result: CharsetMatch = early_stop_results.best() # type: ignore[assignment] - logger.debug( - "Encoding detection: %s is most likely the one.", - probable_result.encoding, - ) - if explain: - logger.removeHandler(explain_handler) - logger.setLevel(previous_logger_level) - - return CharsetMatches([probable_result]) - - if encoding_iana == sig_encoding: - logger.debug( - "Encoding detection: %s is most likely the one as we detected a BOM or SIG within " - "the beginning of the sequence.", - encoding_iana, - ) - if explain: - logger.removeHandler(explain_handler) - logger.setLevel(previous_logger_level) - return CharsetMatches([results[encoding_iana]]) - - if len(results) == 0: - if fallback_u8 or fallback_ascii or fallback_specified: - logger.log( - TRACE, - "Nothing got out of the detection process. Using ASCII/UTF-8/Specified fallback.", - ) - - if fallback_specified: - logger.debug( - "Encoding detection: %s will be used as a fallback match", - fallback_specified.encoding, - ) - results.append(fallback_specified) - elif ( - (fallback_u8 and fallback_ascii is None) - or ( - fallback_u8 - and fallback_ascii - and fallback_u8.fingerprint != fallback_ascii.fingerprint - ) - or (fallback_u8 is not None) - ): - logger.debug("Encoding detection: utf_8 will be used as a fallback match") - results.append(fallback_u8) - elif fallback_ascii: - logger.debug("Encoding detection: ascii will be used as a fallback match") - results.append(fallback_ascii) - - if results: - logger.debug( - "Encoding detection: Found %s as plausible (best-candidate) for content. 
With %i alternatives.", - results.best().encoding, # type: ignore - len(results) - 1, - ) - else: - logger.debug("Encoding detection: Unable to determine any suitable charset.") - - if explain: - logger.removeHandler(explain_handler) - logger.setLevel(previous_logger_level) - - return results - - -def from_fp( - fp: BinaryIO, - steps: int = 5, - chunk_size: int = 512, - threshold: float = 0.20, - cp_isolation: Optional[List[str]] = None, - cp_exclusion: Optional[List[str]] = None, - preemptive_behaviour: bool = True, - explain: bool = False, - language_threshold: float = 0.1, - enable_fallback: bool = True, -) -> CharsetMatches: - """ - Same thing than the function from_bytes but using a file pointer that is already ready. - Will not close the file pointer. - """ - return from_bytes( - fp.read(), - steps, - chunk_size, - threshold, - cp_isolation, - cp_exclusion, - preemptive_behaviour, - explain, - language_threshold, - enable_fallback, - ) - - -def from_path( - path: Union[str, bytes, PathLike], # type: ignore[type-arg] - steps: int = 5, - chunk_size: int = 512, - threshold: float = 0.20, - cp_isolation: Optional[List[str]] = None, - cp_exclusion: Optional[List[str]] = None, - preemptive_behaviour: bool = True, - explain: bool = False, - language_threshold: float = 0.1, - enable_fallback: bool = True, -) -> CharsetMatches: - """ - Same thing than the function from_bytes but with one extra step. Opening and reading given file path in binary mode. - Can raise IOError. - """ - with open(path, "rb") as fp: - return from_fp( - fp, - steps, - chunk_size, - threshold, - cp_isolation, - cp_exclusion, - preemptive_behaviour, - explain, - language_threshold, - enable_fallback, - ) - - -def is_binary( - fp_or_path_or_payload: Union[PathLike, str, BinaryIO, bytes], # type: ignore[type-arg] - steps: int = 5, - chunk_size: int = 512, - threshold: float = 0.20, - cp_isolation: Optional[List[str]] = None, - cp_exclusion: Optional[List[str]] = None, - preemptive_behaviour: bool = True, - explain: bool = False, - language_threshold: float = 0.1, - enable_fallback: bool = False, -) -> bool: - """ - Detect if the given input (file, bytes, or path) points to a binary file. aka. not a string. - Based on the same main heuristic algorithms and default kwargs at the sole exception that fallbacks match - are disabled to be stricter around ASCII-compatible but unlikely to be a string. 
- """ - if isinstance(fp_or_path_or_payload, (str, PathLike)): - guesses = from_path( - fp_or_path_or_payload, - steps=steps, - chunk_size=chunk_size, - threshold=threshold, - cp_isolation=cp_isolation, - cp_exclusion=cp_exclusion, - preemptive_behaviour=preemptive_behaviour, - explain=explain, - language_threshold=language_threshold, - enable_fallback=enable_fallback, - ) - elif isinstance( - fp_or_path_or_payload, - ( - bytes, - bytearray, - ), - ): - guesses = from_bytes( - fp_or_path_or_payload, - steps=steps, - chunk_size=chunk_size, - threshold=threshold, - cp_isolation=cp_isolation, - cp_exclusion=cp_exclusion, - preemptive_behaviour=preemptive_behaviour, - explain=explain, - language_threshold=language_threshold, - enable_fallback=enable_fallback, - ) - else: - guesses = from_fp( - fp_or_path_or_payload, - steps=steps, - chunk_size=chunk_size, - threshold=threshold, - cp_isolation=cp_isolation, - cp_exclusion=cp_exclusion, - preemptive_behaviour=preemptive_behaviour, - explain=explain, - language_threshold=language_threshold, - enable_fallback=enable_fallback, - ) - - return not guesses diff --git a/env/lib/python3.10/site-packages/charset_normalizer/cd.py b/env/lib/python3.10/site-packages/charset_normalizer/cd.py deleted file mode 100644 index 4ea6760..0000000 --- a/env/lib/python3.10/site-packages/charset_normalizer/cd.py +++ /dev/null @@ -1,395 +0,0 @@ -import importlib -from codecs import IncrementalDecoder -from collections import Counter -from functools import lru_cache -from typing import Counter as TypeCounter, Dict, List, Optional, Tuple - -from .constant import ( - FREQUENCIES, - KO_NAMES, - LANGUAGE_SUPPORTED_COUNT, - TOO_SMALL_SEQUENCE, - ZH_NAMES, -) -from .md import is_suspiciously_successive_range -from .models import CoherenceMatches -from .utils import ( - is_accentuated, - is_latin, - is_multi_byte_encoding, - is_unicode_range_secondary, - unicode_range, -) - - -def encoding_unicode_range(iana_name: str) -> List[str]: - """ - Return associated unicode ranges in a single byte code page. - """ - if is_multi_byte_encoding(iana_name): - raise IOError("Function not supported on multi-byte code page") - - decoder = importlib.import_module( - "encodings.{}".format(iana_name) - ).IncrementalDecoder - - p: IncrementalDecoder = decoder(errors="ignore") - seen_ranges: Dict[str, int] = {} - character_count: int = 0 - - for i in range(0x40, 0xFF): - chunk: str = p.decode(bytes([i])) - - if chunk: - character_range: Optional[str] = unicode_range(chunk) - - if character_range is None: - continue - - if is_unicode_range_secondary(character_range) is False: - if character_range not in seen_ranges: - seen_ranges[character_range] = 0 - seen_ranges[character_range] += 1 - character_count += 1 - - return sorted( - [ - character_range - for character_range in seen_ranges - if seen_ranges[character_range] / character_count >= 0.15 - ] - ) - - -def unicode_range_languages(primary_range: str) -> List[str]: - """ - Return inferred languages used with a unicode range. - """ - languages: List[str] = [] - - for language, characters in FREQUENCIES.items(): - for character in characters: - if unicode_range(character) == primary_range: - languages.append(language) - break - - return languages - - -@lru_cache() -def encoding_languages(iana_name: str) -> List[str]: - """ - Single-byte encoding language association. Some code page are heavily linked to particular language(s). - This function does the correspondence. 
- """ - unicode_ranges: List[str] = encoding_unicode_range(iana_name) - primary_range: Optional[str] = None - - for specified_range in unicode_ranges: - if "Latin" not in specified_range: - primary_range = specified_range - break - - if primary_range is None: - return ["Latin Based"] - - return unicode_range_languages(primary_range) - - -@lru_cache() -def mb_encoding_languages(iana_name: str) -> List[str]: - """ - Multi-byte encoding language association. Some code page are heavily linked to particular language(s). - This function does the correspondence. - """ - if ( - iana_name.startswith("shift_") - or iana_name.startswith("iso2022_jp") - or iana_name.startswith("euc_j") - or iana_name == "cp932" - ): - return ["Japanese"] - if iana_name.startswith("gb") or iana_name in ZH_NAMES: - return ["Chinese"] - if iana_name.startswith("iso2022_kr") or iana_name in KO_NAMES: - return ["Korean"] - - return [] - - -@lru_cache(maxsize=LANGUAGE_SUPPORTED_COUNT) -def get_target_features(language: str) -> Tuple[bool, bool]: - """ - Determine main aspects from a supported language if it contains accents and if is pure Latin. - """ - target_have_accents: bool = False - target_pure_latin: bool = True - - for character in FREQUENCIES[language]: - if not target_have_accents and is_accentuated(character): - target_have_accents = True - if target_pure_latin and is_latin(character) is False: - target_pure_latin = False - - return target_have_accents, target_pure_latin - - -def alphabet_languages( - characters: List[str], ignore_non_latin: bool = False -) -> List[str]: - """ - Return associated languages associated to given characters. - """ - languages: List[Tuple[str, float]] = [] - - source_have_accents = any(is_accentuated(character) for character in characters) - - for language, language_characters in FREQUENCIES.items(): - target_have_accents, target_pure_latin = get_target_features(language) - - if ignore_non_latin and target_pure_latin is False: - continue - - if target_have_accents is False and source_have_accents: - continue - - character_count: int = len(language_characters) - - character_match_count: int = len( - [c for c in language_characters if c in characters] - ) - - ratio: float = character_match_count / character_count - - if ratio >= 0.2: - languages.append((language, ratio)) - - languages = sorted(languages, key=lambda x: x[1], reverse=True) - - return [compatible_language[0] for compatible_language in languages] - - -def characters_popularity_compare( - language: str, ordered_characters: List[str] -) -> float: - """ - Determine if a ordered characters list (by occurrence from most appearance to rarest) match a particular language. - The result is a ratio between 0. (absolutely no correspondence) and 1. (near perfect fit). - Beware that is function is not strict on the match in order to ease the detection. (Meaning close match is 1.) 
- """ - if language not in FREQUENCIES: - raise ValueError("{} not available".format(language)) - - character_approved_count: int = 0 - FREQUENCIES_language_set = set(FREQUENCIES[language]) - - ordered_characters_count: int = len(ordered_characters) - target_language_characters_count: int = len(FREQUENCIES[language]) - - large_alphabet: bool = target_language_characters_count > 26 - - for character, character_rank in zip( - ordered_characters, range(0, ordered_characters_count) - ): - if character not in FREQUENCIES_language_set: - continue - - character_rank_in_language: int = FREQUENCIES[language].index(character) - expected_projection_ratio: float = ( - target_language_characters_count / ordered_characters_count - ) - character_rank_projection: int = int(character_rank * expected_projection_ratio) - - if ( - large_alphabet is False - and abs(character_rank_projection - character_rank_in_language) > 4 - ): - continue - - if ( - large_alphabet is True - and abs(character_rank_projection - character_rank_in_language) - < target_language_characters_count / 3 - ): - character_approved_count += 1 - continue - - characters_before_source: List[str] = FREQUENCIES[language][ - 0:character_rank_in_language - ] - characters_after_source: List[str] = FREQUENCIES[language][ - character_rank_in_language: - ] - characters_before: List[str] = ordered_characters[0:character_rank] - characters_after: List[str] = ordered_characters[character_rank:] - - before_match_count: int = len( - set(characters_before) & set(characters_before_source) - ) - - after_match_count: int = len( - set(characters_after) & set(characters_after_source) - ) - - if len(characters_before_source) == 0 and before_match_count <= 4: - character_approved_count += 1 - continue - - if len(characters_after_source) == 0 and after_match_count <= 4: - character_approved_count += 1 - continue - - if ( - before_match_count / len(characters_before_source) >= 0.4 - or after_match_count / len(characters_after_source) >= 0.4 - ): - character_approved_count += 1 - continue - - return character_approved_count / len(ordered_characters) - - -def alpha_unicode_split(decoded_sequence: str) -> List[str]: - """ - Given a decoded text sequence, return a list of str. Unicode range / alphabet separation. - Ex. a text containing English/Latin with a bit a Hebrew will return two items in the resulting list; - One containing the latin letters and the other hebrew. - """ - layers: Dict[str, str] = {} - - for character in decoded_sequence: - if character.isalpha() is False: - continue - - character_range: Optional[str] = unicode_range(character) - - if character_range is None: - continue - - layer_target_range: Optional[str] = None - - for discovered_range in layers: - if ( - is_suspiciously_successive_range(discovered_range, character_range) - is False - ): - layer_target_range = discovered_range - break - - if layer_target_range is None: - layer_target_range = character_range - - if layer_target_range not in layers: - layers[layer_target_range] = character.lower() - continue - - layers[layer_target_range] += character.lower() - - return list(layers.values()) - - -def merge_coherence_ratios(results: List[CoherenceMatches]) -> CoherenceMatches: - """ - This function merge results previously given by the function coherence_ratio. - The return type is the same as coherence_ratio. 
- """ - per_language_ratios: Dict[str, List[float]] = {} - for result in results: - for sub_result in result: - language, ratio = sub_result - if language not in per_language_ratios: - per_language_ratios[language] = [ratio] - continue - per_language_ratios[language].append(ratio) - - merge = [ - ( - language, - round( - sum(per_language_ratios[language]) / len(per_language_ratios[language]), - 4, - ), - ) - for language in per_language_ratios - ] - - return sorted(merge, key=lambda x: x[1], reverse=True) - - -def filter_alt_coherence_matches(results: CoherenceMatches) -> CoherenceMatches: - """ - We shall NOT return "English—" in CoherenceMatches because it is an alternative - of "English". This function only keeps the best match and remove the em-dash in it. - """ - index_results: Dict[str, List[float]] = dict() - - for result in results: - language, ratio = result - no_em_name: str = language.replace("—", "") - - if no_em_name not in index_results: - index_results[no_em_name] = [] - - index_results[no_em_name].append(ratio) - - if any(len(index_results[e]) > 1 for e in index_results): - filtered_results: CoherenceMatches = [] - - for language in index_results: - filtered_results.append((language, max(index_results[language]))) - - return filtered_results - - return results - - -@lru_cache(maxsize=2048) -def coherence_ratio( - decoded_sequence: str, threshold: float = 0.1, lg_inclusion: Optional[str] = None -) -> CoherenceMatches: - """ - Detect ANY language that can be identified in given sequence. The sequence will be analysed by layers. - A layer = Character extraction by alphabets/ranges. - """ - - results: List[Tuple[str, float]] = [] - ignore_non_latin: bool = False - - sufficient_match_count: int = 0 - - lg_inclusion_list = lg_inclusion.split(",") if lg_inclusion is not None else [] - if "Latin Based" in lg_inclusion_list: - ignore_non_latin = True - lg_inclusion_list.remove("Latin Based") - - for layer in alpha_unicode_split(decoded_sequence): - sequence_frequencies: TypeCounter[str] = Counter(layer) - most_common = sequence_frequencies.most_common() - - character_count: int = sum(o for c, o in most_common) - - if character_count <= TOO_SMALL_SEQUENCE: - continue - - popular_character_ordered: List[str] = [c for c, o in most_common] - - for language in lg_inclusion_list or alphabet_languages( - popular_character_ordered, ignore_non_latin - ): - ratio: float = characters_popularity_compare( - language, popular_character_ordered - ) - - if ratio < threshold: - continue - elif ratio >= 0.8: - sufficient_match_count += 1 - - results.append((language, round(ratio, 4))) - - if sufficient_match_count >= 3: - break - - return sorted( - filter_alt_coherence_matches(results), key=lambda x: x[1], reverse=True - ) diff --git a/env/lib/python3.10/site-packages/charset_normalizer/cli/__init__.py b/env/lib/python3.10/site-packages/charset_normalizer/cli/__init__.py deleted file mode 100644 index d95fedf..0000000 --- a/env/lib/python3.10/site-packages/charset_normalizer/cli/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -from .__main__ import cli_detect, query_yes_no - -__all__ = ( - "cli_detect", - "query_yes_no", -) diff --git a/env/lib/python3.10/site-packages/charset_normalizer/cli/__main__.py b/env/lib/python3.10/site-packages/charset_normalizer/cli/__main__.py deleted file mode 100644 index e7edd0f..0000000 --- a/env/lib/python3.10/site-packages/charset_normalizer/cli/__main__.py +++ /dev/null @@ -1,320 +0,0 @@ -import argparse -import sys -from json import dumps -from os.path import abspath, 
basename, dirname, join, realpath -from platform import python_version -from typing import List, Optional -from unicodedata import unidata_version - -import charset_normalizer.md as md_module -from charset_normalizer import from_fp -from charset_normalizer.models import CliDetectionResult -from charset_normalizer.version import __version__ - - -def query_yes_no(question: str, default: str = "yes") -> bool: - """Ask a yes/no question via input() and return their answer. - - "question" is a string that is presented to the user. - "default" is the presumed answer if the user just hits . - It must be "yes" (the default), "no" or None (meaning - an answer is required of the user). - - The "answer" return value is True for "yes" or False for "no". - - Credit goes to (c) https://stackoverflow.com/questions/3041986/apt-command-line-interface-like-yes-no-input - """ - valid = {"yes": True, "y": True, "ye": True, "no": False, "n": False} - if default is None: - prompt = " [y/n] " - elif default == "yes": - prompt = " [Y/n] " - elif default == "no": - prompt = " [y/N] " - else: - raise ValueError("invalid default answer: '%s'" % default) - - while True: - sys.stdout.write(question + prompt) - choice = input().lower() - if default is not None and choice == "": - return valid[default] - elif choice in valid: - return valid[choice] - else: - sys.stdout.write("Please respond with 'yes' or 'no' " "(or 'y' or 'n').\n") - - -def cli_detect(argv: Optional[List[str]] = None) -> int: - """ - CLI assistant using ARGV and ArgumentParser - :param argv: - :return: 0 if everything is fine, anything else equal trouble - """ - parser = argparse.ArgumentParser( - description="The Real First Universal Charset Detector. " - "Discover originating encoding used on text file. " - "Normalize text to unicode." - ) - - parser.add_argument( - "files", type=argparse.FileType("rb"), nargs="+", help="File(s) to be analysed" - ) - parser.add_argument( - "-v", - "--verbose", - action="store_true", - default=False, - dest="verbose", - help="Display complementary information about file if any. " - "Stdout will contain logs about the detection process.", - ) - parser.add_argument( - "-a", - "--with-alternative", - action="store_true", - default=False, - dest="alternatives", - help="Output complementary possibilities if any. Top-level JSON WILL be a list.", - ) - parser.add_argument( - "-n", - "--normalize", - action="store_true", - default=False, - dest="normalize", - help="Permit to normalize input file. If not set, program does not write anything.", - ) - parser.add_argument( - "-m", - "--minimal", - action="store_true", - default=False, - dest="minimal", - help="Only output the charset detected to STDOUT. Disabling JSON output.", - ) - parser.add_argument( - "-r", - "--replace", - action="store_true", - default=False, - dest="replace", - help="Replace file when trying to normalize it instead of creating a new one.", - ) - parser.add_argument( - "-f", - "--force", - action="store_true", - default=False, - dest="force", - help="Replace file without asking if you are sure, use this flag with caution.", - ) - parser.add_argument( - "-i", - "--no-preemptive", - action="store_true", - default=False, - dest="no_preemptive", - help="Disable looking at a charset declaration to hint the detector.", - ) - parser.add_argument( - "-t", - "--threshold", - action="store", - default=0.2, - type=float, - dest="threshold", - help="Define a custom maximum amount of chaos allowed in decoded content. 0. 
<= chaos <= 1.", - ) - parser.add_argument( - "--version", - action="version", - version="Charset-Normalizer {} - Python {} - Unicode {} - SpeedUp {}".format( - __version__, - python_version(), - unidata_version, - "OFF" if md_module.__file__.lower().endswith(".py") else "ON", - ), - help="Show version information and exit.", - ) - - args = parser.parse_args(argv) - - if args.replace is True and args.normalize is False: - if args.files: - for my_file in args.files: - my_file.close() - print("Use --replace in addition of --normalize only.", file=sys.stderr) - return 1 - - if args.force is True and args.replace is False: - if args.files: - for my_file in args.files: - my_file.close() - print("Use --force in addition of --replace only.", file=sys.stderr) - return 1 - - if args.threshold < 0.0 or args.threshold > 1.0: - if args.files: - for my_file in args.files: - my_file.close() - print("--threshold VALUE should be between 0. AND 1.", file=sys.stderr) - return 1 - - x_ = [] - - for my_file in args.files: - matches = from_fp( - my_file, - threshold=args.threshold, - explain=args.verbose, - preemptive_behaviour=args.no_preemptive is False, - ) - - best_guess = matches.best() - - if best_guess is None: - print( - 'Unable to identify originating encoding for "{}". {}'.format( - my_file.name, - ( - "Maybe try increasing maximum amount of chaos." - if args.threshold < 1.0 - else "" - ), - ), - file=sys.stderr, - ) - x_.append( - CliDetectionResult( - abspath(my_file.name), - None, - [], - [], - "Unknown", - [], - False, - 1.0, - 0.0, - None, - True, - ) - ) - else: - x_.append( - CliDetectionResult( - abspath(my_file.name), - best_guess.encoding, - best_guess.encoding_aliases, - [ - cp - for cp in best_guess.could_be_from_charset - if cp != best_guess.encoding - ], - best_guess.language, - best_guess.alphabets, - best_guess.bom, - best_guess.percent_chaos, - best_guess.percent_coherence, - None, - True, - ) - ) - - if len(matches) > 1 and args.alternatives: - for el in matches: - if el != best_guess: - x_.append( - CliDetectionResult( - abspath(my_file.name), - el.encoding, - el.encoding_aliases, - [ - cp - for cp in el.could_be_from_charset - if cp != el.encoding - ], - el.language, - el.alphabets, - el.bom, - el.percent_chaos, - el.percent_coherence, - None, - False, - ) - ) - - if args.normalize is True: - if best_guess.encoding.startswith("utf") is True: - print( - '"{}" file does not need to be normalized, as it already came from unicode.'.format( - my_file.name - ), - file=sys.stderr, - ) - if my_file.closed is False: - my_file.close() - continue - - dir_path = dirname(realpath(my_file.name)) - file_name = basename(realpath(my_file.name)) - - o_: List[str] = file_name.split(".") - - if args.replace is False: - o_.insert(-1, best_guess.encoding) - if my_file.closed is False: - my_file.close() - elif ( - args.force is False - and query_yes_no( - 'Are you sure to normalize "{}" by replacing it ?'.format( - my_file.name - ), - "no", - ) - is False - ): - if my_file.closed is False: - my_file.close() - continue - - try: - x_[0].unicode_path = join(dir_path, ".".join(o_)) - - with open(x_[0].unicode_path, "wb") as fp: - fp.write(best_guess.output()) - except IOError as e: - print(str(e), file=sys.stderr) - if my_file.closed is False: - my_file.close() - return 2 - - if my_file.closed is False: - my_file.close() - - if args.minimal is False: - print( - dumps( - [el.__dict__ for el in x_] if len(x_) > 1 else x_[0].__dict__, - ensure_ascii=True, - indent=4, - ) - ) - else: - for my_file in 
args.files: - print( - ", ".join( - [ - el.encoding or "undefined" - for el in x_ - if el.path == abspath(my_file.name) - ] - ) - ) - - return 0 - - -if __name__ == "__main__": - cli_detect() diff --git a/env/lib/python3.10/site-packages/charset_normalizer/cli/__pycache__/__init__.cpython-310.pyc b/env/lib/python3.10/site-packages/charset_normalizer/cli/__pycache__/__init__.cpython-310.pyc deleted file mode 100644 index 4db4e0c..0000000 Binary files a/env/lib/python3.10/site-packages/charset_normalizer/cli/__pycache__/__init__.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/charset_normalizer/cli/__pycache__/__main__.cpython-310.pyc b/env/lib/python3.10/site-packages/charset_normalizer/cli/__pycache__/__main__.cpython-310.pyc deleted file mode 100644 index cccd3e0..0000000 Binary files a/env/lib/python3.10/site-packages/charset_normalizer/cli/__pycache__/__main__.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/charset_normalizer/constant.py b/env/lib/python3.10/site-packages/charset_normalizer/constant.py deleted file mode 100644 index f8f2a81..0000000 --- a/env/lib/python3.10/site-packages/charset_normalizer/constant.py +++ /dev/null @@ -1,1997 +0,0 @@ -# -*- coding: utf-8 -*- -from codecs import BOM_UTF8, BOM_UTF16_BE, BOM_UTF16_LE, BOM_UTF32_BE, BOM_UTF32_LE -from encodings.aliases import aliases -from re import IGNORECASE, compile as re_compile -from typing import Dict, List, Set, Union - -# Contain for each eligible encoding a list of/item bytes SIG/BOM -ENCODING_MARKS: Dict[str, Union[bytes, List[bytes]]] = { - "utf_8": BOM_UTF8, - "utf_7": [ - b"\x2b\x2f\x76\x38", - b"\x2b\x2f\x76\x39", - b"\x2b\x2f\x76\x2b", - b"\x2b\x2f\x76\x2f", - b"\x2b\x2f\x76\x38\x2d", - ], - "gb18030": b"\x84\x31\x95\x33", - "utf_32": [BOM_UTF32_BE, BOM_UTF32_LE], - "utf_16": [BOM_UTF16_BE, BOM_UTF16_LE], -} - -TOO_SMALL_SEQUENCE: int = 32 -TOO_BIG_SEQUENCE: int = int(10e6) - -UTF8_MAXIMAL_ALLOCATION: int = 1_112_064 - -# Up-to-date Unicode ucd/15.0.0 -UNICODE_RANGES_COMBINED: Dict[str, range] = { - "Control character": range(32), - "Basic Latin": range(32, 128), - "Latin-1 Supplement": range(128, 256), - "Latin Extended-A": range(256, 384), - "Latin Extended-B": range(384, 592), - "IPA Extensions": range(592, 688), - "Spacing Modifier Letters": range(688, 768), - "Combining Diacritical Marks": range(768, 880), - "Greek and Coptic": range(880, 1024), - "Cyrillic": range(1024, 1280), - "Cyrillic Supplement": range(1280, 1328), - "Armenian": range(1328, 1424), - "Hebrew": range(1424, 1536), - "Arabic": range(1536, 1792), - "Syriac": range(1792, 1872), - "Arabic Supplement": range(1872, 1920), - "Thaana": range(1920, 1984), - "NKo": range(1984, 2048), - "Samaritan": range(2048, 2112), - "Mandaic": range(2112, 2144), - "Syriac Supplement": range(2144, 2160), - "Arabic Extended-B": range(2160, 2208), - "Arabic Extended-A": range(2208, 2304), - "Devanagari": range(2304, 2432), - "Bengali": range(2432, 2560), - "Gurmukhi": range(2560, 2688), - "Gujarati": range(2688, 2816), - "Oriya": range(2816, 2944), - "Tamil": range(2944, 3072), - "Telugu": range(3072, 3200), - "Kannada": range(3200, 3328), - "Malayalam": range(3328, 3456), - "Sinhala": range(3456, 3584), - "Thai": range(3584, 3712), - "Lao": range(3712, 3840), - "Tibetan": range(3840, 4096), - "Myanmar": range(4096, 4256), - "Georgian": range(4256, 4352), - "Hangul Jamo": range(4352, 4608), - "Ethiopic": range(4608, 4992), - "Ethiopic Supplement": range(4992, 5024), - "Cherokee": range(5024, 
5120), - "Unified Canadian Aboriginal Syllabics": range(5120, 5760), - "Ogham": range(5760, 5792), - "Runic": range(5792, 5888), - "Tagalog": range(5888, 5920), - "Hanunoo": range(5920, 5952), - "Buhid": range(5952, 5984), - "Tagbanwa": range(5984, 6016), - "Khmer": range(6016, 6144), - "Mongolian": range(6144, 6320), - "Unified Canadian Aboriginal Syllabics Extended": range(6320, 6400), - "Limbu": range(6400, 6480), - "Tai Le": range(6480, 6528), - "New Tai Lue": range(6528, 6624), - "Khmer Symbols": range(6624, 6656), - "Buginese": range(6656, 6688), - "Tai Tham": range(6688, 6832), - "Combining Diacritical Marks Extended": range(6832, 6912), - "Balinese": range(6912, 7040), - "Sundanese": range(7040, 7104), - "Batak": range(7104, 7168), - "Lepcha": range(7168, 7248), - "Ol Chiki": range(7248, 7296), - "Cyrillic Extended-C": range(7296, 7312), - "Georgian Extended": range(7312, 7360), - "Sundanese Supplement": range(7360, 7376), - "Vedic Extensions": range(7376, 7424), - "Phonetic Extensions": range(7424, 7552), - "Phonetic Extensions Supplement": range(7552, 7616), - "Combining Diacritical Marks Supplement": range(7616, 7680), - "Latin Extended Additional": range(7680, 7936), - "Greek Extended": range(7936, 8192), - "General Punctuation": range(8192, 8304), - "Superscripts and Subscripts": range(8304, 8352), - "Currency Symbols": range(8352, 8400), - "Combining Diacritical Marks for Symbols": range(8400, 8448), - "Letterlike Symbols": range(8448, 8528), - "Number Forms": range(8528, 8592), - "Arrows": range(8592, 8704), - "Mathematical Operators": range(8704, 8960), - "Miscellaneous Technical": range(8960, 9216), - "Control Pictures": range(9216, 9280), - "Optical Character Recognition": range(9280, 9312), - "Enclosed Alphanumerics": range(9312, 9472), - "Box Drawing": range(9472, 9600), - "Block Elements": range(9600, 9632), - "Geometric Shapes": range(9632, 9728), - "Miscellaneous Symbols": range(9728, 9984), - "Dingbats": range(9984, 10176), - "Miscellaneous Mathematical Symbols-A": range(10176, 10224), - "Supplemental Arrows-A": range(10224, 10240), - "Braille Patterns": range(10240, 10496), - "Supplemental Arrows-B": range(10496, 10624), - "Miscellaneous Mathematical Symbols-B": range(10624, 10752), - "Supplemental Mathematical Operators": range(10752, 11008), - "Miscellaneous Symbols and Arrows": range(11008, 11264), - "Glagolitic": range(11264, 11360), - "Latin Extended-C": range(11360, 11392), - "Coptic": range(11392, 11520), - "Georgian Supplement": range(11520, 11568), - "Tifinagh": range(11568, 11648), - "Ethiopic Extended": range(11648, 11744), - "Cyrillic Extended-A": range(11744, 11776), - "Supplemental Punctuation": range(11776, 11904), - "CJK Radicals Supplement": range(11904, 12032), - "Kangxi Radicals": range(12032, 12256), - "Ideographic Description Characters": range(12272, 12288), - "CJK Symbols and Punctuation": range(12288, 12352), - "Hiragana": range(12352, 12448), - "Katakana": range(12448, 12544), - "Bopomofo": range(12544, 12592), - "Hangul Compatibility Jamo": range(12592, 12688), - "Kanbun": range(12688, 12704), - "Bopomofo Extended": range(12704, 12736), - "CJK Strokes": range(12736, 12784), - "Katakana Phonetic Extensions": range(12784, 12800), - "Enclosed CJK Letters and Months": range(12800, 13056), - "CJK Compatibility": range(13056, 13312), - "CJK Unified Ideographs Extension A": range(13312, 19904), - "Yijing Hexagram Symbols": range(19904, 19968), - "CJK Unified Ideographs": range(19968, 40960), - "Yi Syllables": range(40960, 42128), - "Yi Radicals": 
range(42128, 42192), - "Lisu": range(42192, 42240), - "Vai": range(42240, 42560), - "Cyrillic Extended-B": range(42560, 42656), - "Bamum": range(42656, 42752), - "Modifier Tone Letters": range(42752, 42784), - "Latin Extended-D": range(42784, 43008), - "Syloti Nagri": range(43008, 43056), - "Common Indic Number Forms": range(43056, 43072), - "Phags-pa": range(43072, 43136), - "Saurashtra": range(43136, 43232), - "Devanagari Extended": range(43232, 43264), - "Kayah Li": range(43264, 43312), - "Rejang": range(43312, 43360), - "Hangul Jamo Extended-A": range(43360, 43392), - "Javanese": range(43392, 43488), - "Myanmar Extended-B": range(43488, 43520), - "Cham": range(43520, 43616), - "Myanmar Extended-A": range(43616, 43648), - "Tai Viet": range(43648, 43744), - "Meetei Mayek Extensions": range(43744, 43776), - "Ethiopic Extended-A": range(43776, 43824), - "Latin Extended-E": range(43824, 43888), - "Cherokee Supplement": range(43888, 43968), - "Meetei Mayek": range(43968, 44032), - "Hangul Syllables": range(44032, 55216), - "Hangul Jamo Extended-B": range(55216, 55296), - "High Surrogates": range(55296, 56192), - "High Private Use Surrogates": range(56192, 56320), - "Low Surrogates": range(56320, 57344), - "Private Use Area": range(57344, 63744), - "CJK Compatibility Ideographs": range(63744, 64256), - "Alphabetic Presentation Forms": range(64256, 64336), - "Arabic Presentation Forms-A": range(64336, 65024), - "Variation Selectors": range(65024, 65040), - "Vertical Forms": range(65040, 65056), - "Combining Half Marks": range(65056, 65072), - "CJK Compatibility Forms": range(65072, 65104), - "Small Form Variants": range(65104, 65136), - "Arabic Presentation Forms-B": range(65136, 65280), - "Halfwidth and Fullwidth Forms": range(65280, 65520), - "Specials": range(65520, 65536), - "Linear B Syllabary": range(65536, 65664), - "Linear B Ideograms": range(65664, 65792), - "Aegean Numbers": range(65792, 65856), - "Ancient Greek Numbers": range(65856, 65936), - "Ancient Symbols": range(65936, 66000), - "Phaistos Disc": range(66000, 66048), - "Lycian": range(66176, 66208), - "Carian": range(66208, 66272), - "Coptic Epact Numbers": range(66272, 66304), - "Old Italic": range(66304, 66352), - "Gothic": range(66352, 66384), - "Old Permic": range(66384, 66432), - "Ugaritic": range(66432, 66464), - "Old Persian": range(66464, 66528), - "Deseret": range(66560, 66640), - "Shavian": range(66640, 66688), - "Osmanya": range(66688, 66736), - "Osage": range(66736, 66816), - "Elbasan": range(66816, 66864), - "Caucasian Albanian": range(66864, 66928), - "Vithkuqi": range(66928, 67008), - "Linear A": range(67072, 67456), - "Latin Extended-F": range(67456, 67520), - "Cypriot Syllabary": range(67584, 67648), - "Imperial Aramaic": range(67648, 67680), - "Palmyrene": range(67680, 67712), - "Nabataean": range(67712, 67760), - "Hatran": range(67808, 67840), - "Phoenician": range(67840, 67872), - "Lydian": range(67872, 67904), - "Meroitic Hieroglyphs": range(67968, 68000), - "Meroitic Cursive": range(68000, 68096), - "Kharoshthi": range(68096, 68192), - "Old South Arabian": range(68192, 68224), - "Old North Arabian": range(68224, 68256), - "Manichaean": range(68288, 68352), - "Avestan": range(68352, 68416), - "Inscriptional Parthian": range(68416, 68448), - "Inscriptional Pahlavi": range(68448, 68480), - "Psalter Pahlavi": range(68480, 68528), - "Old Turkic": range(68608, 68688), - "Old Hungarian": range(68736, 68864), - "Hanifi Rohingya": range(68864, 68928), - "Rumi Numeral Symbols": range(69216, 69248), - "Yezidi": 
range(69248, 69312), - "Arabic Extended-C": range(69312, 69376), - "Old Sogdian": range(69376, 69424), - "Sogdian": range(69424, 69488), - "Old Uyghur": range(69488, 69552), - "Chorasmian": range(69552, 69600), - "Elymaic": range(69600, 69632), - "Brahmi": range(69632, 69760), - "Kaithi": range(69760, 69840), - "Sora Sompeng": range(69840, 69888), - "Chakma": range(69888, 69968), - "Mahajani": range(69968, 70016), - "Sharada": range(70016, 70112), - "Sinhala Archaic Numbers": range(70112, 70144), - "Khojki": range(70144, 70224), - "Multani": range(70272, 70320), - "Khudawadi": range(70320, 70400), - "Grantha": range(70400, 70528), - "Newa": range(70656, 70784), - "Tirhuta": range(70784, 70880), - "Siddham": range(71040, 71168), - "Modi": range(71168, 71264), - "Mongolian Supplement": range(71264, 71296), - "Takri": range(71296, 71376), - "Ahom": range(71424, 71504), - "Dogra": range(71680, 71760), - "Warang Citi": range(71840, 71936), - "Dives Akuru": range(71936, 72032), - "Nandinagari": range(72096, 72192), - "Zanabazar Square": range(72192, 72272), - "Soyombo": range(72272, 72368), - "Unified Canadian Aboriginal Syllabics Extended-A": range(72368, 72384), - "Pau Cin Hau": range(72384, 72448), - "Devanagari Extended-A": range(72448, 72544), - "Bhaiksuki": range(72704, 72816), - "Marchen": range(72816, 72896), - "Masaram Gondi": range(72960, 73056), - "Gunjala Gondi": range(73056, 73136), - "Makasar": range(73440, 73472), - "Kawi": range(73472, 73568), - "Lisu Supplement": range(73648, 73664), - "Tamil Supplement": range(73664, 73728), - "Cuneiform": range(73728, 74752), - "Cuneiform Numbers and Punctuation": range(74752, 74880), - "Early Dynastic Cuneiform": range(74880, 75088), - "Cypro-Minoan": range(77712, 77824), - "Egyptian Hieroglyphs": range(77824, 78896), - "Egyptian Hieroglyph Format Controls": range(78896, 78944), - "Anatolian Hieroglyphs": range(82944, 83584), - "Bamum Supplement": range(92160, 92736), - "Mro": range(92736, 92784), - "Tangsa": range(92784, 92880), - "Bassa Vah": range(92880, 92928), - "Pahawh Hmong": range(92928, 93072), - "Medefaidrin": range(93760, 93856), - "Miao": range(93952, 94112), - "Ideographic Symbols and Punctuation": range(94176, 94208), - "Tangut": range(94208, 100352), - "Tangut Components": range(100352, 101120), - "Khitan Small Script": range(101120, 101632), - "Tangut Supplement": range(101632, 101760), - "Kana Extended-B": range(110576, 110592), - "Kana Supplement": range(110592, 110848), - "Kana Extended-A": range(110848, 110896), - "Small Kana Extension": range(110896, 110960), - "Nushu": range(110960, 111360), - "Duployan": range(113664, 113824), - "Shorthand Format Controls": range(113824, 113840), - "Znamenny Musical Notation": range(118528, 118736), - "Byzantine Musical Symbols": range(118784, 119040), - "Musical Symbols": range(119040, 119296), - "Ancient Greek Musical Notation": range(119296, 119376), - "Kaktovik Numerals": range(119488, 119520), - "Mayan Numerals": range(119520, 119552), - "Tai Xuan Jing Symbols": range(119552, 119648), - "Counting Rod Numerals": range(119648, 119680), - "Mathematical Alphanumeric Symbols": range(119808, 120832), - "Sutton SignWriting": range(120832, 121520), - "Latin Extended-G": range(122624, 122880), - "Glagolitic Supplement": range(122880, 122928), - "Cyrillic Extended-D": range(122928, 123024), - "Nyiakeng Puachue Hmong": range(123136, 123216), - "Toto": range(123536, 123584), - "Wancho": range(123584, 123648), - "Nag Mundari": range(124112, 124160), - "Ethiopic Extended-B": range(124896, 
124928), - "Mende Kikakui": range(124928, 125152), - "Adlam": range(125184, 125280), - "Indic Siyaq Numbers": range(126064, 126144), - "Ottoman Siyaq Numbers": range(126208, 126288), - "Arabic Mathematical Alphabetic Symbols": range(126464, 126720), - "Mahjong Tiles": range(126976, 127024), - "Domino Tiles": range(127024, 127136), - "Playing Cards": range(127136, 127232), - "Enclosed Alphanumeric Supplement": range(127232, 127488), - "Enclosed Ideographic Supplement": range(127488, 127744), - "Miscellaneous Symbols and Pictographs": range(127744, 128512), - "Emoticons range(Emoji)": range(128512, 128592), - "Ornamental Dingbats": range(128592, 128640), - "Transport and Map Symbols": range(128640, 128768), - "Alchemical Symbols": range(128768, 128896), - "Geometric Shapes Extended": range(128896, 129024), - "Supplemental Arrows-C": range(129024, 129280), - "Supplemental Symbols and Pictographs": range(129280, 129536), - "Chess Symbols": range(129536, 129648), - "Symbols and Pictographs Extended-A": range(129648, 129792), - "Symbols for Legacy Computing": range(129792, 130048), - "CJK Unified Ideographs Extension B": range(131072, 173792), - "CJK Unified Ideographs Extension C": range(173824, 177984), - "CJK Unified Ideographs Extension D": range(177984, 178208), - "CJK Unified Ideographs Extension E": range(178208, 183984), - "CJK Unified Ideographs Extension F": range(183984, 191472), - "CJK Compatibility Ideographs Supplement": range(194560, 195104), - "CJK Unified Ideographs Extension G": range(196608, 201552), - "CJK Unified Ideographs Extension H": range(201552, 205744), - "Tags": range(917504, 917632), - "Variation Selectors Supplement": range(917760, 918000), - "Supplementary Private Use Area-A": range(983040, 1048576), - "Supplementary Private Use Area-B": range(1048576, 1114112), -} - - -UNICODE_SECONDARY_RANGE_KEYWORD: List[str] = [ - "Supplement", - "Extended", - "Extensions", - "Modifier", - "Marks", - "Punctuation", - "Symbols", - "Forms", - "Operators", - "Miscellaneous", - "Drawing", - "Block", - "Shapes", - "Supplemental", - "Tags", -] - -RE_POSSIBLE_ENCODING_INDICATION = re_compile( - r"(?:(?:encoding)|(?:charset)|(?:coding))(?:[\:= ]{1,10})(?:[\"\']?)([a-zA-Z0-9\-_]+)(?:[\"\']?)", - IGNORECASE, -) - -IANA_NO_ALIASES = [ - "cp720", - "cp737", - "cp856", - "cp874", - "cp875", - "cp1006", - "koi8_r", - "koi8_t", - "koi8_u", -] - -IANA_SUPPORTED: List[str] = sorted( - filter( - lambda x: x.endswith("_codec") is False - and x not in {"rot_13", "tactis", "mbcs"}, - list(set(aliases.values())) + IANA_NO_ALIASES, - ) -) - -IANA_SUPPORTED_COUNT: int = len(IANA_SUPPORTED) - -# pre-computed code page that are similar using the function cp_similarity. 
-IANA_SUPPORTED_SIMILAR: Dict[str, List[str]] = { - "cp037": ["cp1026", "cp1140", "cp273", "cp500"], - "cp1026": ["cp037", "cp1140", "cp273", "cp500"], - "cp1125": ["cp866"], - "cp1140": ["cp037", "cp1026", "cp273", "cp500"], - "cp1250": ["iso8859_2"], - "cp1251": ["kz1048", "ptcp154"], - "cp1252": ["iso8859_15", "iso8859_9", "latin_1"], - "cp1253": ["iso8859_7"], - "cp1254": ["iso8859_15", "iso8859_9", "latin_1"], - "cp1257": ["iso8859_13"], - "cp273": ["cp037", "cp1026", "cp1140", "cp500"], - "cp437": ["cp850", "cp858", "cp860", "cp861", "cp862", "cp863", "cp865"], - "cp500": ["cp037", "cp1026", "cp1140", "cp273"], - "cp850": ["cp437", "cp857", "cp858", "cp865"], - "cp857": ["cp850", "cp858", "cp865"], - "cp858": ["cp437", "cp850", "cp857", "cp865"], - "cp860": ["cp437", "cp861", "cp862", "cp863", "cp865"], - "cp861": ["cp437", "cp860", "cp862", "cp863", "cp865"], - "cp862": ["cp437", "cp860", "cp861", "cp863", "cp865"], - "cp863": ["cp437", "cp860", "cp861", "cp862", "cp865"], - "cp865": ["cp437", "cp850", "cp857", "cp858", "cp860", "cp861", "cp862", "cp863"], - "cp866": ["cp1125"], - "iso8859_10": ["iso8859_14", "iso8859_15", "iso8859_4", "iso8859_9", "latin_1"], - "iso8859_11": ["tis_620"], - "iso8859_13": ["cp1257"], - "iso8859_14": [ - "iso8859_10", - "iso8859_15", - "iso8859_16", - "iso8859_3", - "iso8859_9", - "latin_1", - ], - "iso8859_15": [ - "cp1252", - "cp1254", - "iso8859_10", - "iso8859_14", - "iso8859_16", - "iso8859_3", - "iso8859_9", - "latin_1", - ], - "iso8859_16": [ - "iso8859_14", - "iso8859_15", - "iso8859_2", - "iso8859_3", - "iso8859_9", - "latin_1", - ], - "iso8859_2": ["cp1250", "iso8859_16", "iso8859_4"], - "iso8859_3": ["iso8859_14", "iso8859_15", "iso8859_16", "iso8859_9", "latin_1"], - "iso8859_4": ["iso8859_10", "iso8859_2", "iso8859_9", "latin_1"], - "iso8859_7": ["cp1253"], - "iso8859_9": [ - "cp1252", - "cp1254", - "cp1258", - "iso8859_10", - "iso8859_14", - "iso8859_15", - "iso8859_16", - "iso8859_3", - "iso8859_4", - "latin_1", - ], - "kz1048": ["cp1251", "ptcp154"], - "latin_1": [ - "cp1252", - "cp1254", - "cp1258", - "iso8859_10", - "iso8859_14", - "iso8859_15", - "iso8859_16", - "iso8859_3", - "iso8859_4", - "iso8859_9", - ], - "mac_iceland": ["mac_roman", "mac_turkish"], - "mac_roman": ["mac_iceland", "mac_turkish"], - "mac_turkish": ["mac_iceland", "mac_roman"], - "ptcp154": ["cp1251", "kz1048"], - "tis_620": ["iso8859_11"], -} - - -CHARDET_CORRESPONDENCE: Dict[str, str] = { - "iso2022_kr": "ISO-2022-KR", - "iso2022_jp": "ISO-2022-JP", - "euc_kr": "EUC-KR", - "tis_620": "TIS-620", - "utf_32": "UTF-32", - "euc_jp": "EUC-JP", - "koi8_r": "KOI8-R", - "iso8859_1": "ISO-8859-1", - "iso8859_2": "ISO-8859-2", - "iso8859_5": "ISO-8859-5", - "iso8859_6": "ISO-8859-6", - "iso8859_7": "ISO-8859-7", - "iso8859_8": "ISO-8859-8", - "utf_16": "UTF-16", - "cp855": "IBM855", - "mac_cyrillic": "MacCyrillic", - "gb2312": "GB2312", - "gb18030": "GB18030", - "cp932": "CP932", - "cp866": "IBM866", - "utf_8": "utf-8", - "utf_8_sig": "UTF-8-SIG", - "shift_jis": "SHIFT_JIS", - "big5": "Big5", - "cp1250": "windows-1250", - "cp1251": "windows-1251", - "cp1252": "Windows-1252", - "cp1253": "windows-1253", - "cp1255": "windows-1255", - "cp1256": "windows-1256", - "cp1254": "Windows-1254", - "cp949": "CP949", -} - - -COMMON_SAFE_ASCII_CHARACTERS: Set[str] = { - "<", - ">", - "=", - ":", - "/", - "&", - ";", - "{", - "}", - "[", - "]", - ",", - "|", - '"', - "-", - "(", - ")", -} - - -KO_NAMES: Set[str] = {"johab", "cp949", "euc_kr"} -ZH_NAMES: Set[str] = {"big5", "cp950", 
"big5hkscs", "hz"} - -# Logging LEVEL below DEBUG -TRACE: int = 5 - - -# Language label that contain the em dash "—" -# character are to be considered alternative seq to origin -FREQUENCIES: Dict[str, List[str]] = { - "English": [ - "e", - "a", - "t", - "i", - "o", - "n", - "s", - "r", - "h", - "l", - "d", - "c", - "u", - "m", - "f", - "p", - "g", - "w", - "y", - "b", - "v", - "k", - "x", - "j", - "z", - "q", - ], - "English—": [ - "e", - "a", - "t", - "i", - "o", - "n", - "s", - "r", - "h", - "l", - "d", - "c", - "m", - "u", - "f", - "p", - "g", - "w", - "b", - "y", - "v", - "k", - "j", - "x", - "z", - "q", - ], - "German": [ - "e", - "n", - "i", - "r", - "s", - "t", - "a", - "d", - "h", - "u", - "l", - "g", - "o", - "c", - "m", - "b", - "f", - "k", - "w", - "z", - "p", - "v", - "ü", - "ä", - "ö", - "j", - ], - "French": [ - "e", - "a", - "s", - "n", - "i", - "t", - "r", - "l", - "u", - "o", - "d", - "c", - "p", - "m", - "é", - "v", - "g", - "f", - "b", - "h", - "q", - "à", - "x", - "è", - "y", - "j", - ], - "Dutch": [ - "e", - "n", - "a", - "i", - "r", - "t", - "o", - "d", - "s", - "l", - "g", - "h", - "v", - "m", - "u", - "k", - "c", - "p", - "b", - "w", - "j", - "z", - "f", - "y", - "x", - "ë", - ], - "Italian": [ - "e", - "i", - "a", - "o", - "n", - "l", - "t", - "r", - "s", - "c", - "d", - "u", - "p", - "m", - "g", - "v", - "f", - "b", - "z", - "h", - "q", - "è", - "à", - "k", - "y", - "ò", - ], - "Polish": [ - "a", - "i", - "o", - "e", - "n", - "r", - "z", - "w", - "s", - "c", - "t", - "k", - "y", - "d", - "p", - "m", - "u", - "l", - "j", - "ł", - "g", - "b", - "h", - "ą", - "ę", - "ó", - ], - "Spanish": [ - "e", - "a", - "o", - "n", - "s", - "r", - "i", - "l", - "d", - "t", - "c", - "u", - "m", - "p", - "b", - "g", - "v", - "f", - "y", - "ó", - "h", - "q", - "í", - "j", - "z", - "á", - ], - "Russian": [ - "о", - "а", - "е", - "и", - "н", - "с", - "т", - "р", - "в", - "л", - "к", - "м", - "д", - "п", - "у", - "г", - "я", - "ы", - "з", - "б", - "й", - "ь", - "ч", - "х", - "ж", - "ц", - ], - # Jap-Kanji - "Japanese": [ - "人", - "一", - "大", - "亅", - "丁", - "丨", - "竹", - "笑", - "口", - "日", - "今", - "二", - "彳", - "行", - "十", - "土", - "丶", - "寸", - "寺", - "時", - "乙", - "丿", - "乂", - "气", - "気", - "冂", - "巾", - "亠", - "市", - "目", - "儿", - "見", - "八", - "小", - "凵", - "県", - "月", - "彐", - "門", - "間", - "木", - "東", - "山", - "出", - "本", - "中", - "刀", - "分", - "耳", - "又", - "取", - "最", - "言", - "田", - "心", - "思", - "刂", - "前", - "京", - "尹", - "事", - "生", - "厶", - "云", - "会", - "未", - "来", - "白", - "冫", - "楽", - "灬", - "馬", - "尸", - "尺", - "駅", - "明", - "耂", - "者", - "了", - "阝", - "都", - "高", - "卜", - "占", - "厂", - "广", - "店", - "子", - "申", - "奄", - "亻", - "俺", - "上", - "方", - "冖", - "学", - "衣", - "艮", - "食", - "自", - ], - # Jap-Katakana - "Japanese—": [ - "ー", - "ン", - "ス", - "・", - "ル", - "ト", - "リ", - "イ", - "ア", - "ラ", - "ッ", - "ク", - "ド", - "シ", - "レ", - "ジ", - "タ", - "フ", - "ロ", - "カ", - "テ", - "マ", - "ィ", - "グ", - "バ", - "ム", - "プ", - "オ", - "コ", - "デ", - "ニ", - "ウ", - "メ", - "サ", - "ビ", - "ナ", - "ブ", - "ャ", - "エ", - "ュ", - "チ", - "キ", - "ズ", - "ダ", - "パ", - "ミ", - "ェ", - "ョ", - "ハ", - "セ", - "ベ", - "ガ", - "モ", - "ツ", - "ネ", - "ボ", - "ソ", - "ノ", - "ァ", - "ヴ", - "ワ", - "ポ", - "ペ", - "ピ", - "ケ", - "ゴ", - "ギ", - "ザ", - "ホ", - "ゲ", - "ォ", - "ヤ", - "ヒ", - "ユ", - "ヨ", - "ヘ", - "ゼ", - "ヌ", - "ゥ", - "ゾ", - "ヶ", - "ヂ", - "ヲ", - "ヅ", - "ヵ", - "ヱ", - "ヰ", - "ヮ", - "ヽ", - "゠", - "ヾ", - "ヷ", - "ヿ", - "ヸ", - "ヹ", - "ヺ", - ], - # Jap-Hiragana - "Japanese——": [ - "の", - "に", - "る", - "た", - "と", - "は", - 
"し", - "い", - "を", - "で", - "て", - "が", - "な", - "れ", - "か", - "ら", - "さ", - "っ", - "り", - "す", - "あ", - "も", - "こ", - "ま", - "う", - "く", - "よ", - "き", - "ん", - "め", - "お", - "け", - "そ", - "つ", - "だ", - "や", - "え", - "ど", - "わ", - "ち", - "み", - "せ", - "じ", - "ば", - "へ", - "び", - "ず", - "ろ", - "ほ", - "げ", - "む", - "べ", - "ひ", - "ょ", - "ゆ", - "ぶ", - "ご", - "ゃ", - "ね", - "ふ", - "ぐ", - "ぎ", - "ぼ", - "ゅ", - "づ", - "ざ", - "ぞ", - "ぬ", - "ぜ", - "ぱ", - "ぽ", - "ぷ", - "ぴ", - "ぃ", - "ぁ", - "ぇ", - "ぺ", - "ゞ", - "ぢ", - "ぉ", - "ぅ", - "ゐ", - "ゝ", - "ゑ", - "゛", - "゜", - "ゎ", - "ゔ", - "゚", - "ゟ", - "゙", - "ゕ", - "ゖ", - ], - "Portuguese": [ - "a", - "e", - "o", - "s", - "i", - "r", - "d", - "n", - "t", - "m", - "u", - "c", - "l", - "p", - "g", - "v", - "b", - "f", - "h", - "ã", - "q", - "é", - "ç", - "á", - "z", - "í", - ], - "Swedish": [ - "e", - "a", - "n", - "r", - "t", - "s", - "i", - "l", - "d", - "o", - "m", - "k", - "g", - "v", - "h", - "f", - "u", - "p", - "ä", - "c", - "b", - "ö", - "å", - "y", - "j", - "x", - ], - "Chinese": [ - "的", - "一", - "是", - "不", - "了", - "在", - "人", - "有", - "我", - "他", - "这", - "个", - "们", - "中", - "来", - "上", - "大", - "为", - "和", - "国", - "地", - "到", - "以", - "说", - "时", - "要", - "就", - "出", - "会", - "可", - "也", - "你", - "对", - "生", - "能", - "而", - "子", - "那", - "得", - "于", - "着", - "下", - "自", - "之", - "年", - "过", - "发", - "后", - "作", - "里", - "用", - "道", - "行", - "所", - "然", - "家", - "种", - "事", - "成", - "方", - "多", - "经", - "么", - "去", - "法", - "学", - "如", - "都", - "同", - "现", - "当", - "没", - "动", - "面", - "起", - "看", - "定", - "天", - "分", - "还", - "进", - "好", - "小", - "部", - "其", - "些", - "主", - "样", - "理", - "心", - "她", - "本", - "前", - "开", - "但", - "因", - "只", - "从", - "想", - "实", - ], - "Ukrainian": [ - "о", - "а", - "н", - "і", - "и", - "р", - "в", - "т", - "е", - "с", - "к", - "л", - "у", - "д", - "м", - "п", - "з", - "я", - "ь", - "б", - "г", - "й", - "ч", - "х", - "ц", - "ї", - ], - "Norwegian": [ - "e", - "r", - "n", - "t", - "a", - "s", - "i", - "o", - "l", - "d", - "g", - "k", - "m", - "v", - "f", - "p", - "u", - "b", - "h", - "å", - "y", - "j", - "ø", - "c", - "æ", - "w", - ], - "Finnish": [ - "a", - "i", - "n", - "t", - "e", - "s", - "l", - "o", - "u", - "k", - "ä", - "m", - "r", - "v", - "j", - "h", - "p", - "y", - "d", - "ö", - "g", - "c", - "b", - "f", - "w", - "z", - ], - "Vietnamese": [ - "n", - "h", - "t", - "i", - "c", - "g", - "a", - "o", - "u", - "m", - "l", - "r", - "à", - "đ", - "s", - "e", - "v", - "p", - "b", - "y", - "ư", - "d", - "á", - "k", - "ộ", - "ế", - ], - "Czech": [ - "o", - "e", - "a", - "n", - "t", - "s", - "i", - "l", - "v", - "r", - "k", - "d", - "u", - "m", - "p", - "í", - "c", - "h", - "z", - "á", - "y", - "j", - "b", - "ě", - "é", - "ř", - ], - "Hungarian": [ - "e", - "a", - "t", - "l", - "s", - "n", - "k", - "r", - "i", - "o", - "z", - "á", - "é", - "g", - "m", - "b", - "y", - "v", - "d", - "h", - "u", - "p", - "j", - "ö", - "f", - "c", - ], - "Korean": [ - "이", - "다", - "에", - "의", - "는", - "로", - "하", - "을", - "가", - "고", - "지", - "서", - "한", - "은", - "기", - "으", - "년", - "대", - "사", - "시", - "를", - "리", - "도", - "인", - "스", - "일", - ], - "Indonesian": [ - "a", - "n", - "e", - "i", - "r", - "t", - "u", - "s", - "d", - "k", - "m", - "l", - "g", - "p", - "b", - "o", - "h", - "y", - "j", - "c", - "w", - "f", - "v", - "z", - "x", - "q", - ], - "Turkish": [ - "a", - "e", - "i", - "n", - "r", - "l", - "ı", - "k", - "d", - "t", - "s", - "m", - "y", - "u", - "o", - "b", - "ü", - "ş", - "v", - "g", - "z", - "h", - "c", - "p", - "ç", 
- "ğ", - ], - "Romanian": [ - "e", - "i", - "a", - "r", - "n", - "t", - "u", - "l", - "o", - "c", - "s", - "d", - "p", - "m", - "ă", - "f", - "v", - "î", - "g", - "b", - "ș", - "ț", - "z", - "h", - "â", - "j", - ], - "Farsi": [ - "ا", - "ی", - "ر", - "د", - "ن", - "ه", - "و", - "م", - "ت", - "ب", - "س", - "ل", - "ک", - "ش", - "ز", - "ف", - "گ", - "ع", - "خ", - "ق", - "ج", - "آ", - "پ", - "ح", - "ط", - "ص", - ], - "Arabic": [ - "ا", - "ل", - "ي", - "م", - "و", - "ن", - "ر", - "ت", - "ب", - "ة", - "ع", - "د", - "س", - "ف", - "ه", - "ك", - "ق", - "أ", - "ح", - "ج", - "ش", - "ط", - "ص", - "ى", - "خ", - "إ", - ], - "Danish": [ - "e", - "r", - "n", - "t", - "a", - "i", - "s", - "d", - "l", - "o", - "g", - "m", - "k", - "f", - "v", - "u", - "b", - "h", - "p", - "å", - "y", - "ø", - "æ", - "c", - "j", - "w", - ], - "Serbian": [ - "а", - "и", - "о", - "е", - "н", - "р", - "с", - "у", - "т", - "к", - "ј", - "в", - "д", - "м", - "п", - "л", - "г", - "з", - "б", - "a", - "i", - "e", - "o", - "n", - "ц", - "ш", - ], - "Lithuanian": [ - "i", - "a", - "s", - "o", - "r", - "e", - "t", - "n", - "u", - "k", - "m", - "l", - "p", - "v", - "d", - "j", - "g", - "ė", - "b", - "y", - "ų", - "š", - "ž", - "c", - "ą", - "į", - ], - "Slovene": [ - "e", - "a", - "i", - "o", - "n", - "r", - "s", - "l", - "t", - "j", - "v", - "k", - "d", - "p", - "m", - "u", - "z", - "b", - "g", - "h", - "č", - "c", - "š", - "ž", - "f", - "y", - ], - "Slovak": [ - "o", - "a", - "e", - "n", - "i", - "r", - "v", - "t", - "s", - "l", - "k", - "d", - "m", - "p", - "u", - "c", - "h", - "j", - "b", - "z", - "á", - "y", - "ý", - "í", - "č", - "é", - ], - "Hebrew": [ - "י", - "ו", - "ה", - "ל", - "ר", - "ב", - "ת", - "מ", - "א", - "ש", - "נ", - "ע", - "ם", - "ד", - "ק", - "ח", - "פ", - "ס", - "כ", - "ג", - "ט", - "צ", - "ן", - "ז", - "ך", - ], - "Bulgarian": [ - "а", - "и", - "о", - "е", - "н", - "т", - "р", - "с", - "в", - "л", - "к", - "д", - "п", - "м", - "з", - "г", - "я", - "ъ", - "у", - "б", - "ч", - "ц", - "й", - "ж", - "щ", - "х", - ], - "Croatian": [ - "a", - "i", - "o", - "e", - "n", - "r", - "j", - "s", - "t", - "u", - "k", - "l", - "v", - "d", - "m", - "p", - "g", - "z", - "b", - "c", - "č", - "h", - "š", - "ž", - "ć", - "f", - ], - "Hindi": [ - "क", - "र", - "स", - "न", - "त", - "म", - "ह", - "प", - "य", - "ल", - "व", - "ज", - "द", - "ग", - "ब", - "श", - "ट", - "अ", - "ए", - "थ", - "भ", - "ड", - "च", - "ध", - "ष", - "इ", - ], - "Estonian": [ - "a", - "i", - "e", - "s", - "t", - "l", - "u", - "n", - "o", - "k", - "r", - "d", - "m", - "v", - "g", - "p", - "j", - "h", - "ä", - "b", - "õ", - "ü", - "f", - "c", - "ö", - "y", - ], - "Thai": [ - "า", - "น", - "ร", - "อ", - "ก", - "เ", - "ง", - "ม", - "ย", - "ล", - "ว", - "ด", - "ท", - "ส", - "ต", - "ะ", - "ป", - "บ", - "ค", - "ห", - "แ", - "จ", - "พ", - "ช", - "ข", - "ใ", - ], - "Greek": [ - "α", - "τ", - "ο", - "ι", - "ε", - "ν", - "ρ", - "σ", - "κ", - "η", - "π", - "ς", - "υ", - "μ", - "λ", - "ί", - "ό", - "ά", - "γ", - "έ", - "δ", - "ή", - "ω", - "χ", - "θ", - "ύ", - ], - "Tamil": [ - "க", - "த", - "ப", - "ட", - "ர", - "ம", - "ல", - "ன", - "வ", - "ற", - "ய", - "ள", - "ச", - "ந", - "இ", - "ண", - "அ", - "ஆ", - "ழ", - "ங", - "எ", - "உ", - "ஒ", - "ஸ", - ], - "Kazakh": [ - "а", - "ы", - "е", - "н", - "т", - "р", - "л", - "і", - "д", - "с", - "м", - "қ", - "к", - "о", - "б", - "и", - "у", - "ғ", - "ж", - "ң", - "з", - "ш", - "й", - "п", - "г", - "ө", - ], -} - -LANGUAGE_SUPPORTED_COUNT: int = len(FREQUENCIES) diff --git a/env/lib/python3.10/site-packages/charset_normalizer/legacy.py 
b/env/lib/python3.10/site-packages/charset_normalizer/legacy.py deleted file mode 100644 index 3f6d490..0000000 --- a/env/lib/python3.10/site-packages/charset_normalizer/legacy.py +++ /dev/null @@ -1,65 +0,0 @@ -from __future__ import annotations - -from typing import TYPE_CHECKING, Any, Optional -from warnings import warn - -from .api import from_bytes -from .constant import CHARDET_CORRESPONDENCE - -# TODO: remove this check when dropping Python 3.7 support -if TYPE_CHECKING: - from typing_extensions import TypedDict - - class ResultDict(TypedDict): - encoding: Optional[str] - language: str - confidence: Optional[float] - - -def detect( - byte_str: bytes, should_rename_legacy: bool = False, **kwargs: Any -) -> ResultDict: - """ - chardet legacy method - Detect the encoding of the given byte string. It should be mostly backward-compatible. - Encoding name will match Chardet own writing whenever possible. (Not on encoding name unsupported by it) - This function is deprecated and should be used to migrate your project easily, consult the documentation for - further information. Not planned for removal. - - :param byte_str: The byte sequence to examine. - :param should_rename_legacy: Should we rename legacy encodings - to their more modern equivalents? - """ - if len(kwargs): - warn( - f"charset-normalizer disregard arguments '{','.join(list(kwargs.keys()))}' in legacy function detect()" - ) - - if not isinstance(byte_str, (bytearray, bytes)): - raise TypeError( # pragma: nocover - "Expected object of type bytes or bytearray, got: " - "{0}".format(type(byte_str)) - ) - - if isinstance(byte_str, bytearray): - byte_str = bytes(byte_str) - - r = from_bytes(byte_str).best() - - encoding = r.encoding if r is not None else None - language = r.language if r is not None and r.language != "Unknown" else "" - confidence = 1.0 - r.chaos if r is not None else None - - # Note: CharsetNormalizer does not return 'UTF-8-SIG' as the sig get stripped in the detection/normalization process - # but chardet does return 'utf-8-sig' and it is a valid codec name. 
- if r is not None and encoding == "utf_8" and r.bom: - encoding += "_sig" - - if should_rename_legacy is False and encoding in CHARDET_CORRESPONDENCE: - encoding = CHARDET_CORRESPONDENCE[encoding] - - return { - "encoding": encoding, - "language": language, - "confidence": confidence, - } diff --git a/env/lib/python3.10/site-packages/charset_normalizer/md.cpython-310-x86_64-linux-gnu.so b/env/lib/python3.10/site-packages/charset_normalizer/md.cpython-310-x86_64-linux-gnu.so deleted file mode 100755 index 3824a42..0000000 Binary files a/env/lib/python3.10/site-packages/charset_normalizer/md.cpython-310-x86_64-linux-gnu.so and /dev/null differ diff --git a/env/lib/python3.10/site-packages/charset_normalizer/md.py b/env/lib/python3.10/site-packages/charset_normalizer/md.py deleted file mode 100644 index d834db0..0000000 --- a/env/lib/python3.10/site-packages/charset_normalizer/md.py +++ /dev/null @@ -1,628 +0,0 @@ -from functools import lru_cache -from logging import getLogger -from typing import List, Optional - -from .constant import ( - COMMON_SAFE_ASCII_CHARACTERS, - TRACE, - UNICODE_SECONDARY_RANGE_KEYWORD, -) -from .utils import ( - is_accentuated, - is_arabic, - is_arabic_isolated_form, - is_case_variable, - is_cjk, - is_emoticon, - is_hangul, - is_hiragana, - is_katakana, - is_latin, - is_punctuation, - is_separator, - is_symbol, - is_thai, - is_unprintable, - remove_accent, - unicode_range, -) - - -class MessDetectorPlugin: - """ - Base abstract class used for mess detection plugins. - All detectors MUST extend and implement given methods. - """ - - def eligible(self, character: str) -> bool: - """ - Determine if given character should be fed in. - """ - raise NotImplementedError # pragma: nocover - - def feed(self, character: str) -> None: - """ - The main routine to be executed upon character. - Insert the logic in witch the text would be considered chaotic. - """ - raise NotImplementedError # pragma: nocover - - def reset(self) -> None: # pragma: no cover - """ - Permit to reset the plugin to the initial state. - """ - raise NotImplementedError - - @property - def ratio(self) -> float: - """ - Compute the chaos ratio based on what your feed() has seen. - Must NOT be lower than 0.; No restriction gt 0. 
- """ - raise NotImplementedError # pragma: nocover - - -class TooManySymbolOrPunctuationPlugin(MessDetectorPlugin): - def __init__(self) -> None: - self._punctuation_count: int = 0 - self._symbol_count: int = 0 - self._character_count: int = 0 - - self._last_printable_char: Optional[str] = None - self._frenzy_symbol_in_word: bool = False - - def eligible(self, character: str) -> bool: - return character.isprintable() - - def feed(self, character: str) -> None: - self._character_count += 1 - - if ( - character != self._last_printable_char - and character not in COMMON_SAFE_ASCII_CHARACTERS - ): - if is_punctuation(character): - self._punctuation_count += 1 - elif ( - character.isdigit() is False - and is_symbol(character) - and is_emoticon(character) is False - ): - self._symbol_count += 2 - - self._last_printable_char = character - - def reset(self) -> None: # pragma: no cover - self._punctuation_count = 0 - self._character_count = 0 - self._symbol_count = 0 - - @property - def ratio(self) -> float: - if self._character_count == 0: - return 0.0 - - ratio_of_punctuation: float = ( - self._punctuation_count + self._symbol_count - ) / self._character_count - - return ratio_of_punctuation if ratio_of_punctuation >= 0.3 else 0.0 - - -class TooManyAccentuatedPlugin(MessDetectorPlugin): - def __init__(self) -> None: - self._character_count: int = 0 - self._accentuated_count: int = 0 - - def eligible(self, character: str) -> bool: - return character.isalpha() - - def feed(self, character: str) -> None: - self._character_count += 1 - - if is_accentuated(character): - self._accentuated_count += 1 - - def reset(self) -> None: # pragma: no cover - self._character_count = 0 - self._accentuated_count = 0 - - @property - def ratio(self) -> float: - if self._character_count < 8: - return 0.0 - - ratio_of_accentuation: float = self._accentuated_count / self._character_count - return ratio_of_accentuation if ratio_of_accentuation >= 0.35 else 0.0 - - -class UnprintablePlugin(MessDetectorPlugin): - def __init__(self) -> None: - self._unprintable_count: int = 0 - self._character_count: int = 0 - - def eligible(self, character: str) -> bool: - return True - - def feed(self, character: str) -> None: - if is_unprintable(character): - self._unprintable_count += 1 - self._character_count += 1 - - def reset(self) -> None: # pragma: no cover - self._unprintable_count = 0 - - @property - def ratio(self) -> float: - if self._character_count == 0: - return 0.0 - - return (self._unprintable_count * 8) / self._character_count - - -class SuspiciousDuplicateAccentPlugin(MessDetectorPlugin): - def __init__(self) -> None: - self._successive_count: int = 0 - self._character_count: int = 0 - - self._last_latin_character: Optional[str] = None - - def eligible(self, character: str) -> bool: - return character.isalpha() and is_latin(character) - - def feed(self, character: str) -> None: - self._character_count += 1 - if ( - self._last_latin_character is not None - and is_accentuated(character) - and is_accentuated(self._last_latin_character) - ): - if character.isupper() and self._last_latin_character.isupper(): - self._successive_count += 1 - # Worse if its the same char duplicated with different accent. 
- if remove_accent(character) == remove_accent(self._last_latin_character): - self._successive_count += 1 - self._last_latin_character = character - - def reset(self) -> None: # pragma: no cover - self._successive_count = 0 - self._character_count = 0 - self._last_latin_character = None - - @property - def ratio(self) -> float: - if self._character_count == 0: - return 0.0 - - return (self._successive_count * 2) / self._character_count - - -class SuspiciousRange(MessDetectorPlugin): - def __init__(self) -> None: - self._suspicious_successive_range_count: int = 0 - self._character_count: int = 0 - self._last_printable_seen: Optional[str] = None - - def eligible(self, character: str) -> bool: - return character.isprintable() - - def feed(self, character: str) -> None: - self._character_count += 1 - - if ( - character.isspace() - or is_punctuation(character) - or character in COMMON_SAFE_ASCII_CHARACTERS - ): - self._last_printable_seen = None - return - - if self._last_printable_seen is None: - self._last_printable_seen = character - return - - unicode_range_a: Optional[str] = unicode_range(self._last_printable_seen) - unicode_range_b: Optional[str] = unicode_range(character) - - if is_suspiciously_successive_range(unicode_range_a, unicode_range_b): - self._suspicious_successive_range_count += 1 - - self._last_printable_seen = character - - def reset(self) -> None: # pragma: no cover - self._character_count = 0 - self._suspicious_successive_range_count = 0 - self._last_printable_seen = None - - @property - def ratio(self) -> float: - if self._character_count <= 13: - return 0.0 - - ratio_of_suspicious_range_usage: float = ( - self._suspicious_successive_range_count * 2 - ) / self._character_count - - return ratio_of_suspicious_range_usage - - -class SuperWeirdWordPlugin(MessDetectorPlugin): - def __init__(self) -> None: - self._word_count: int = 0 - self._bad_word_count: int = 0 - self._foreign_long_count: int = 0 - - self._is_current_word_bad: bool = False - self._foreign_long_watch: bool = False - - self._character_count: int = 0 - self._bad_character_count: int = 0 - - self._buffer: str = "" - self._buffer_accent_count: int = 0 - self._buffer_glyph_count: int = 0 - - def eligible(self, character: str) -> bool: - return True - - def feed(self, character: str) -> None: - if character.isalpha(): - self._buffer += character - if is_accentuated(character): - self._buffer_accent_count += 1 - if ( - self._foreign_long_watch is False - and (is_latin(character) is False or is_accentuated(character)) - and is_cjk(character) is False - and is_hangul(character) is False - and is_katakana(character) is False - and is_hiragana(character) is False - and is_thai(character) is False - ): - self._foreign_long_watch = True - if ( - is_cjk(character) - or is_hangul(character) - or is_katakana(character) - or is_hiragana(character) - or is_thai(character) - ): - self._buffer_glyph_count += 1 - return - if not self._buffer: - return - if ( - character.isspace() or is_punctuation(character) or is_separator(character) - ) and self._buffer: - self._word_count += 1 - buffer_length: int = len(self._buffer) - - self._character_count += buffer_length - - if buffer_length >= 4: - if self._buffer_accent_count / buffer_length >= 0.5: - self._is_current_word_bad = True - # Word/Buffer ending with an upper case accentuated letter are so rare, - # that we will consider them all as suspicious. Same weight as foreign_long suspicious. 
- elif ( - is_accentuated(self._buffer[-1]) - and self._buffer[-1].isupper() - and all(_.isupper() for _ in self._buffer) is False - ): - self._foreign_long_count += 1 - self._is_current_word_bad = True - elif self._buffer_glyph_count == 1: - self._is_current_word_bad = True - self._foreign_long_count += 1 - if buffer_length >= 24 and self._foreign_long_watch: - camel_case_dst = [ - i - for c, i in zip(self._buffer, range(0, buffer_length)) - if c.isupper() - ] - probable_camel_cased: bool = False - - if camel_case_dst and (len(camel_case_dst) / buffer_length <= 0.3): - probable_camel_cased = True - - if not probable_camel_cased: - self._foreign_long_count += 1 - self._is_current_word_bad = True - - if self._is_current_word_bad: - self._bad_word_count += 1 - self._bad_character_count += len(self._buffer) - self._is_current_word_bad = False - - self._foreign_long_watch = False - self._buffer = "" - self._buffer_accent_count = 0 - self._buffer_glyph_count = 0 - elif ( - character not in {"<", ">", "-", "=", "~", "|", "_"} - and character.isdigit() is False - and is_symbol(character) - ): - self._is_current_word_bad = True - self._buffer += character - - def reset(self) -> None: # pragma: no cover - self._buffer = "" - self._is_current_word_bad = False - self._foreign_long_watch = False - self._bad_word_count = 0 - self._word_count = 0 - self._character_count = 0 - self._bad_character_count = 0 - self._foreign_long_count = 0 - - @property - def ratio(self) -> float: - if self._word_count <= 10 and self._foreign_long_count == 0: - return 0.0 - - return self._bad_character_count / self._character_count - - -class CjkInvalidStopPlugin(MessDetectorPlugin): - """ - GB(Chinese) based encoding often render the stop incorrectly when the content does not fit and - can be easily detected. Searching for the overuse of '丅' and '丄'. 
- """ - - def __init__(self) -> None: - self._wrong_stop_count: int = 0 - self._cjk_character_count: int = 0 - - def eligible(self, character: str) -> bool: - return True - - def feed(self, character: str) -> None: - if character in {"丅", "丄"}: - self._wrong_stop_count += 1 - return - if is_cjk(character): - self._cjk_character_count += 1 - - def reset(self) -> None: # pragma: no cover - self._wrong_stop_count = 0 - self._cjk_character_count = 0 - - @property - def ratio(self) -> float: - if self._cjk_character_count < 16: - return 0.0 - return self._wrong_stop_count / self._cjk_character_count - - -class ArchaicUpperLowerPlugin(MessDetectorPlugin): - def __init__(self) -> None: - self._buf: bool = False - - self._character_count_since_last_sep: int = 0 - - self._successive_upper_lower_count: int = 0 - self._successive_upper_lower_count_final: int = 0 - - self._character_count: int = 0 - - self._last_alpha_seen: Optional[str] = None - self._current_ascii_only: bool = True - - def eligible(self, character: str) -> bool: - return True - - def feed(self, character: str) -> None: - is_concerned = character.isalpha() and is_case_variable(character) - chunk_sep = is_concerned is False - - if chunk_sep and self._character_count_since_last_sep > 0: - if ( - self._character_count_since_last_sep <= 64 - and character.isdigit() is False - and self._current_ascii_only is False - ): - self._successive_upper_lower_count_final += ( - self._successive_upper_lower_count - ) - - self._successive_upper_lower_count = 0 - self._character_count_since_last_sep = 0 - self._last_alpha_seen = None - self._buf = False - self._character_count += 1 - self._current_ascii_only = True - - return - - if self._current_ascii_only is True and character.isascii() is False: - self._current_ascii_only = False - - if self._last_alpha_seen is not None: - if (character.isupper() and self._last_alpha_seen.islower()) or ( - character.islower() and self._last_alpha_seen.isupper() - ): - if self._buf is True: - self._successive_upper_lower_count += 2 - self._buf = False - else: - self._buf = True - else: - self._buf = False - - self._character_count += 1 - self._character_count_since_last_sep += 1 - self._last_alpha_seen = character - - def reset(self) -> None: # pragma: no cover - self._character_count = 0 - self._character_count_since_last_sep = 0 - self._successive_upper_lower_count = 0 - self._successive_upper_lower_count_final = 0 - self._last_alpha_seen = None - self._buf = False - self._current_ascii_only = True - - @property - def ratio(self) -> float: - if self._character_count == 0: - return 0.0 - - return self._successive_upper_lower_count_final / self._character_count - - -class ArabicIsolatedFormPlugin(MessDetectorPlugin): - def __init__(self) -> None: - self._character_count: int = 0 - self._isolated_form_count: int = 0 - - def reset(self) -> None: # pragma: no cover - self._character_count = 0 - self._isolated_form_count = 0 - - def eligible(self, character: str) -> bool: - return is_arabic(character) - - def feed(self, character: str) -> None: - self._character_count += 1 - - if is_arabic_isolated_form(character): - self._isolated_form_count += 1 - - @property - def ratio(self) -> float: - if self._character_count < 8: - return 0.0 - - isolated_form_usage: float = self._isolated_form_count / self._character_count - - return isolated_form_usage - - -@lru_cache(maxsize=1024) -def is_suspiciously_successive_range( - unicode_range_a: Optional[str], unicode_range_b: Optional[str] -) -> bool: - """ - Determine if two Unicode 
range seen next to each other can be considered as suspicious. - """ - if unicode_range_a is None or unicode_range_b is None: - return True - - if unicode_range_a == unicode_range_b: - return False - - if "Latin" in unicode_range_a and "Latin" in unicode_range_b: - return False - - if "Emoticons" in unicode_range_a or "Emoticons" in unicode_range_b: - return False - - # Latin characters can be accompanied with a combining diacritical mark - # eg. Vietnamese. - if ("Latin" in unicode_range_a or "Latin" in unicode_range_b) and ( - "Combining" in unicode_range_a or "Combining" in unicode_range_b - ): - return False - - keywords_range_a, keywords_range_b = unicode_range_a.split( - " " - ), unicode_range_b.split(" ") - - for el in keywords_range_a: - if el in UNICODE_SECONDARY_RANGE_KEYWORD: - continue - if el in keywords_range_b: - return False - - # Japanese Exception - range_a_jp_chars, range_b_jp_chars = ( - unicode_range_a - in ( - "Hiragana", - "Katakana", - ), - unicode_range_b in ("Hiragana", "Katakana"), - ) - if (range_a_jp_chars or range_b_jp_chars) and ( - "CJK" in unicode_range_a or "CJK" in unicode_range_b - ): - return False - if range_a_jp_chars and range_b_jp_chars: - return False - - if "Hangul" in unicode_range_a or "Hangul" in unicode_range_b: - if "CJK" in unicode_range_a or "CJK" in unicode_range_b: - return False - if unicode_range_a == "Basic Latin" or unicode_range_b == "Basic Latin": - return False - - # Chinese/Japanese use dedicated range for punctuation and/or separators. - if ("CJK" in unicode_range_a or "CJK" in unicode_range_b) or ( - unicode_range_a in ["Katakana", "Hiragana"] - and unicode_range_b in ["Katakana", "Hiragana"] - ): - if "Punctuation" in unicode_range_a or "Punctuation" in unicode_range_b: - return False - if "Forms" in unicode_range_a or "Forms" in unicode_range_b: - return False - if unicode_range_a == "Basic Latin" or unicode_range_b == "Basic Latin": - return False - - return True - - -@lru_cache(maxsize=2048) -def mess_ratio( - decoded_sequence: str, maximum_threshold: float = 0.2, debug: bool = False -) -> float: - """ - Compute a mess ratio given a decoded bytes sequence. The maximum threshold does stop the computation earlier. - """ - - detectors: List[MessDetectorPlugin] = [ - md_class() for md_class in MessDetectorPlugin.__subclasses__() - ] - - length: int = len(decoded_sequence) + 1 - - mean_mess_ratio: float = 0.0 - - if length < 512: - intermediary_mean_mess_ratio_calc: int = 32 - elif length <= 1024: - intermediary_mean_mess_ratio_calc = 64 - else: - intermediary_mean_mess_ratio_calc = 128 - - for character, index in zip(decoded_sequence + "\n", range(length)): - for detector in detectors: - if detector.eligible(character): - detector.feed(character) - - if ( - index > 0 and index % intermediary_mean_mess_ratio_calc == 0 - ) or index == length - 1: - mean_mess_ratio = sum(dt.ratio for dt in detectors) - - if mean_mess_ratio >= maximum_threshold: - break - - if debug: - logger = getLogger("charset_normalizer") - - logger.log( - TRACE, - "Mess-detector extended-analysis start. 
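The `is_suspiciously_successive_range()` helper above flags adjacent characters whose Unicode ranges rarely co-occur in legitimate text. A small sketch of its behaviour, again assuming the PyPI package rather than the vendored copy:

```python
# Illustrative sketch only, using the helper removed above (PyPI package assumed).
from charset_normalizer.md import is_suspiciously_successive_range

print(is_suspiciously_successive_range("Basic Latin", "Basic Latin"))  # False: same range
print(is_suspiciously_successive_range("Basic Latin", "Cyrillic"))     # True: unrelated ranges
print(is_suspiciously_successive_range("Hiragana", "Katakana"))        # False: Japanese exception
```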
" - f"intermediary_mean_mess_ratio_calc={intermediary_mean_mess_ratio_calc} mean_mess_ratio={mean_mess_ratio} " - f"maximum_threshold={maximum_threshold}", - ) - - if len(decoded_sequence) > 16: - logger.log(TRACE, f"Starting with: {decoded_sequence[:16]}") - logger.log(TRACE, f"Ending with: {decoded_sequence[-16::]}") - - for dt in detectors: # pragma: nocover - logger.log(TRACE, f"{dt.__class__}: {dt.ratio}") - - return round(mean_mess_ratio, 3) diff --git a/env/lib/python3.10/site-packages/charset_normalizer/md__mypyc.cpython-310-x86_64-linux-gnu.so b/env/lib/python3.10/site-packages/charset_normalizer/md__mypyc.cpython-310-x86_64-linux-gnu.so deleted file mode 100755 index ae6120a..0000000 Binary files a/env/lib/python3.10/site-packages/charset_normalizer/md__mypyc.cpython-310-x86_64-linux-gnu.so and /dev/null differ diff --git a/env/lib/python3.10/site-packages/charset_normalizer/models.py b/env/lib/python3.10/site-packages/charset_normalizer/models.py deleted file mode 100644 index 6f6b86b..0000000 --- a/env/lib/python3.10/site-packages/charset_normalizer/models.py +++ /dev/null @@ -1,359 +0,0 @@ -from encodings.aliases import aliases -from hashlib import sha256 -from json import dumps -from re import sub -from typing import Any, Dict, Iterator, List, Optional, Tuple, Union - -from .constant import RE_POSSIBLE_ENCODING_INDICATION, TOO_BIG_SEQUENCE -from .utils import iana_name, is_multi_byte_encoding, unicode_range - - -class CharsetMatch: - def __init__( - self, - payload: bytes, - guessed_encoding: str, - mean_mess_ratio: float, - has_sig_or_bom: bool, - languages: "CoherenceMatches", - decoded_payload: Optional[str] = None, - preemptive_declaration: Optional[str] = None, - ): - self._payload: bytes = payload - - self._encoding: str = guessed_encoding - self._mean_mess_ratio: float = mean_mess_ratio - self._languages: CoherenceMatches = languages - self._has_sig_or_bom: bool = has_sig_or_bom - self._unicode_ranges: Optional[List[str]] = None - - self._leaves: List[CharsetMatch] = [] - self._mean_coherence_ratio: float = 0.0 - - self._output_payload: Optional[bytes] = None - self._output_encoding: Optional[str] = None - - self._string: Optional[str] = decoded_payload - - self._preemptive_declaration: Optional[str] = preemptive_declaration - - def __eq__(self, other: object) -> bool: - if not isinstance(other, CharsetMatch): - if isinstance(other, str): - return iana_name(other) == self.encoding - return False - return self.encoding == other.encoding and self.fingerprint == other.fingerprint - - def __lt__(self, other: object) -> bool: - """ - Implemented to make sorted available upon CharsetMatches items. - """ - if not isinstance(other, CharsetMatch): - raise ValueError - - chaos_difference: float = abs(self.chaos - other.chaos) - coherence_difference: float = abs(self.coherence - other.coherence) - - # Below 1% difference --> Use Coherence - if chaos_difference < 0.01 and coherence_difference > 0.02: - return self.coherence > other.coherence - elif chaos_difference < 0.01 and coherence_difference <= 0.02: - # When having a difficult decision, use the result that decoded as many multi-byte as possible. - # preserve RAM usage! 
- if len(self._payload) >= TOO_BIG_SEQUENCE: - return self.chaos < other.chaos - return self.multi_byte_usage > other.multi_byte_usage - - return self.chaos < other.chaos - - @property - def multi_byte_usage(self) -> float: - return 1.0 - (len(str(self)) / len(self.raw)) - - def __str__(self) -> str: - # Lazy Str Loading - if self._string is None: - self._string = str(self._payload, self._encoding, "strict") - return self._string - - def __repr__(self) -> str: - return "".format(self.encoding, self.fingerprint) - - def add_submatch(self, other: "CharsetMatch") -> None: - if not isinstance(other, CharsetMatch) or other == self: - raise ValueError( - "Unable to add instance <{}> as a submatch of a CharsetMatch".format( - other.__class__ - ) - ) - - other._string = None # Unload RAM usage; dirty trick. - self._leaves.append(other) - - @property - def encoding(self) -> str: - return self._encoding - - @property - def encoding_aliases(self) -> List[str]: - """ - Encoding name are known by many name, using this could help when searching for IBM855 when it's listed as CP855. - """ - also_known_as: List[str] = [] - for u, p in aliases.items(): - if self.encoding == u: - also_known_as.append(p) - elif self.encoding == p: - also_known_as.append(u) - return also_known_as - - @property - def bom(self) -> bool: - return self._has_sig_or_bom - - @property - def byte_order_mark(self) -> bool: - return self._has_sig_or_bom - - @property - def languages(self) -> List[str]: - """ - Return the complete list of possible languages found in decoded sequence. - Usually not really useful. Returned list may be empty even if 'language' property return something != 'Unknown'. - """ - return [e[0] for e in self._languages] - - @property - def language(self) -> str: - """ - Most probable language found in decoded sequence. If none were detected or inferred, the property will return - "Unknown". - """ - if not self._languages: - # Trying to infer the language based on the given encoding - # Its either English or we should not pronounce ourselves in certain cases. - if "ascii" in self.could_be_from_charset: - return "English" - - # doing it there to avoid circular import - from charset_normalizer.cd import encoding_languages, mb_encoding_languages - - languages = ( - mb_encoding_languages(self.encoding) - if is_multi_byte_encoding(self.encoding) - else encoding_languages(self.encoding) - ) - - if len(languages) == 0 or "Latin Based" in languages: - return "Unknown" - - return languages[0] - - return self._languages[0][0] - - @property - def chaos(self) -> float: - return self._mean_mess_ratio - - @property - def coherence(self) -> float: - if not self._languages: - return 0.0 - return self._languages[0][1] - - @property - def percent_chaos(self) -> float: - return round(self.chaos * 100, ndigits=3) - - @property - def percent_coherence(self) -> float: - return round(self.coherence * 100, ndigits=3) - - @property - def raw(self) -> bytes: - """ - Original untouched bytes. 
- """ - return self._payload - - @property - def submatch(self) -> List["CharsetMatch"]: - return self._leaves - - @property - def has_submatch(self) -> bool: - return len(self._leaves) > 0 - - @property - def alphabets(self) -> List[str]: - if self._unicode_ranges is not None: - return self._unicode_ranges - # list detected ranges - detected_ranges: List[Optional[str]] = [ - unicode_range(char) for char in str(self) - ] - # filter and sort - self._unicode_ranges = sorted(list({r for r in detected_ranges if r})) - return self._unicode_ranges - - @property - def could_be_from_charset(self) -> List[str]: - """ - The complete list of encoding that output the exact SAME str result and therefore could be the originating - encoding. - This list does include the encoding available in property 'encoding'. - """ - return [self._encoding] + [m.encoding for m in self._leaves] - - def output(self, encoding: str = "utf_8") -> bytes: - """ - Method to get re-encoded bytes payload using given target encoding. Default to UTF-8. - Any errors will be simply ignored by the encoder NOT replaced. - """ - if self._output_encoding is None or self._output_encoding != encoding: - self._output_encoding = encoding - decoded_string = str(self) - if ( - self._preemptive_declaration is not None - and self._preemptive_declaration.lower() - not in ["utf-8", "utf8", "utf_8"] - ): - patched_header = sub( - RE_POSSIBLE_ENCODING_INDICATION, - lambda m: m.string[m.span()[0] : m.span()[1]].replace( - m.groups()[0], iana_name(self._output_encoding) # type: ignore[arg-type] - ), - decoded_string[:8192], - 1, - ) - - decoded_string = patched_header + decoded_string[8192:] - - self._output_payload = decoded_string.encode(encoding, "replace") - - return self._output_payload # type: ignore - - @property - def fingerprint(self) -> str: - """ - Retrieve the unique SHA256 computed using the transformed (re-encoded) payload. Not the original one. - """ - return sha256(self.output()).hexdigest() - - -class CharsetMatches: - """ - Container with every CharsetMatch items ordered by default from most probable to the less one. - Act like a list(iterable) but does not implements all related methods. - """ - - def __init__(self, results: Optional[List[CharsetMatch]] = None): - self._results: List[CharsetMatch] = sorted(results) if results else [] - - def __iter__(self) -> Iterator[CharsetMatch]: - yield from self._results - - def __getitem__(self, item: Union[int, str]) -> CharsetMatch: - """ - Retrieve a single item either by its position or encoding name (alias may be used here). - Raise KeyError upon invalid index or encoding not present in results. - """ - if isinstance(item, int): - return self._results[item] - if isinstance(item, str): - item = iana_name(item, False) - for result in self._results: - if item in result.could_be_from_charset: - return result - raise KeyError - - def __len__(self) -> int: - return len(self._results) - - def __bool__(self) -> bool: - return len(self._results) > 0 - - def append(self, item: CharsetMatch) -> None: - """ - Insert a single match. Will be inserted accordingly to preserve sort. - Can be inserted as a submatch. 
- """ - if not isinstance(item, CharsetMatch): - raise ValueError( - "Cannot append instance '{}' to CharsetMatches".format( - str(item.__class__) - ) - ) - # We should disable the submatch factoring when the input file is too heavy (conserve RAM usage) - if len(item.raw) < TOO_BIG_SEQUENCE: - for match in self._results: - if match.fingerprint == item.fingerprint and match.chaos == item.chaos: - match.add_submatch(item) - return - self._results.append(item) - self._results = sorted(self._results) - - def best(self) -> Optional["CharsetMatch"]: - """ - Simply return the first match. Strict equivalent to matches[0]. - """ - if not self._results: - return None - return self._results[0] - - def first(self) -> Optional["CharsetMatch"]: - """ - Redundant method, call the method best(). Kept for BC reasons. - """ - return self.best() - - -CoherenceMatch = Tuple[str, float] -CoherenceMatches = List[CoherenceMatch] - - -class CliDetectionResult: - def __init__( - self, - path: str, - encoding: Optional[str], - encoding_aliases: List[str], - alternative_encodings: List[str], - language: str, - alphabets: List[str], - has_sig_or_bom: bool, - chaos: float, - coherence: float, - unicode_path: Optional[str], - is_preferred: bool, - ): - self.path: str = path - self.unicode_path: Optional[str] = unicode_path - self.encoding: Optional[str] = encoding - self.encoding_aliases: List[str] = encoding_aliases - self.alternative_encodings: List[str] = alternative_encodings - self.language: str = language - self.alphabets: List[str] = alphabets - self.has_sig_or_bom: bool = has_sig_or_bom - self.chaos: float = chaos - self.coherence: float = coherence - self.is_preferred: bool = is_preferred - - @property - def __dict__(self) -> Dict[str, Any]: # type: ignore - return { - "path": self.path, - "encoding": self.encoding, - "encoding_aliases": self.encoding_aliases, - "alternative_encodings": self.alternative_encodings, - "language": self.language, - "alphabets": self.alphabets, - "has_sig_or_bom": self.has_sig_or_bom, - "chaos": self.chaos, - "coherence": self.coherence, - "unicode_path": self.unicode_path, - "is_preferred": self.is_preferred, - } - - def to_json(self) -> str: - return dumps(self.__dict__, ensure_ascii=True, indent=4) diff --git a/env/lib/python3.10/site-packages/charset_normalizer/py.typed b/env/lib/python3.10/site-packages/charset_normalizer/py.typed deleted file mode 100644 index e69de29..0000000 diff --git a/env/lib/python3.10/site-packages/charset_normalizer/utils.py b/env/lib/python3.10/site-packages/charset_normalizer/utils.py deleted file mode 100644 index e5cbbf4..0000000 --- a/env/lib/python3.10/site-packages/charset_normalizer/utils.py +++ /dev/null @@ -1,421 +0,0 @@ -import importlib -import logging -import unicodedata -from codecs import IncrementalDecoder -from encodings.aliases import aliases -from functools import lru_cache -from re import findall -from typing import Generator, List, Optional, Set, Tuple, Union - -from _multibytecodec import MultibyteIncrementalDecoder - -from .constant import ( - ENCODING_MARKS, - IANA_SUPPORTED_SIMILAR, - RE_POSSIBLE_ENCODING_INDICATION, - UNICODE_RANGES_COMBINED, - UNICODE_SECONDARY_RANGE_KEYWORD, - UTF8_MAXIMAL_ALLOCATION, -) - - -@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) -def is_accentuated(character: str) -> bool: - try: - description: str = unicodedata.name(character) - except ValueError: - return False - return ( - "WITH GRAVE" in description - or "WITH ACUTE" in description - or "WITH CEDILLA" in description - or "WITH DIAERESIS" in 
description - or "WITH CIRCUMFLEX" in description - or "WITH TILDE" in description - or "WITH MACRON" in description - or "WITH RING ABOVE" in description - ) - - -@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) -def remove_accent(character: str) -> str: - decomposed: str = unicodedata.decomposition(character) - if not decomposed: - return character - - codes: List[str] = decomposed.split(" ") - - return chr(int(codes[0], 16)) - - -@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) -def unicode_range(character: str) -> Optional[str]: - """ - Retrieve the Unicode range official name from a single character. - """ - character_ord: int = ord(character) - - for range_name, ord_range in UNICODE_RANGES_COMBINED.items(): - if character_ord in ord_range: - return range_name - - return None - - -@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) -def is_latin(character: str) -> bool: - try: - description: str = unicodedata.name(character) - except ValueError: - return False - return "LATIN" in description - - -@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) -def is_punctuation(character: str) -> bool: - character_category: str = unicodedata.category(character) - - if "P" in character_category: - return True - - character_range: Optional[str] = unicode_range(character) - - if character_range is None: - return False - - return "Punctuation" in character_range - - -@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) -def is_symbol(character: str) -> bool: - character_category: str = unicodedata.category(character) - - if "S" in character_category or "N" in character_category: - return True - - character_range: Optional[str] = unicode_range(character) - - if character_range is None: - return False - - return "Forms" in character_range and character_category != "Lo" - - -@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) -def is_emoticon(character: str) -> bool: - character_range: Optional[str] = unicode_range(character) - - if character_range is None: - return False - - return "Emoticons" in character_range or "Pictographs" in character_range - - -@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) -def is_separator(character: str) -> bool: - if character.isspace() or character in {"|", "+", "<", ">"}: - return True - - character_category: str = unicodedata.category(character) - - return "Z" in character_category or character_category in {"Po", "Pd", "Pc"} - - -@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) -def is_case_variable(character: str) -> bool: - return character.islower() != character.isupper() - - -@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) -def is_cjk(character: str) -> bool: - try: - character_name = unicodedata.name(character) - except ValueError: - return False - - return "CJK" in character_name - - -@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) -def is_hiragana(character: str) -> bool: - try: - character_name = unicodedata.name(character) - except ValueError: - return False - - return "HIRAGANA" in character_name - - -@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) -def is_katakana(character: str) -> bool: - try: - character_name = unicodedata.name(character) - except ValueError: - return False - - return "KATAKANA" in character_name - - -@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) -def is_hangul(character: str) -> bool: - try: - character_name = unicodedata.name(character) - except ValueError: - return False - - return "HANGUL" in character_name - - -@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) -def is_thai(character: str) -> bool: - try: - character_name = unicodedata.name(character) - except ValueError: - return False - - return "THAI" 
in character_name - - -@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) -def is_arabic(character: str) -> bool: - try: - character_name = unicodedata.name(character) - except ValueError: - return False - - return "ARABIC" in character_name - - -@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) -def is_arabic_isolated_form(character: str) -> bool: - try: - character_name = unicodedata.name(character) - except ValueError: - return False - - return "ARABIC" in character_name and "ISOLATED FORM" in character_name - - -@lru_cache(maxsize=len(UNICODE_RANGES_COMBINED)) -def is_unicode_range_secondary(range_name: str) -> bool: - return any(keyword in range_name for keyword in UNICODE_SECONDARY_RANGE_KEYWORD) - - -@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION) -def is_unprintable(character: str) -> bool: - return ( - character.isspace() is False # includes \n \t \r \v - and character.isprintable() is False - and character != "\x1A" # Why? Its the ASCII substitute character. - and character != "\ufeff" # bug discovered in Python, - # Zero Width No-Break Space located in Arabic Presentation Forms-B, Unicode 1.1 not acknowledged as space. - ) - - -def any_specified_encoding(sequence: bytes, search_zone: int = 8192) -> Optional[str]: - """ - Extract using ASCII-only decoder any specified encoding in the first n-bytes. - """ - if not isinstance(sequence, bytes): - raise TypeError - - seq_len: int = len(sequence) - - results: List[str] = findall( - RE_POSSIBLE_ENCODING_INDICATION, - sequence[: min(seq_len, search_zone)].decode("ascii", errors="ignore"), - ) - - if len(results) == 0: - return None - - for specified_encoding in results: - specified_encoding = specified_encoding.lower().replace("-", "_") - - encoding_alias: str - encoding_iana: str - - for encoding_alias, encoding_iana in aliases.items(): - if encoding_alias == specified_encoding: - return encoding_iana - if encoding_iana == specified_encoding: - return encoding_iana - - return None - - -@lru_cache(maxsize=128) -def is_multi_byte_encoding(name: str) -> bool: - """ - Verify is a specific encoding is a multi byte one based on it IANA name - """ - return name in { - "utf_8", - "utf_8_sig", - "utf_16", - "utf_16_be", - "utf_16_le", - "utf_32", - "utf_32_le", - "utf_32_be", - "utf_7", - } or issubclass( - importlib.import_module("encodings.{}".format(name)).IncrementalDecoder, - MultibyteIncrementalDecoder, - ) - - -def identify_sig_or_bom(sequence: bytes) -> Tuple[Optional[str], bytes]: - """ - Identify and extract SIG/BOM in given sequence. 
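The `utils.py` module being removed here is a collection of cached character-classification helpers used by the detectors above. A few representative calls, assuming the PyPI package (the example characters are arbitrary):

```python
# Illustrative sketch only (PyPI package assumed).
from charset_normalizer.utils import iana_name, is_accentuated, unicode_range

print(is_accentuated("é"))   # True: "WITH ACUTE" appears in the character's Unicode name
print(unicode_range("é"))    # the character's Unicode block, e.g. Latin-1 Supplement
print(iana_name("UTF-8"))    # normalized to the IANA-style codec name "utf_8"
```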
- """ - - for iana_encoding in ENCODING_MARKS: - marks: Union[bytes, List[bytes]] = ENCODING_MARKS[iana_encoding] - - if isinstance(marks, bytes): - marks = [marks] - - for mark in marks: - if sequence.startswith(mark): - return iana_encoding, mark - - return None, b"" - - -def should_strip_sig_or_bom(iana_encoding: str) -> bool: - return iana_encoding not in {"utf_16", "utf_32"} - - -def iana_name(cp_name: str, strict: bool = True) -> str: - cp_name = cp_name.lower().replace("-", "_") - - encoding_alias: str - encoding_iana: str - - for encoding_alias, encoding_iana in aliases.items(): - if cp_name in [encoding_alias, encoding_iana]: - return encoding_iana - - if strict: - raise ValueError("Unable to retrieve IANA for '{}'".format(cp_name)) - - return cp_name - - -def range_scan(decoded_sequence: str) -> List[str]: - ranges: Set[str] = set() - - for character in decoded_sequence: - character_range: Optional[str] = unicode_range(character) - - if character_range is None: - continue - - ranges.add(character_range) - - return list(ranges) - - -def cp_similarity(iana_name_a: str, iana_name_b: str) -> float: - if is_multi_byte_encoding(iana_name_a) or is_multi_byte_encoding(iana_name_b): - return 0.0 - - decoder_a = importlib.import_module( - "encodings.{}".format(iana_name_a) - ).IncrementalDecoder - decoder_b = importlib.import_module( - "encodings.{}".format(iana_name_b) - ).IncrementalDecoder - - id_a: IncrementalDecoder = decoder_a(errors="ignore") - id_b: IncrementalDecoder = decoder_b(errors="ignore") - - character_match_count: int = 0 - - for i in range(255): - to_be_decoded: bytes = bytes([i]) - if id_a.decode(to_be_decoded) == id_b.decode(to_be_decoded): - character_match_count += 1 - - return character_match_count / 254 - - -def is_cp_similar(iana_name_a: str, iana_name_b: str) -> bool: - """ - Determine if two code page are at least 80% similar. IANA_SUPPORTED_SIMILAR dict was generated using - the function cp_similarity. - """ - return ( - iana_name_a in IANA_SUPPORTED_SIMILAR - and iana_name_b in IANA_SUPPORTED_SIMILAR[iana_name_a] - ) - - -def set_logging_handler( - name: str = "charset_normalizer", - level: int = logging.INFO, - format_string: str = "%(asctime)s | %(levelname)s | %(message)s", -) -> None: - logger = logging.getLogger(name) - logger.setLevel(level) - - handler = logging.StreamHandler() - handler.setFormatter(logging.Formatter(format_string)) - logger.addHandler(handler) - - -def cut_sequence_chunks( - sequences: bytes, - encoding_iana: str, - offsets: range, - chunk_size: int, - bom_or_sig_available: bool, - strip_sig_or_bom: bool, - sig_payload: bytes, - is_multi_byte_decoder: bool, - decoded_payload: Optional[str] = None, -) -> Generator[str, None, None]: - if decoded_payload and is_multi_byte_decoder is False: - for i in offsets: - chunk = decoded_payload[i : i + chunk_size] - if not chunk: - break - yield chunk - else: - for i in offsets: - chunk_end = i + chunk_size - if chunk_end > len(sequences) + 8: - continue - - cut_sequence = sequences[i : i + chunk_size] - - if bom_or_sig_available and strip_sig_or_bom is False: - cut_sequence = sig_payload + cut_sequence - - chunk = cut_sequence.decode( - encoding_iana, - errors="ignore" if is_multi_byte_decoder else "strict", - ) - - # multi-byte bad cutting detector and adjustment - # not the cleanest way to perform that fix but clever enough for now. 
- if is_multi_byte_decoder and i > 0: - chunk_partial_size_chk: int = min(chunk_size, 16) - - if ( - decoded_payload - and chunk[:chunk_partial_size_chk] not in decoded_payload - ): - for j in range(i, i - 4, -1): - cut_sequence = sequences[j:chunk_end] - - if bom_or_sig_available and strip_sig_or_bom is False: - cut_sequence = sig_payload + cut_sequence - - chunk = cut_sequence.decode(encoding_iana, errors="ignore") - - if chunk[:chunk_partial_size_chk] in decoded_payload: - break - - yield chunk diff --git a/env/lib/python3.10/site-packages/charset_normalizer/version.py b/env/lib/python3.10/site-packages/charset_normalizer/version.py deleted file mode 100644 index 699990e..0000000 --- a/env/lib/python3.10/site-packages/charset_normalizer/version.py +++ /dev/null @@ -1,6 +0,0 @@ -""" -Expose version -""" - -__version__ = "3.4.0" -VERSION = __version__.split(".") diff --git a/env/lib/python3.10/site-packages/cryptography-43.0.3.dist-info/INSTALLER b/env/lib/python3.10/site-packages/cryptography-43.0.3.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/env/lib/python3.10/site-packages/cryptography-43.0.3.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python3.10/site-packages/cryptography-43.0.3.dist-info/METADATA b/env/lib/python3.10/site-packages/cryptography-43.0.3.dist-info/METADATA deleted file mode 100644 index 68b17eb..0000000 --- a/env/lib/python3.10/site-packages/cryptography-43.0.3.dist-info/METADATA +++ /dev/null @@ -1,138 +0,0 @@ -Metadata-Version: 2.3 -Name: cryptography -Version: 43.0.3 -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: Apache Software License -Classifier: License :: OSI Approved :: BSD License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Operating System :: POSIX -Classifier: Operating System :: POSIX :: BSD -Classifier: Operating System :: POSIX :: Linux -Classifier: Operating System :: Microsoft :: Windows -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3 :: Only -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Classifier: Programming Language :: Python :: 3.12 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Python :: Implementation :: PyPy -Classifier: Topic :: Security :: Cryptography -Requires-Dist: cffi >=1.12 ; platform_python_implementation != 'PyPy' -Requires-Dist: bcrypt >=3.1.5 ; extra == 'ssh' -Requires-Dist: nox ; extra == 'nox' -Requires-Dist: cryptography-vectors ==43.0.3 ; extra == 'test' -Requires-Dist: pytest >=6.2.0 ; extra == 'test' -Requires-Dist: pytest-benchmark ; extra == 'test' -Requires-Dist: pytest-cov ; extra == 'test' -Requires-Dist: pytest-xdist ; extra == 'test' -Requires-Dist: pretend ; extra == 'test' -Requires-Dist: certifi ; extra == 'test' -Requires-Dist: pytest-randomly ; extra == 'test-randomorder' -Requires-Dist: sphinx >=5.3.0 ; extra == 'docs' -Requires-Dist: sphinx-rtd-theme >=1.1.1 ; extra == 'docs' -Requires-Dist: pyenchant >=1.6.11 ; extra == 'docstest' -Requires-Dist: readme-renderer ; extra == 'docstest' -Requires-Dist: sphinxcontrib-spelling >=4.0.1 ; extra == 
'docstest' -Requires-Dist: build ; extra == 'sdist' -Requires-Dist: ruff ; extra == 'pep8test' -Requires-Dist: mypy ; extra == 'pep8test' -Requires-Dist: check-sdist ; extra == 'pep8test' -Requires-Dist: click ; extra == 'pep8test' -Provides-Extra: ssh -Provides-Extra: nox -Provides-Extra: test -Provides-Extra: test-randomorder -Provides-Extra: docs -Provides-Extra: docstest -Provides-Extra: sdist -Provides-Extra: pep8test -License-File: LICENSE -License-File: LICENSE.APACHE -License-File: LICENSE.BSD -Summary: cryptography is a package which provides cryptographic recipes and primitives to Python developers. -Author: The cryptography developers -Author-email: The Python Cryptographic Authority and individual contributors -License: Apache-2.0 OR BSD-3-Clause -Requires-Python: >=3.7 -Description-Content-Type: text/x-rst; charset=UTF-8 -Project-URL: homepage, https://github.com/pyca/cryptography -Project-URL: documentation, https://cryptography.io/ -Project-URL: source, https://github.com/pyca/cryptography/ -Project-URL: issues, https://github.com/pyca/cryptography/issues -Project-URL: changelog, https://cryptography.io/en/latest/changelog/ - -pyca/cryptography -================= - -.. image:: https://img.shields.io/pypi/v/cryptography.svg - :target: https://pypi.org/project/cryptography/ - :alt: Latest Version - -.. image:: https://readthedocs.org/projects/cryptography/badge/?version=latest - :target: https://cryptography.io - :alt: Latest Docs - -.. image:: https://github.com/pyca/cryptography/workflows/CI/badge.svg?branch=main - :target: https://github.com/pyca/cryptography/actions?query=workflow%3ACI+branch%3Amain - - -``cryptography`` is a package which provides cryptographic recipes and -primitives to Python developers. Our goal is for it to be your "cryptographic -standard library". It supports Python 3.7+ and PyPy3 7.3.11+. - -``cryptography`` includes both high level recipes and low level interfaces to -common cryptographic algorithms such as symmetric ciphers, message digests, and -key derivation functions. For example, to encrypt something with -``cryptography``'s high level symmetric encryption recipe: - -.. code-block:: pycon - - >>> from cryptography.fernet import Fernet - >>> # Put this somewhere safe! - >>> key = Fernet.generate_key() - >>> f = Fernet(key) - >>> token = f.encrypt(b"A really secret message. Not for prying eyes.") - >>> token - b'...' - >>> f.decrypt(token) - b'A really secret message. Not for prying eyes.' - -You can find more information in the `documentation`_. - -You can install ``cryptography`` with: - -.. code-block:: console - - $ pip install cryptography - -For full details see `the installation documentation`_. - -Discussion -~~~~~~~~~~ - -If you run into bugs, you can file them in our `issue tracker`_. - -We maintain a `cryptography-dev`_ mailing list for development discussion. - -You can also join ``#pyca`` on ``irc.libera.chat`` to ask questions or get -involved. - -Security -~~~~~~~~ - -Need to report a security issue? Please consult our `security reporting`_ -documentation. - - -.. _`documentation`: https://cryptography.io/ -.. _`the installation documentation`: https://cryptography.io/en/latest/installation/ -.. _`issue tracker`: https://github.com/pyca/cryptography/issues -.. _`cryptography-dev`: https://mail.python.org/mailman/listinfo/cryptography-dev -.. 
_`security reporting`: https://cryptography.io/en/latest/security/ - diff --git a/env/lib/python3.10/site-packages/cryptography-43.0.3.dist-info/RECORD b/env/lib/python3.10/site-packages/cryptography-43.0.3.dist-info/RECORD deleted file mode 100644 index a2f4543..0000000 --- a/env/lib/python3.10/site-packages/cryptography-43.0.3.dist-info/RECORD +++ /dev/null @@ -1,173 +0,0 @@ -cryptography-43.0.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -cryptography-43.0.3.dist-info/METADATA,sha256=6zbg5CUehHnvNpZEQHVe8ivt1BG6h6k_cm-o5bsOZLA,5440 -cryptography-43.0.3.dist-info/RECORD,, -cryptography-43.0.3.dist-info/WHEEL,sha256=5SNCVD9cb88a-xAIrDHIo1CvpgNriOYcNgb4b8rPcOw,107 -cryptography-43.0.3.dist-info/license_files/LICENSE,sha256=Pgx8CRqUi4JTO6mP18u0BDLW8amsv4X1ki0vmak65rs,197 -cryptography-43.0.3.dist-info/license_files/LICENSE.APACHE,sha256=qsc7MUj20dcRHbyjIJn2jSbGRMaBOuHk8F9leaomY_4,11360 -cryptography-43.0.3.dist-info/license_files/LICENSE.BSD,sha256=YCxMdILeZHndLpeTzaJ15eY9dz2s0eymiSMqtwCPtPs,1532 -cryptography/__about__.py,sha256=-FkHKD9mSuEfH37wsSKnQzJZmL5zUAUTpB5OeUQjPE0,445 -cryptography/__init__.py,sha256=mthuUrTd4FROCpUYrTIqhjz6s6T9djAZrV7nZ1oMm2o,364 -cryptography/__pycache__/__about__.cpython-310.pyc,, -cryptography/__pycache__/__init__.cpython-310.pyc,, -cryptography/__pycache__/exceptions.cpython-310.pyc,, -cryptography/__pycache__/fernet.cpython-310.pyc,, -cryptography/__pycache__/utils.cpython-310.pyc,, -cryptography/exceptions.py,sha256=835EWILc2fwxw-gyFMriciC2SqhViETB10LBSytnDIc,1087 -cryptography/fernet.py,sha256=aPj82w-Z_1GBXUtWRUsZdVbMwRo5Mbjj0wkA9wG4rkw,6696 -cryptography/hazmat/__init__.py,sha256=5IwrLWrVp0AjEr_4FdWG_V057NSJGY_W4egNNsuct0g,455 -cryptography/hazmat/__pycache__/__init__.cpython-310.pyc,, -cryptography/hazmat/__pycache__/_oid.cpython-310.pyc,, -cryptography/hazmat/_oid.py,sha256=e9yLmxtdQtuL94ztQv3SGtt_ea1Mx6aUwGftJsP6EXk,15201 -cryptography/hazmat/backends/__init__.py,sha256=O5jvKFQdZnXhKeqJ-HtulaEL9Ni7mr1mDzZY5kHlYhI,361 -cryptography/hazmat/backends/__pycache__/__init__.cpython-310.pyc,, -cryptography/hazmat/backends/openssl/__init__.py,sha256=p3jmJfnCag9iE5sdMrN6VvVEu55u46xaS_IjoI0SrmA,305 -cryptography/hazmat/backends/openssl/__pycache__/__init__.cpython-310.pyc,, -cryptography/hazmat/backends/openssl/__pycache__/backend.cpython-310.pyc,, -cryptography/hazmat/backends/openssl/backend.py,sha256=pUXUbugLwMm2Gls-h5U5fw2RvepaNjEvnao6CTmL1xQ,9648 -cryptography/hazmat/bindings/__init__.py,sha256=s9oKCQ2ycFdXoERdS1imafueSkBsL9kvbyfghaauZ9Y,180 -cryptography/hazmat/bindings/__pycache__/__init__.cpython-310.pyc,, -cryptography/hazmat/bindings/_rust.abi3.so,sha256=QrghdFa6x-vG5lFvGVfv-slqoy0UA9a8eHmtp9hzGCk,10862344 -cryptography/hazmat/bindings/_rust/__init__.pyi,sha256=wb1OT76lG19vjq97_q2MM3qdJlQhyloXfVbKFDmRse4,737 -cryptography/hazmat/bindings/_rust/_openssl.pyi,sha256=mpNJLuYLbCVrd5i33FBTmWwL_55Dw7JPkSLlSX9Q7oI,230 -cryptography/hazmat/bindings/_rust/asn1.pyi,sha256=BrGjC8J6nwuS-r3EVcdXJB8ndotfY9mbQYOfpbPG0HA,354 -cryptography/hazmat/bindings/_rust/exceptions.pyi,sha256=exXr2xw_0pB1kk93cYbM3MohbzoUkjOms1ZMUi0uQZE,640 -cryptography/hazmat/bindings/_rust/ocsp.pyi,sha256=R-xJ-XmJZ1lOk-fWHHvRnP3QNTCFnKv-l3xlNWfLVt4,868 -cryptography/hazmat/bindings/_rust/openssl/__init__.pyi,sha256=Lvn250QMdPyeF-hoBF6rkQgHLBJxVauXCb8i8uYTomQ,1368 -cryptography/hazmat/bindings/_rust/openssl/aead.pyi,sha256=i0gA3jUQ4rkJXTGGZrq-AuY-VQLN31lyDeWuDZ0zJYw,2553 
-cryptography/hazmat/bindings/_rust/openssl/ciphers.pyi,sha256=iK0ZhQ-WyCQbjaraaFgK6q4PpD-7Rf5RDHkFD3YEW_g,1301 -cryptography/hazmat/bindings/_rust/openssl/cmac.pyi,sha256=nPH0X57RYpsAkRowVpjQiHE566ThUTx7YXrsadmrmHk,564 -cryptography/hazmat/bindings/_rust/openssl/dh.pyi,sha256=Z3TC-G04-THtSdAOPLM1h2G7ml5bda1ElZUcn5wpuhk,1564 -cryptography/hazmat/bindings/_rust/openssl/dsa.pyi,sha256=qBtkgj2albt2qFcnZ9UDrhzoNhCVO7HTby5VSf1EXMI,1299 -cryptography/hazmat/bindings/_rust/openssl/ec.pyi,sha256=zJy0pRa5n-_p2dm45PxECB_-B6SVZyNKfjxFDpPqT38,1691 -cryptography/hazmat/bindings/_rust/openssl/ed25519.pyi,sha256=OJsrblS2nHptZctva-pAKFL5q8yPEAkhmjPZpJ6TA94,493 -cryptography/hazmat/bindings/_rust/openssl/ed448.pyi,sha256=SkPHK2HdbYN02TVQEUOgW3iTdiEY7HBE4DijpdkAzmk,475 -cryptography/hazmat/bindings/_rust/openssl/hashes.pyi,sha256=J8HoN0GdtPcjRAfNHr5Elva_nkmQfq63L75_z9dd8Uc,573 -cryptography/hazmat/bindings/_rust/openssl/hmac.pyi,sha256=ZmLJ73pmxcZFC1XosWEiXMRYtvJJor3ZLdCQOJu85Cw,662 -cryptography/hazmat/bindings/_rust/openssl/kdf.pyi,sha256=wPS5c7NLspM2632II0I4iH1RSxZvSRtBOVqmpyQATfk,544 -cryptography/hazmat/bindings/_rust/openssl/keys.pyi,sha256=JSrlGNaW49ZCZ1hcb-YJdS1EAbsMwRbVEcLL0P9OApA,872 -cryptography/hazmat/bindings/_rust/openssl/poly1305.pyi,sha256=9iogF7Q4i81IkOS-IMXp6HvxFF_3cNy_ucrAjVQnn14,540 -cryptography/hazmat/bindings/_rust/openssl/rsa.pyi,sha256=2OQCNSXkxgc-3uw1xiCCloIQTV6p9_kK79Yu0rhZgPc,1364 -cryptography/hazmat/bindings/_rust/openssl/x25519.pyi,sha256=2BKdbrddM_9SMUpdvHKGhb9MNjURCarPxccbUDzHeoA,484 -cryptography/hazmat/bindings/_rust/openssl/x448.pyi,sha256=AoRMWNvCJTiH5L-lkIkCdPlrPLUdJvvfXpIvf1GmxpM,466 -cryptography/hazmat/bindings/_rust/pkcs12.pyi,sha256=afhB_6M8xI1MIE5vxkaDF1jSxA48ib1--NiOxtf6boM,1394 -cryptography/hazmat/bindings/_rust/pkcs7.pyi,sha256=QCmuA0IgDr4iOecUOXgUUeh3BAjJx8ubjz__EnNbyGY,972 -cryptography/hazmat/bindings/_rust/test_support.pyi,sha256=Xo1Gd7bh9rU4HuIS4pm9UwCY6IS1gInvFwmhABLOVO4,936 -cryptography/hazmat/bindings/_rust/x509.pyi,sha256=WLrGmqmFss8dXKhlG_J9nVhoCcodR72xJdCoxEuBtjY,3551 -cryptography/hazmat/bindings/openssl/__init__.py,sha256=s9oKCQ2ycFdXoERdS1imafueSkBsL9kvbyfghaauZ9Y,180 -cryptography/hazmat/bindings/openssl/__pycache__/__init__.cpython-310.pyc,, -cryptography/hazmat/bindings/openssl/__pycache__/_conditional.cpython-310.pyc,, -cryptography/hazmat/bindings/openssl/__pycache__/binding.cpython-310.pyc,, -cryptography/hazmat/bindings/openssl/_conditional.py,sha256=dkGKGU-22uR2ZKeOOwaSxEJCGaafgUjb2romWcu03QE,5163 -cryptography/hazmat/bindings/openssl/binding.py,sha256=e1gnFAZBPrkJ3CsiZV-ug6kaPdNTAEROaUFiFrUh71M,4042 -cryptography/hazmat/decrepit/__init__.py,sha256=wHCbWfaefa-fk6THSw9th9fJUsStJo7245wfFBqmduA,216 -cryptography/hazmat/decrepit/__pycache__/__init__.cpython-310.pyc,, -cryptography/hazmat/decrepit/ciphers/__init__.py,sha256=wHCbWfaefa-fk6THSw9th9fJUsStJo7245wfFBqmduA,216 -cryptography/hazmat/decrepit/ciphers/__pycache__/__init__.cpython-310.pyc,, -cryptography/hazmat/decrepit/ciphers/__pycache__/algorithms.cpython-310.pyc,, -cryptography/hazmat/decrepit/ciphers/algorithms.py,sha256=HWA4PKDS2w4D2dQoRerpLRU7Kntt5vJeJC7j--AlZVU,2520 -cryptography/hazmat/primitives/__init__.py,sha256=s9oKCQ2ycFdXoERdS1imafueSkBsL9kvbyfghaauZ9Y,180 -cryptography/hazmat/primitives/__pycache__/__init__.cpython-310.pyc,, -cryptography/hazmat/primitives/__pycache__/_asymmetric.cpython-310.pyc,, -cryptography/hazmat/primitives/__pycache__/_cipheralgorithm.cpython-310.pyc,, -cryptography/hazmat/primitives/__pycache__/_serialization.cpython-310.pyc,, 
-cryptography/hazmat/primitives/__pycache__/cmac.cpython-310.pyc,, -cryptography/hazmat/primitives/__pycache__/constant_time.cpython-310.pyc,, -cryptography/hazmat/primitives/__pycache__/hashes.cpython-310.pyc,, -cryptography/hazmat/primitives/__pycache__/hmac.cpython-310.pyc,, -cryptography/hazmat/primitives/__pycache__/keywrap.cpython-310.pyc,, -cryptography/hazmat/primitives/__pycache__/padding.cpython-310.pyc,, -cryptography/hazmat/primitives/__pycache__/poly1305.cpython-310.pyc,, -cryptography/hazmat/primitives/_asymmetric.py,sha256=RhgcouUB6HTiFDBrR1LxqkMjpUxIiNvQ1r_zJjRG6qQ,532 -cryptography/hazmat/primitives/_cipheralgorithm.py,sha256=gKa0WrLz6K4fqhnGbfBYKDSxgLxsPU0uj_EK2UT47W4,1495 -cryptography/hazmat/primitives/_serialization.py,sha256=qrozc8fw2WZSbjk3DAlSl3ResxpauwJ74ZgGoUL-mj0,5142 -cryptography/hazmat/primitives/asymmetric/__init__.py,sha256=s9oKCQ2ycFdXoERdS1imafueSkBsL9kvbyfghaauZ9Y,180 -cryptography/hazmat/primitives/asymmetric/__pycache__/__init__.cpython-310.pyc,, -cryptography/hazmat/primitives/asymmetric/__pycache__/dh.cpython-310.pyc,, -cryptography/hazmat/primitives/asymmetric/__pycache__/dsa.cpython-310.pyc,, -cryptography/hazmat/primitives/asymmetric/__pycache__/ec.cpython-310.pyc,, -cryptography/hazmat/primitives/asymmetric/__pycache__/ed25519.cpython-310.pyc,, -cryptography/hazmat/primitives/asymmetric/__pycache__/ed448.cpython-310.pyc,, -cryptography/hazmat/primitives/asymmetric/__pycache__/padding.cpython-310.pyc,, -cryptography/hazmat/primitives/asymmetric/__pycache__/rsa.cpython-310.pyc,, -cryptography/hazmat/primitives/asymmetric/__pycache__/types.cpython-310.pyc,, -cryptography/hazmat/primitives/asymmetric/__pycache__/utils.cpython-310.pyc,, -cryptography/hazmat/primitives/asymmetric/__pycache__/x25519.cpython-310.pyc,, -cryptography/hazmat/primitives/asymmetric/__pycache__/x448.cpython-310.pyc,, -cryptography/hazmat/primitives/asymmetric/dh.py,sha256=OOCjMClH1Bf14Sy7jAdwzEeCxFPb8XUe2qePbExvXwc,3420 -cryptography/hazmat/primitives/asymmetric/dsa.py,sha256=xBwdf0pZOgvqjUKcO7Q0L3NxwalYj0SJDUqThemhSmI,3945 -cryptography/hazmat/primitives/asymmetric/ec.py,sha256=lwZmtAwi3PM8lsY1MsNaby_bVi--49OCxwE_1yqKC-A,10428 -cryptography/hazmat/primitives/asymmetric/ed25519.py,sha256=kl63fg7myuMjNTmMoVFeH6iVr0x5FkjNmggxIRTloJk,3423 -cryptography/hazmat/primitives/asymmetric/ed448.py,sha256=2UzEDzzfkPn83UFVFlMZfIMbAixxY09WmQyrwinWTn8,3456 -cryptography/hazmat/primitives/asymmetric/padding.py,sha256=eZcvUqVLbe3u48SunLdeniaPlV4-k6pwBl67OW4jSy8,2885 -cryptography/hazmat/primitives/asymmetric/rsa.py,sha256=nW_Ko7PID9UBJF10GVJOc_1L00ymFsfZDUJYtM5kfGQ,7637 -cryptography/hazmat/primitives/asymmetric/types.py,sha256=LnsOJym-wmPUJ7Knu_7bCNU3kIiELCd6krOaW_JU08I,2996 -cryptography/hazmat/primitives/asymmetric/utils.py,sha256=DPTs6T4F-UhwzFQTh-1fSEpQzazH2jf2xpIro3ItF4o,790 -cryptography/hazmat/primitives/asymmetric/x25519.py,sha256=VGYuRdIYuVBtizpFdNWd2bTrT10JRa1admQdBr08xz8,3341 -cryptography/hazmat/primitives/asymmetric/x448.py,sha256=GKKJBqYLr03VewMF18bXIM941aaWcZIQ4rC02GLLEmw,3374 -cryptography/hazmat/primitives/ciphers/__init__.py,sha256=eyEXmjk6_CZXaOPYDr7vAYGXr29QvzgWL2-4CSolLFs,680 -cryptography/hazmat/primitives/ciphers/__pycache__/__init__.cpython-310.pyc,, -cryptography/hazmat/primitives/ciphers/__pycache__/aead.cpython-310.pyc,, -cryptography/hazmat/primitives/ciphers/__pycache__/algorithms.cpython-310.pyc,, -cryptography/hazmat/primitives/ciphers/__pycache__/base.cpython-310.pyc,, -cryptography/hazmat/primitives/ciphers/__pycache__/modes.cpython-310.pyc,, 
-cryptography/hazmat/primitives/ciphers/aead.py,sha256=Fzlyx7w8KYQakzDp1zWgJnIr62zgZrgVh1u2h4exB54,634 -cryptography/hazmat/primitives/ciphers/algorithms.py,sha256=QvBMDmphRZfNmykij58L5eDkd_2NnCzIpJpyX2QwMxc,4223 -cryptography/hazmat/primitives/ciphers/base.py,sha256=tg-XNaKUyETBi7ounGDEL1_ICn-s4FF9LR7moV58blI,4211 -cryptography/hazmat/primitives/ciphers/modes.py,sha256=BFpxEGSaxoeZjrQ4sqpyPDvKClrqfDKIBv7kYtFURhE,8192 -cryptography/hazmat/primitives/cmac.py,sha256=sz_s6H_cYnOvx-VNWdIKhRhe3Ymp8z8J0D3CBqOX3gg,338 -cryptography/hazmat/primitives/constant_time.py,sha256=xdunWT0nf8OvKdcqUhhlFKayGp4_PgVJRU2W1wLSr_A,422 -cryptography/hazmat/primitives/hashes.py,sha256=EvDIJBhj83Z7f-oHbsA0TzZLFSDV_Yv8hQRdM4o8FD0,5091 -cryptography/hazmat/primitives/hmac.py,sha256=RpB3z9z5skirCQrm7zQbtnp9pLMnAjrlTUvKqF5aDDc,423 -cryptography/hazmat/primitives/kdf/__init__.py,sha256=4XibZnrYq4hh5xBjWiIXzaYW6FKx8hPbVaa_cB9zS64,750 -cryptography/hazmat/primitives/kdf/__pycache__/__init__.cpython-310.pyc,, -cryptography/hazmat/primitives/kdf/__pycache__/concatkdf.cpython-310.pyc,, -cryptography/hazmat/primitives/kdf/__pycache__/hkdf.cpython-310.pyc,, -cryptography/hazmat/primitives/kdf/__pycache__/kbkdf.cpython-310.pyc,, -cryptography/hazmat/primitives/kdf/__pycache__/pbkdf2.cpython-310.pyc,, -cryptography/hazmat/primitives/kdf/__pycache__/scrypt.cpython-310.pyc,, -cryptography/hazmat/primitives/kdf/__pycache__/x963kdf.cpython-310.pyc,, -cryptography/hazmat/primitives/kdf/concatkdf.py,sha256=bcn4NGXse-EsFl7nlU83e5ilop7TSHcX-CJJS107W80,3686 -cryptography/hazmat/primitives/kdf/hkdf.py,sha256=uhN5L87w4JvtAqQcPh_Ji2TPSc18IDThpaYJiHOWy3A,3015 -cryptography/hazmat/primitives/kdf/kbkdf.py,sha256=eSuLK1sATkamgCAit794jLr7sDNlu5X0USdcWhwJdmk,9146 -cryptography/hazmat/primitives/kdf/pbkdf2.py,sha256=Xj3YIeX30h2BUaoJAtOo1RMXV_em0-eCG0PU_0FHJzM,1950 -cryptography/hazmat/primitives/kdf/scrypt.py,sha256=4QONhjxA_ZtuQtQ7QV3FnbB8ftrFnM52B4HPfV7hFys,2354 -cryptography/hazmat/primitives/kdf/x963kdf.py,sha256=wCpWmwQjZ2vAu2rlk3R_PX0nINl8WGXYBmlyMOC5iPw,1992 -cryptography/hazmat/primitives/keywrap.py,sha256=XV4Pj2fqSeD-RqZVvY2cA3j5_7RwJSFygYuLfk2ujCo,5650 -cryptography/hazmat/primitives/padding.py,sha256=QUq0n-EAgEan9aQzuTsiJYGKbWiK1nSHkcYjDF1L1ok,5518 -cryptography/hazmat/primitives/poly1305.py,sha256=P5EPQV-RB_FJPahpg01u0Ts4S_PnAmsroxIGXbGeRRo,355 -cryptography/hazmat/primitives/serialization/__init__.py,sha256=jyNx_7NcOEbVRBY4nP9ks0IVXBafbcYnTK27vafPLW8,1653 -cryptography/hazmat/primitives/serialization/__pycache__/__init__.cpython-310.pyc,, -cryptography/hazmat/primitives/serialization/__pycache__/base.cpython-310.pyc,, -cryptography/hazmat/primitives/serialization/__pycache__/pkcs12.cpython-310.pyc,, -cryptography/hazmat/primitives/serialization/__pycache__/pkcs7.cpython-310.pyc,, -cryptography/hazmat/primitives/serialization/__pycache__/ssh.cpython-310.pyc,, -cryptography/hazmat/primitives/serialization/base.py,sha256=ikq5MJIwp_oUnjiaBco_PmQwOTYuGi-XkYUYHKy8Vo0,615 -cryptography/hazmat/primitives/serialization/pkcs12.py,sha256=7vVXbiP7qhhvKAHJT_M8-LBZdbpOwrpWRHWxNrNqzXE,4492 -cryptography/hazmat/primitives/serialization/pkcs7.py,sha256=CNzcsuDMyEFMe3EUii4NfJlQzmakB2hLlfRFYObnHRs,11141 -cryptography/hazmat/primitives/serialization/ssh.py,sha256=VKscMrVdYK5B9PQISjjdRMglRvqa_L3sDNm5vdjVHJY,51915 -cryptography/hazmat/primitives/twofactor/__init__.py,sha256=tmMZGB-g4IU1r7lIFqASU019zr0uPp_wEBYcwdDCKCA,258 -cryptography/hazmat/primitives/twofactor/__pycache__/__init__.cpython-310.pyc,, 
-cryptography/hazmat/primitives/twofactor/__pycache__/hotp.cpython-310.pyc,, -cryptography/hazmat/primitives/twofactor/__pycache__/totp.cpython-310.pyc,, -cryptography/hazmat/primitives/twofactor/hotp.py,sha256=l1YdRMIhfPIuHKkA66keBDHhNbnBAlh6-O44P-OHIK8,2976 -cryptography/hazmat/primitives/twofactor/totp.py,sha256=v0y0xKwtYrP83ypOo5Ofd441RJLOkaFfjmp554jo5F0,1450 -cryptography/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -cryptography/utils.py,sha256=Rp7ppg4XIBVVzNQ6XngGndwkICJoYp6FoFOOgTWLJ7g,3925 -cryptography/x509/__init__.py,sha256=uGdiViR7KFnWGoJFVUStt-e_ufomWc87RQBGAZ7dT-4,7980 -cryptography/x509/__pycache__/__init__.cpython-310.pyc,, -cryptography/x509/__pycache__/base.cpython-310.pyc,, -cryptography/x509/__pycache__/certificate_transparency.cpython-310.pyc,, -cryptography/x509/__pycache__/extensions.cpython-310.pyc,, -cryptography/x509/__pycache__/general_name.cpython-310.pyc,, -cryptography/x509/__pycache__/name.cpython-310.pyc,, -cryptography/x509/__pycache__/ocsp.cpython-310.pyc,, -cryptography/x509/__pycache__/oid.cpython-310.pyc,, -cryptography/x509/__pycache__/verification.cpython-310.pyc,, -cryptography/x509/base.py,sha256=3NbbUn9wPruhmoPO7Cl3trc3SrqV2OFIBBE0P2l05mg,37081 -cryptography/x509/certificate_transparency.py,sha256=6HvzAD0dlSQVxy6tnDhGj0-pisp1MaJ9bxQNRr92inI,2261 -cryptography/x509/extensions.py,sha256=R70KkJ_c5NQ6Kx7Rho0sGJ0Rh-bOuBHjVOFSQGRAFCs,67370 -cryptography/x509/general_name.py,sha256=sP_rV11Qlpsk4x3XXGJY_Mv0Q_s9dtjeLckHsjpLQoQ,7836 -cryptography/x509/name.py,sha256=MYCxCSTQTpzhjxFPZaANqJ9fGrhESH73vPkoay8HSWM,14830 -cryptography/x509/ocsp.py,sha256=P6A02msz5pe-IkUFpvxezHvnEHGvPdXiD3S0wsuf4-I,20003 -cryptography/x509/oid.py,sha256=X8EbhkRTLrGuv9vHZSGqPd9zpvRVsonU_joWAL5LLY8,885 -cryptography/x509/verification.py,sha256=alfx3VaTSb2bMz7_7s788oL90vzgHwBjVINssdz0Gv0,796 diff --git a/env/lib/python3.10/site-packages/cryptography-43.0.3.dist-info/WHEEL b/env/lib/python3.10/site-packages/cryptography-43.0.3.dist-info/WHEEL deleted file mode 100644 index b3e268a..0000000 --- a/env/lib/python3.10/site-packages/cryptography-43.0.3.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: maturin (1.7.0) -Root-Is-Purelib: false -Tag: cp39-abi3-manylinux_2_28_x86_64 - diff --git a/env/lib/python3.10/site-packages/cryptography-43.0.3.dist-info/license_files/LICENSE b/env/lib/python3.10/site-packages/cryptography-43.0.3.dist-info/license_files/LICENSE deleted file mode 100644 index b11f379..0000000 --- a/env/lib/python3.10/site-packages/cryptography-43.0.3.dist-info/license_files/LICENSE +++ /dev/null @@ -1,3 +0,0 @@ -This software is made available under the terms of *either* of the licenses -found in LICENSE.APACHE or LICENSE.BSD. Contributions to cryptography are made -under the terms of *both* these licenses. diff --git a/env/lib/python3.10/site-packages/cryptography-43.0.3.dist-info/license_files/LICENSE.APACHE b/env/lib/python3.10/site-packages/cryptography-43.0.3.dist-info/license_files/LICENSE.APACHE deleted file mode 100644 index 62589ed..0000000 --- a/env/lib/python3.10/site-packages/cryptography-43.0.3.dist-info/license_files/LICENSE.APACHE +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - https://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. 
- - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. 
This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - https://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
diff --git a/env/lib/python3.10/site-packages/cryptography-43.0.3.dist-info/license_files/LICENSE.BSD b/env/lib/python3.10/site-packages/cryptography-43.0.3.dist-info/license_files/LICENSE.BSD deleted file mode 100644 index ec1a29d..0000000 --- a/env/lib/python3.10/site-packages/cryptography-43.0.3.dist-info/license_files/LICENSE.BSD +++ /dev/null @@ -1,27 +0,0 @@ -Copyright (c) Individual contributors. -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - - 1. Redistributions of source code must retain the above copyright notice, - this list of conditions and the following disclaimer. - - 2. Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - - 3. Neither the name of PyCA Cryptography nor the names of its contributors - may be used to endorse or promote products derived from this software - without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR -ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON -ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/env/lib/python3.10/site-packages/cryptography/__about__.py b/env/lib/python3.10/site-packages/cryptography/__about__.py deleted file mode 100644 index 375ffbe..0000000 --- a/env/lib/python3.10/site-packages/cryptography/__about__.py +++ /dev/null @@ -1,17 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -__all__ = [ - "__author__", - "__copyright__", - "__version__", -] - -__version__ = "43.0.3" - - -__author__ = "The Python Cryptographic Authority and individual contributors" -__copyright__ = f"Copyright 2013-2024 {__author__}" diff --git a/env/lib/python3.10/site-packages/cryptography/__init__.py b/env/lib/python3.10/site-packages/cryptography/__init__.py deleted file mode 100644 index d374f75..0000000 --- a/env/lib/python3.10/site-packages/cryptography/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
- -from __future__ import annotations - -from cryptography.__about__ import __author__, __copyright__, __version__ - -__all__ = [ - "__author__", - "__copyright__", - "__version__", -] diff --git a/env/lib/python3.10/site-packages/cryptography/__pycache__/__about__.cpython-310.pyc b/env/lib/python3.10/site-packages/cryptography/__pycache__/__about__.cpython-310.pyc deleted file mode 100644 index 3015b89..0000000 Binary files a/env/lib/python3.10/site-packages/cryptography/__pycache__/__about__.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cryptography/__pycache__/__init__.cpython-310.pyc b/env/lib/python3.10/site-packages/cryptography/__pycache__/__init__.cpython-310.pyc deleted file mode 100644 index 0f396b7..0000000 Binary files a/env/lib/python3.10/site-packages/cryptography/__pycache__/__init__.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cryptography/__pycache__/exceptions.cpython-310.pyc b/env/lib/python3.10/site-packages/cryptography/__pycache__/exceptions.cpython-310.pyc deleted file mode 100644 index fa30ed5..0000000 Binary files a/env/lib/python3.10/site-packages/cryptography/__pycache__/exceptions.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cryptography/__pycache__/fernet.cpython-310.pyc b/env/lib/python3.10/site-packages/cryptography/__pycache__/fernet.cpython-310.pyc deleted file mode 100644 index 21e0085..0000000 Binary files a/env/lib/python3.10/site-packages/cryptography/__pycache__/fernet.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cryptography/__pycache__/utils.cpython-310.pyc b/env/lib/python3.10/site-packages/cryptography/__pycache__/utils.cpython-310.pyc deleted file mode 100644 index 8068120..0000000 Binary files a/env/lib/python3.10/site-packages/cryptography/__pycache__/utils.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cryptography/exceptions.py b/env/lib/python3.10/site-packages/cryptography/exceptions.py deleted file mode 100644 index fe125ea..0000000 --- a/env/lib/python3.10/site-packages/cryptography/exceptions.py +++ /dev/null @@ -1,52 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
- -from __future__ import annotations - -import typing - -from cryptography.hazmat.bindings._rust import exceptions as rust_exceptions - -if typing.TYPE_CHECKING: - from cryptography.hazmat.bindings._rust import openssl as rust_openssl - -_Reasons = rust_exceptions._Reasons - - -class UnsupportedAlgorithm(Exception): - def __init__(self, message: str, reason: _Reasons | None = None) -> None: - super().__init__(message) - self._reason = reason - - -class AlreadyFinalized(Exception): - pass - - -class AlreadyUpdated(Exception): - pass - - -class NotYetFinalized(Exception): - pass - - -class InvalidTag(Exception): - pass - - -class InvalidSignature(Exception): - pass - - -class InternalError(Exception): - def __init__( - self, msg: str, err_code: list[rust_openssl.OpenSSLError] - ) -> None: - super().__init__(msg) - self.err_code = err_code - - -class InvalidKey(Exception): - pass diff --git a/env/lib/python3.10/site-packages/cryptography/fernet.py b/env/lib/python3.10/site-packages/cryptography/fernet.py deleted file mode 100644 index 35ce113..0000000 --- a/env/lib/python3.10/site-packages/cryptography/fernet.py +++ /dev/null @@ -1,215 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -import base64 -import binascii -import os -import time -import typing - -from cryptography import utils -from cryptography.exceptions import InvalidSignature -from cryptography.hazmat.primitives import hashes, padding -from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes -from cryptography.hazmat.primitives.hmac import HMAC - - -class InvalidToken(Exception): - pass - - -_MAX_CLOCK_SKEW = 60 - - -class Fernet: - def __init__( - self, - key: bytes | str, - backend: typing.Any = None, - ) -> None: - try: - key = base64.urlsafe_b64decode(key) - except binascii.Error as exc: - raise ValueError( - "Fernet key must be 32 url-safe base64-encoded bytes." - ) from exc - if len(key) != 32: - raise ValueError( - "Fernet key must be 32 url-safe base64-encoded bytes." 
- ) - - self._signing_key = key[:16] - self._encryption_key = key[16:] - - @classmethod - def generate_key(cls) -> bytes: - return base64.urlsafe_b64encode(os.urandom(32)) - - def encrypt(self, data: bytes) -> bytes: - return self.encrypt_at_time(data, int(time.time())) - - def encrypt_at_time(self, data: bytes, current_time: int) -> bytes: - iv = os.urandom(16) - return self._encrypt_from_parts(data, current_time, iv) - - def _encrypt_from_parts( - self, data: bytes, current_time: int, iv: bytes - ) -> bytes: - utils._check_bytes("data", data) - - padder = padding.PKCS7(algorithms.AES.block_size).padder() - padded_data = padder.update(data) + padder.finalize() - encryptor = Cipher( - algorithms.AES(self._encryption_key), - modes.CBC(iv), - ).encryptor() - ciphertext = encryptor.update(padded_data) + encryptor.finalize() - - basic_parts = ( - b"\x80" - + current_time.to_bytes(length=8, byteorder="big") - + iv - + ciphertext - ) - - h = HMAC(self._signing_key, hashes.SHA256()) - h.update(basic_parts) - hmac = h.finalize() - return base64.urlsafe_b64encode(basic_parts + hmac) - - def decrypt(self, token: bytes | str, ttl: int | None = None) -> bytes: - timestamp, data = Fernet._get_unverified_token_data(token) - if ttl is None: - time_info = None - else: - time_info = (ttl, int(time.time())) - return self._decrypt_data(data, timestamp, time_info) - - def decrypt_at_time( - self, token: bytes | str, ttl: int, current_time: int - ) -> bytes: - if ttl is None: - raise ValueError( - "decrypt_at_time() can only be used with a non-None ttl" - ) - timestamp, data = Fernet._get_unverified_token_data(token) - return self._decrypt_data(data, timestamp, (ttl, current_time)) - - def extract_timestamp(self, token: bytes | str) -> int: - timestamp, data = Fernet._get_unverified_token_data(token) - # Verify the token was not tampered with. 
- self._verify_signature(data) - return timestamp - - @staticmethod - def _get_unverified_token_data(token: bytes | str) -> tuple[int, bytes]: - if not isinstance(token, (str, bytes)): - raise TypeError("token must be bytes or str") - - try: - data = base64.urlsafe_b64decode(token) - except (TypeError, binascii.Error): - raise InvalidToken - - if not data or data[0] != 0x80: - raise InvalidToken - - if len(data) < 9: - raise InvalidToken - - timestamp = int.from_bytes(data[1:9], byteorder="big") - return timestamp, data - - def _verify_signature(self, data: bytes) -> None: - h = HMAC(self._signing_key, hashes.SHA256()) - h.update(data[:-32]) - try: - h.verify(data[-32:]) - except InvalidSignature: - raise InvalidToken - - def _decrypt_data( - self, - data: bytes, - timestamp: int, - time_info: tuple[int, int] | None, - ) -> bytes: - if time_info is not None: - ttl, current_time = time_info - if timestamp + ttl < current_time: - raise InvalidToken - - if current_time + _MAX_CLOCK_SKEW < timestamp: - raise InvalidToken - - self._verify_signature(data) - - iv = data[9:25] - ciphertext = data[25:-32] - decryptor = Cipher( - algorithms.AES(self._encryption_key), modes.CBC(iv) - ).decryptor() - plaintext_padded = decryptor.update(ciphertext) - try: - plaintext_padded += decryptor.finalize() - except ValueError: - raise InvalidToken - unpadder = padding.PKCS7(algorithms.AES.block_size).unpadder() - - unpadded = unpadder.update(plaintext_padded) - try: - unpadded += unpadder.finalize() - except ValueError: - raise InvalidToken - return unpadded - - -class MultiFernet: - def __init__(self, fernets: typing.Iterable[Fernet]): - fernets = list(fernets) - if not fernets: - raise ValueError( - "MultiFernet requires at least one Fernet instance" - ) - self._fernets = fernets - - def encrypt(self, msg: bytes) -> bytes: - return self.encrypt_at_time(msg, int(time.time())) - - def encrypt_at_time(self, msg: bytes, current_time: int) -> bytes: - return self._fernets[0].encrypt_at_time(msg, current_time) - - def rotate(self, msg: bytes | str) -> bytes: - timestamp, data = Fernet._get_unverified_token_data(msg) - for f in self._fernets: - try: - p = f._decrypt_data(data, timestamp, None) - break - except InvalidToken: - pass - else: - raise InvalidToken - - iv = os.urandom(16) - return self._fernets[0]._encrypt_from_parts(p, timestamp, iv) - - def decrypt(self, msg: bytes | str, ttl: int | None = None) -> bytes: - for f in self._fernets: - try: - return f.decrypt(msg, ttl) - except InvalidToken: - pass - raise InvalidToken - - def decrypt_at_time( - self, msg: bytes | str, ttl: int, current_time: int - ) -> bytes: - for f in self._fernets: - try: - return f.decrypt_at_time(msg, ttl, current_time) - except InvalidToken: - pass - raise InvalidToken diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/__init__.py b/env/lib/python3.10/site-packages/cryptography/hazmat/__init__.py deleted file mode 100644 index b9f1187..0000000 --- a/env/lib/python3.10/site-packages/cryptography/hazmat/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -""" -Hazardous Materials - -This is a "Hazardous Materials" module. You should ONLY use it if you're -100% absolutely sure that you know what you're doing because this module -is full of land mines, dragons, and dinosaurs with laser guns. 
-""" diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/__pycache__/__init__.cpython-310.pyc b/env/lib/python3.10/site-packages/cryptography/hazmat/__pycache__/__init__.cpython-310.pyc deleted file mode 100644 index 8412f41..0000000 Binary files a/env/lib/python3.10/site-packages/cryptography/hazmat/__pycache__/__init__.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/__pycache__/_oid.cpython-310.pyc b/env/lib/python3.10/site-packages/cryptography/hazmat/__pycache__/_oid.cpython-310.pyc deleted file mode 100644 index 9cb1544..0000000 Binary files a/env/lib/python3.10/site-packages/cryptography/hazmat/__pycache__/_oid.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/_oid.py b/env/lib/python3.10/site-packages/cryptography/hazmat/_oid.py deleted file mode 100644 index fd5e37d..0000000 --- a/env/lib/python3.10/site-packages/cryptography/hazmat/_oid.py +++ /dev/null @@ -1,313 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -from cryptography.hazmat.bindings._rust import ( - ObjectIdentifier as ObjectIdentifier, -) -from cryptography.hazmat.primitives import hashes - - -class ExtensionOID: - SUBJECT_DIRECTORY_ATTRIBUTES = ObjectIdentifier("2.5.29.9") - SUBJECT_KEY_IDENTIFIER = ObjectIdentifier("2.5.29.14") - KEY_USAGE = ObjectIdentifier("2.5.29.15") - SUBJECT_ALTERNATIVE_NAME = ObjectIdentifier("2.5.29.17") - ISSUER_ALTERNATIVE_NAME = ObjectIdentifier("2.5.29.18") - BASIC_CONSTRAINTS = ObjectIdentifier("2.5.29.19") - NAME_CONSTRAINTS = ObjectIdentifier("2.5.29.30") - CRL_DISTRIBUTION_POINTS = ObjectIdentifier("2.5.29.31") - CERTIFICATE_POLICIES = ObjectIdentifier("2.5.29.32") - POLICY_MAPPINGS = ObjectIdentifier("2.5.29.33") - AUTHORITY_KEY_IDENTIFIER = ObjectIdentifier("2.5.29.35") - POLICY_CONSTRAINTS = ObjectIdentifier("2.5.29.36") - EXTENDED_KEY_USAGE = ObjectIdentifier("2.5.29.37") - FRESHEST_CRL = ObjectIdentifier("2.5.29.46") - INHIBIT_ANY_POLICY = ObjectIdentifier("2.5.29.54") - ISSUING_DISTRIBUTION_POINT = ObjectIdentifier("2.5.29.28") - AUTHORITY_INFORMATION_ACCESS = ObjectIdentifier("1.3.6.1.5.5.7.1.1") - SUBJECT_INFORMATION_ACCESS = ObjectIdentifier("1.3.6.1.5.5.7.1.11") - OCSP_NO_CHECK = ObjectIdentifier("1.3.6.1.5.5.7.48.1.5") - TLS_FEATURE = ObjectIdentifier("1.3.6.1.5.5.7.1.24") - CRL_NUMBER = ObjectIdentifier("2.5.29.20") - DELTA_CRL_INDICATOR = ObjectIdentifier("2.5.29.27") - PRECERT_SIGNED_CERTIFICATE_TIMESTAMPS = ObjectIdentifier( - "1.3.6.1.4.1.11129.2.4.2" - ) - PRECERT_POISON = ObjectIdentifier("1.3.6.1.4.1.11129.2.4.3") - SIGNED_CERTIFICATE_TIMESTAMPS = ObjectIdentifier("1.3.6.1.4.1.11129.2.4.5") - MS_CERTIFICATE_TEMPLATE = ObjectIdentifier("1.3.6.1.4.1.311.21.7") - - -class OCSPExtensionOID: - NONCE = ObjectIdentifier("1.3.6.1.5.5.7.48.1.2") - ACCEPTABLE_RESPONSES = ObjectIdentifier("1.3.6.1.5.5.7.48.1.4") - - -class CRLEntryExtensionOID: - CERTIFICATE_ISSUER = ObjectIdentifier("2.5.29.29") - CRL_REASON = ObjectIdentifier("2.5.29.21") - INVALIDITY_DATE = ObjectIdentifier("2.5.29.24") - - -class NameOID: - COMMON_NAME = ObjectIdentifier("2.5.4.3") - COUNTRY_NAME = ObjectIdentifier("2.5.4.6") - LOCALITY_NAME = ObjectIdentifier("2.5.4.7") - STATE_OR_PROVINCE_NAME = ObjectIdentifier("2.5.4.8") - STREET_ADDRESS = ObjectIdentifier("2.5.4.9") - ORGANIZATION_IDENTIFIER = 
ObjectIdentifier("2.5.4.97") - ORGANIZATION_NAME = ObjectIdentifier("2.5.4.10") - ORGANIZATIONAL_UNIT_NAME = ObjectIdentifier("2.5.4.11") - SERIAL_NUMBER = ObjectIdentifier("2.5.4.5") - SURNAME = ObjectIdentifier("2.5.4.4") - GIVEN_NAME = ObjectIdentifier("2.5.4.42") - TITLE = ObjectIdentifier("2.5.4.12") - INITIALS = ObjectIdentifier("2.5.4.43") - GENERATION_QUALIFIER = ObjectIdentifier("2.5.4.44") - X500_UNIQUE_IDENTIFIER = ObjectIdentifier("2.5.4.45") - DN_QUALIFIER = ObjectIdentifier("2.5.4.46") - PSEUDONYM = ObjectIdentifier("2.5.4.65") - USER_ID = ObjectIdentifier("0.9.2342.19200300.100.1.1") - DOMAIN_COMPONENT = ObjectIdentifier("0.9.2342.19200300.100.1.25") - EMAIL_ADDRESS = ObjectIdentifier("1.2.840.113549.1.9.1") - JURISDICTION_COUNTRY_NAME = ObjectIdentifier("1.3.6.1.4.1.311.60.2.1.3") - JURISDICTION_LOCALITY_NAME = ObjectIdentifier("1.3.6.1.4.1.311.60.2.1.1") - JURISDICTION_STATE_OR_PROVINCE_NAME = ObjectIdentifier( - "1.3.6.1.4.1.311.60.2.1.2" - ) - BUSINESS_CATEGORY = ObjectIdentifier("2.5.4.15") - POSTAL_ADDRESS = ObjectIdentifier("2.5.4.16") - POSTAL_CODE = ObjectIdentifier("2.5.4.17") - INN = ObjectIdentifier("1.2.643.3.131.1.1") - OGRN = ObjectIdentifier("1.2.643.100.1") - SNILS = ObjectIdentifier("1.2.643.100.3") - UNSTRUCTURED_NAME = ObjectIdentifier("1.2.840.113549.1.9.2") - - -class SignatureAlgorithmOID: - RSA_WITH_MD5 = ObjectIdentifier("1.2.840.113549.1.1.4") - RSA_WITH_SHA1 = ObjectIdentifier("1.2.840.113549.1.1.5") - # This is an alternate OID for RSA with SHA1 that is occasionally seen - _RSA_WITH_SHA1 = ObjectIdentifier("1.3.14.3.2.29") - RSA_WITH_SHA224 = ObjectIdentifier("1.2.840.113549.1.1.14") - RSA_WITH_SHA256 = ObjectIdentifier("1.2.840.113549.1.1.11") - RSA_WITH_SHA384 = ObjectIdentifier("1.2.840.113549.1.1.12") - RSA_WITH_SHA512 = ObjectIdentifier("1.2.840.113549.1.1.13") - RSA_WITH_SHA3_224 = ObjectIdentifier("2.16.840.1.101.3.4.3.13") - RSA_WITH_SHA3_256 = ObjectIdentifier("2.16.840.1.101.3.4.3.14") - RSA_WITH_SHA3_384 = ObjectIdentifier("2.16.840.1.101.3.4.3.15") - RSA_WITH_SHA3_512 = ObjectIdentifier("2.16.840.1.101.3.4.3.16") - RSASSA_PSS = ObjectIdentifier("1.2.840.113549.1.1.10") - ECDSA_WITH_SHA1 = ObjectIdentifier("1.2.840.10045.4.1") - ECDSA_WITH_SHA224 = ObjectIdentifier("1.2.840.10045.4.3.1") - ECDSA_WITH_SHA256 = ObjectIdentifier("1.2.840.10045.4.3.2") - ECDSA_WITH_SHA384 = ObjectIdentifier("1.2.840.10045.4.3.3") - ECDSA_WITH_SHA512 = ObjectIdentifier("1.2.840.10045.4.3.4") - ECDSA_WITH_SHA3_224 = ObjectIdentifier("2.16.840.1.101.3.4.3.9") - ECDSA_WITH_SHA3_256 = ObjectIdentifier("2.16.840.1.101.3.4.3.10") - ECDSA_WITH_SHA3_384 = ObjectIdentifier("2.16.840.1.101.3.4.3.11") - ECDSA_WITH_SHA3_512 = ObjectIdentifier("2.16.840.1.101.3.4.3.12") - DSA_WITH_SHA1 = ObjectIdentifier("1.2.840.10040.4.3") - DSA_WITH_SHA224 = ObjectIdentifier("2.16.840.1.101.3.4.3.1") - DSA_WITH_SHA256 = ObjectIdentifier("2.16.840.1.101.3.4.3.2") - DSA_WITH_SHA384 = ObjectIdentifier("2.16.840.1.101.3.4.3.3") - DSA_WITH_SHA512 = ObjectIdentifier("2.16.840.1.101.3.4.3.4") - ED25519 = ObjectIdentifier("1.3.101.112") - ED448 = ObjectIdentifier("1.3.101.113") - GOSTR3411_94_WITH_3410_2001 = ObjectIdentifier("1.2.643.2.2.3") - GOSTR3410_2012_WITH_3411_2012_256 = ObjectIdentifier("1.2.643.7.1.1.3.2") - GOSTR3410_2012_WITH_3411_2012_512 = ObjectIdentifier("1.2.643.7.1.1.3.3") - - -_SIG_OIDS_TO_HASH: dict[ObjectIdentifier, hashes.HashAlgorithm | None] = { - SignatureAlgorithmOID.RSA_WITH_MD5: hashes.MD5(), - SignatureAlgorithmOID.RSA_WITH_SHA1: hashes.SHA1(), - 
SignatureAlgorithmOID._RSA_WITH_SHA1: hashes.SHA1(), - SignatureAlgorithmOID.RSA_WITH_SHA224: hashes.SHA224(), - SignatureAlgorithmOID.RSA_WITH_SHA256: hashes.SHA256(), - SignatureAlgorithmOID.RSA_WITH_SHA384: hashes.SHA384(), - SignatureAlgorithmOID.RSA_WITH_SHA512: hashes.SHA512(), - SignatureAlgorithmOID.RSA_WITH_SHA3_224: hashes.SHA3_224(), - SignatureAlgorithmOID.RSA_WITH_SHA3_256: hashes.SHA3_256(), - SignatureAlgorithmOID.RSA_WITH_SHA3_384: hashes.SHA3_384(), - SignatureAlgorithmOID.RSA_WITH_SHA3_512: hashes.SHA3_512(), - SignatureAlgorithmOID.ECDSA_WITH_SHA1: hashes.SHA1(), - SignatureAlgorithmOID.ECDSA_WITH_SHA224: hashes.SHA224(), - SignatureAlgorithmOID.ECDSA_WITH_SHA256: hashes.SHA256(), - SignatureAlgorithmOID.ECDSA_WITH_SHA384: hashes.SHA384(), - SignatureAlgorithmOID.ECDSA_WITH_SHA512: hashes.SHA512(), - SignatureAlgorithmOID.ECDSA_WITH_SHA3_224: hashes.SHA3_224(), - SignatureAlgorithmOID.ECDSA_WITH_SHA3_256: hashes.SHA3_256(), - SignatureAlgorithmOID.ECDSA_WITH_SHA3_384: hashes.SHA3_384(), - SignatureAlgorithmOID.ECDSA_WITH_SHA3_512: hashes.SHA3_512(), - SignatureAlgorithmOID.DSA_WITH_SHA1: hashes.SHA1(), - SignatureAlgorithmOID.DSA_WITH_SHA224: hashes.SHA224(), - SignatureAlgorithmOID.DSA_WITH_SHA256: hashes.SHA256(), - SignatureAlgorithmOID.ED25519: None, - SignatureAlgorithmOID.ED448: None, - SignatureAlgorithmOID.GOSTR3411_94_WITH_3410_2001: None, - SignatureAlgorithmOID.GOSTR3410_2012_WITH_3411_2012_256: None, - SignatureAlgorithmOID.GOSTR3410_2012_WITH_3411_2012_512: None, -} - - -class PublicKeyAlgorithmOID: - DSA = ObjectIdentifier("1.2.840.10040.4.1") - EC_PUBLIC_KEY = ObjectIdentifier("1.2.840.10045.2.1") - RSAES_PKCS1_v1_5 = ObjectIdentifier("1.2.840.113549.1.1.1") - RSASSA_PSS = ObjectIdentifier("1.2.840.113549.1.1.10") - X25519 = ObjectIdentifier("1.3.101.110") - X448 = ObjectIdentifier("1.3.101.111") - ED25519 = ObjectIdentifier("1.3.101.112") - ED448 = ObjectIdentifier("1.3.101.113") - - -class ExtendedKeyUsageOID: - SERVER_AUTH = ObjectIdentifier("1.3.6.1.5.5.7.3.1") - CLIENT_AUTH = ObjectIdentifier("1.3.6.1.5.5.7.3.2") - CODE_SIGNING = ObjectIdentifier("1.3.6.1.5.5.7.3.3") - EMAIL_PROTECTION = ObjectIdentifier("1.3.6.1.5.5.7.3.4") - TIME_STAMPING = ObjectIdentifier("1.3.6.1.5.5.7.3.8") - OCSP_SIGNING = ObjectIdentifier("1.3.6.1.5.5.7.3.9") - ANY_EXTENDED_KEY_USAGE = ObjectIdentifier("2.5.29.37.0") - SMARTCARD_LOGON = ObjectIdentifier("1.3.6.1.4.1.311.20.2.2") - KERBEROS_PKINIT_KDC = ObjectIdentifier("1.3.6.1.5.2.3.5") - IPSEC_IKE = ObjectIdentifier("1.3.6.1.5.5.7.3.17") - CERTIFICATE_TRANSPARENCY = ObjectIdentifier("1.3.6.1.4.1.11129.2.4.4") - - -class AuthorityInformationAccessOID: - CA_ISSUERS = ObjectIdentifier("1.3.6.1.5.5.7.48.2") - OCSP = ObjectIdentifier("1.3.6.1.5.5.7.48.1") - - -class SubjectInformationAccessOID: - CA_REPOSITORY = ObjectIdentifier("1.3.6.1.5.5.7.48.5") - - -class CertificatePoliciesOID: - CPS_QUALIFIER = ObjectIdentifier("1.3.6.1.5.5.7.2.1") - CPS_USER_NOTICE = ObjectIdentifier("1.3.6.1.5.5.7.2.2") - ANY_POLICY = ObjectIdentifier("2.5.29.32.0") - - -class AttributeOID: - CHALLENGE_PASSWORD = ObjectIdentifier("1.2.840.113549.1.9.7") - UNSTRUCTURED_NAME = ObjectIdentifier("1.2.840.113549.1.9.2") - - -_OID_NAMES = { - NameOID.COMMON_NAME: "commonName", - NameOID.COUNTRY_NAME: "countryName", - NameOID.LOCALITY_NAME: "localityName", - NameOID.STATE_OR_PROVINCE_NAME: "stateOrProvinceName", - NameOID.STREET_ADDRESS: "streetAddress", - NameOID.ORGANIZATION_NAME: "organizationName", - NameOID.ORGANIZATIONAL_UNIT_NAME: 
"organizationalUnitName", - NameOID.SERIAL_NUMBER: "serialNumber", - NameOID.SURNAME: "surname", - NameOID.GIVEN_NAME: "givenName", - NameOID.TITLE: "title", - NameOID.GENERATION_QUALIFIER: "generationQualifier", - NameOID.X500_UNIQUE_IDENTIFIER: "x500UniqueIdentifier", - NameOID.DN_QUALIFIER: "dnQualifier", - NameOID.PSEUDONYM: "pseudonym", - NameOID.USER_ID: "userID", - NameOID.DOMAIN_COMPONENT: "domainComponent", - NameOID.EMAIL_ADDRESS: "emailAddress", - NameOID.JURISDICTION_COUNTRY_NAME: "jurisdictionCountryName", - NameOID.JURISDICTION_LOCALITY_NAME: "jurisdictionLocalityName", - NameOID.JURISDICTION_STATE_OR_PROVINCE_NAME: ( - "jurisdictionStateOrProvinceName" - ), - NameOID.BUSINESS_CATEGORY: "businessCategory", - NameOID.POSTAL_ADDRESS: "postalAddress", - NameOID.POSTAL_CODE: "postalCode", - NameOID.INN: "INN", - NameOID.OGRN: "OGRN", - NameOID.SNILS: "SNILS", - NameOID.UNSTRUCTURED_NAME: "unstructuredName", - SignatureAlgorithmOID.RSA_WITH_MD5: "md5WithRSAEncryption", - SignatureAlgorithmOID.RSA_WITH_SHA1: "sha1WithRSAEncryption", - SignatureAlgorithmOID.RSA_WITH_SHA224: "sha224WithRSAEncryption", - SignatureAlgorithmOID.RSA_WITH_SHA256: "sha256WithRSAEncryption", - SignatureAlgorithmOID.RSA_WITH_SHA384: "sha384WithRSAEncryption", - SignatureAlgorithmOID.RSA_WITH_SHA512: "sha512WithRSAEncryption", - SignatureAlgorithmOID.RSASSA_PSS: "RSASSA-PSS", - SignatureAlgorithmOID.ECDSA_WITH_SHA1: "ecdsa-with-SHA1", - SignatureAlgorithmOID.ECDSA_WITH_SHA224: "ecdsa-with-SHA224", - SignatureAlgorithmOID.ECDSA_WITH_SHA256: "ecdsa-with-SHA256", - SignatureAlgorithmOID.ECDSA_WITH_SHA384: "ecdsa-with-SHA384", - SignatureAlgorithmOID.ECDSA_WITH_SHA512: "ecdsa-with-SHA512", - SignatureAlgorithmOID.DSA_WITH_SHA1: "dsa-with-sha1", - SignatureAlgorithmOID.DSA_WITH_SHA224: "dsa-with-sha224", - SignatureAlgorithmOID.DSA_WITH_SHA256: "dsa-with-sha256", - SignatureAlgorithmOID.ED25519: "ed25519", - SignatureAlgorithmOID.ED448: "ed448", - SignatureAlgorithmOID.GOSTR3411_94_WITH_3410_2001: ( - "GOST R 34.11-94 with GOST R 34.10-2001" - ), - SignatureAlgorithmOID.GOSTR3410_2012_WITH_3411_2012_256: ( - "GOST R 34.10-2012 with GOST R 34.11-2012 (256 bit)" - ), - SignatureAlgorithmOID.GOSTR3410_2012_WITH_3411_2012_512: ( - "GOST R 34.10-2012 with GOST R 34.11-2012 (512 bit)" - ), - PublicKeyAlgorithmOID.DSA: "dsaEncryption", - PublicKeyAlgorithmOID.EC_PUBLIC_KEY: "id-ecPublicKey", - PublicKeyAlgorithmOID.RSAES_PKCS1_v1_5: "rsaEncryption", - PublicKeyAlgorithmOID.RSASSA_PSS: "rsassaPss", - PublicKeyAlgorithmOID.X25519: "X25519", - PublicKeyAlgorithmOID.X448: "X448", - ExtendedKeyUsageOID.SERVER_AUTH: "serverAuth", - ExtendedKeyUsageOID.CLIENT_AUTH: "clientAuth", - ExtendedKeyUsageOID.CODE_SIGNING: "codeSigning", - ExtendedKeyUsageOID.EMAIL_PROTECTION: "emailProtection", - ExtendedKeyUsageOID.TIME_STAMPING: "timeStamping", - ExtendedKeyUsageOID.OCSP_SIGNING: "OCSPSigning", - ExtendedKeyUsageOID.SMARTCARD_LOGON: "msSmartcardLogin", - ExtendedKeyUsageOID.KERBEROS_PKINIT_KDC: "pkInitKDC", - ExtensionOID.SUBJECT_DIRECTORY_ATTRIBUTES: "subjectDirectoryAttributes", - ExtensionOID.SUBJECT_KEY_IDENTIFIER: "subjectKeyIdentifier", - ExtensionOID.KEY_USAGE: "keyUsage", - ExtensionOID.SUBJECT_ALTERNATIVE_NAME: "subjectAltName", - ExtensionOID.ISSUER_ALTERNATIVE_NAME: "issuerAltName", - ExtensionOID.BASIC_CONSTRAINTS: "basicConstraints", - ExtensionOID.PRECERT_SIGNED_CERTIFICATE_TIMESTAMPS: ( - "signedCertificateTimestampList" - ), - ExtensionOID.SIGNED_CERTIFICATE_TIMESTAMPS: ( - "signedCertificateTimestampList" - ), - 
ExtensionOID.PRECERT_POISON: "ctPoison", - ExtensionOID.MS_CERTIFICATE_TEMPLATE: "msCertificateTemplate", - CRLEntryExtensionOID.CRL_REASON: "cRLReason", - CRLEntryExtensionOID.INVALIDITY_DATE: "invalidityDate", - CRLEntryExtensionOID.CERTIFICATE_ISSUER: "certificateIssuer", - ExtensionOID.NAME_CONSTRAINTS: "nameConstraints", - ExtensionOID.CRL_DISTRIBUTION_POINTS: "cRLDistributionPoints", - ExtensionOID.CERTIFICATE_POLICIES: "certificatePolicies", - ExtensionOID.POLICY_MAPPINGS: "policyMappings", - ExtensionOID.AUTHORITY_KEY_IDENTIFIER: "authorityKeyIdentifier", - ExtensionOID.POLICY_CONSTRAINTS: "policyConstraints", - ExtensionOID.EXTENDED_KEY_USAGE: "extendedKeyUsage", - ExtensionOID.FRESHEST_CRL: "freshestCRL", - ExtensionOID.INHIBIT_ANY_POLICY: "inhibitAnyPolicy", - ExtensionOID.ISSUING_DISTRIBUTION_POINT: "issuingDistributionPoint", - ExtensionOID.AUTHORITY_INFORMATION_ACCESS: "authorityInfoAccess", - ExtensionOID.SUBJECT_INFORMATION_ACCESS: "subjectInfoAccess", - ExtensionOID.OCSP_NO_CHECK: "OCSPNoCheck", - ExtensionOID.CRL_NUMBER: "cRLNumber", - ExtensionOID.DELTA_CRL_INDICATOR: "deltaCRLIndicator", - ExtensionOID.TLS_FEATURE: "TLSFeature", - AuthorityInformationAccessOID.OCSP: "OCSP", - AuthorityInformationAccessOID.CA_ISSUERS: "caIssuers", - SubjectInformationAccessOID.CA_REPOSITORY: "caRepository", - CertificatePoliciesOID.CPS_QUALIFIER: "id-qt-cps", - CertificatePoliciesOID.CPS_USER_NOTICE: "id-qt-unotice", - OCSPExtensionOID.NONCE: "OCSPNonce", - AttributeOID.CHALLENGE_PASSWORD: "challengePassword", -} diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/backends/__init__.py b/env/lib/python3.10/site-packages/cryptography/hazmat/backends/__init__.py deleted file mode 100644 index b4400aa..0000000 --- a/env/lib/python3.10/site-packages/cryptography/hazmat/backends/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -from typing import Any - - -def default_backend() -> Any: - from cryptography.hazmat.backends.openssl.backend import backend - - return backend diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/backends/__pycache__/__init__.cpython-310.pyc b/env/lib/python3.10/site-packages/cryptography/hazmat/backends/__pycache__/__init__.cpython-310.pyc deleted file mode 100644 index 07ed9e0..0000000 Binary files a/env/lib/python3.10/site-packages/cryptography/hazmat/backends/__pycache__/__init__.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/backends/openssl/__init__.py b/env/lib/python3.10/site-packages/cryptography/hazmat/backends/openssl/__init__.py deleted file mode 100644 index 51b0447..0000000 --- a/env/lib/python3.10/site-packages/cryptography/hazmat/backends/openssl/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
- -from __future__ import annotations - -from cryptography.hazmat.backends.openssl.backend import backend - -__all__ = ["backend"] diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/backends/openssl/__pycache__/__init__.cpython-310.pyc b/env/lib/python3.10/site-packages/cryptography/hazmat/backends/openssl/__pycache__/__init__.cpython-310.pyc deleted file mode 100644 index c38c883..0000000 Binary files a/env/lib/python3.10/site-packages/cryptography/hazmat/backends/openssl/__pycache__/__init__.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/backends/openssl/__pycache__/backend.cpython-310.pyc b/env/lib/python3.10/site-packages/cryptography/hazmat/backends/openssl/__pycache__/backend.cpython-310.pyc deleted file mode 100644 index 2071158..0000000 Binary files a/env/lib/python3.10/site-packages/cryptography/hazmat/backends/openssl/__pycache__/backend.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/backends/openssl/backend.py b/env/lib/python3.10/site-packages/cryptography/hazmat/backends/openssl/backend.py deleted file mode 100644 index c87d3e8..0000000 --- a/env/lib/python3.10/site-packages/cryptography/hazmat/backends/openssl/backend.py +++ /dev/null @@ -1,291 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -from cryptography.hazmat.bindings._rust import openssl as rust_openssl -from cryptography.hazmat.bindings.openssl import binding -from cryptography.hazmat.primitives import hashes -from cryptography.hazmat.primitives._asymmetric import AsymmetricPadding -from cryptography.hazmat.primitives.asymmetric import ec -from cryptography.hazmat.primitives.asymmetric import utils as asym_utils -from cryptography.hazmat.primitives.asymmetric.padding import ( - MGF1, - OAEP, - PSS, - PKCS1v15, -) -from cryptography.hazmat.primitives.ciphers import ( - CipherAlgorithm, -) -from cryptography.hazmat.primitives.ciphers.algorithms import ( - AES, -) -from cryptography.hazmat.primitives.ciphers.modes import ( - CBC, - Mode, -) - - -class Backend: - """ - OpenSSL API binding interfaces. - """ - - name = "openssl" - - # TripleDES encryption is disallowed/deprecated throughout 2023 in - # FIPS 140-3. To keep it simple we denylist any use of TripleDES (TDEA). - _fips_ciphers = (AES,) - # Sometimes SHA1 is still permissible. That logic is contained - # within the various *_supported methods. 
- _fips_hashes = ( - hashes.SHA224, - hashes.SHA256, - hashes.SHA384, - hashes.SHA512, - hashes.SHA512_224, - hashes.SHA512_256, - hashes.SHA3_224, - hashes.SHA3_256, - hashes.SHA3_384, - hashes.SHA3_512, - hashes.SHAKE128, - hashes.SHAKE256, - ) - _fips_ecdh_curves = ( - ec.SECP224R1, - ec.SECP256R1, - ec.SECP384R1, - ec.SECP521R1, - ) - _fips_rsa_min_key_size = 2048 - _fips_rsa_min_public_exponent = 65537 - _fips_dsa_min_modulus = 1 << 2048 - _fips_dh_min_key_size = 2048 - _fips_dh_min_modulus = 1 << _fips_dh_min_key_size - - def __init__(self) -> None: - self._binding = binding.Binding() - self._ffi = self._binding.ffi - self._lib = self._binding.lib - self._fips_enabled = rust_openssl.is_fips_enabled() - - def __repr__(self) -> str: - return ( - f"" - ) - - def openssl_assert(self, ok: bool) -> None: - return binding._openssl_assert(ok) - - def _enable_fips(self) -> None: - # This function enables FIPS mode for OpenSSL 3.0.0 on installs that - # have the FIPS provider installed properly. - rust_openssl.enable_fips(rust_openssl._providers) - assert rust_openssl.is_fips_enabled() - self._fips_enabled = rust_openssl.is_fips_enabled() - - def openssl_version_text(self) -> str: - """ - Friendly string name of the loaded OpenSSL library. This is not - necessarily the same version as it was compiled against. - - Example: OpenSSL 3.2.1 30 Jan 2024 - """ - return rust_openssl.openssl_version_text() - - def openssl_version_number(self) -> int: - return rust_openssl.openssl_version() - - def _evp_md_from_algorithm(self, algorithm: hashes.HashAlgorithm): - if algorithm.name in ("blake2b", "blake2s"): - alg = f"{algorithm.name}{algorithm.digest_size * 8}".encode( - "ascii" - ) - else: - alg = algorithm.name.encode("ascii") - - evp_md = self._lib.EVP_get_digestbyname(alg) - return evp_md - - def hash_supported(self, algorithm: hashes.HashAlgorithm) -> bool: - if self._fips_enabled and not isinstance(algorithm, self._fips_hashes): - return False - - evp_md = self._evp_md_from_algorithm(algorithm) - return evp_md != self._ffi.NULL - - def signature_hash_supported( - self, algorithm: hashes.HashAlgorithm - ) -> bool: - # Dedicated check for hashing algorithm use in message digest for - # signatures, e.g. RSA PKCS#1 v1.5 SHA1 (sha1WithRSAEncryption). - if self._fips_enabled and isinstance(algorithm, hashes.SHA1): - return False - return self.hash_supported(algorithm) - - def scrypt_supported(self) -> bool: - if self._fips_enabled: - return False - else: - return hasattr(rust_openssl.kdf, "derive_scrypt") - - def hmac_supported(self, algorithm: hashes.HashAlgorithm) -> bool: - # FIPS mode still allows SHA1 for HMAC - if self._fips_enabled and isinstance(algorithm, hashes.SHA1): - return True - - return self.hash_supported(algorithm) - - def cipher_supported(self, cipher: CipherAlgorithm, mode: Mode) -> bool: - if self._fips_enabled: - # FIPS mode requires AES. TripleDES is disallowed/deprecated in - # FIPS 140-3. 
- if not isinstance(cipher, self._fips_ciphers): - return False - - return rust_openssl.ciphers.cipher_supported(cipher, mode) - - def pbkdf2_hmac_supported(self, algorithm: hashes.HashAlgorithm) -> bool: - return self.hmac_supported(algorithm) - - def _consume_errors(self) -> list[rust_openssl.OpenSSLError]: - return rust_openssl.capture_error_stack() - - def _oaep_hash_supported(self, algorithm: hashes.HashAlgorithm) -> bool: - if self._fips_enabled and isinstance(algorithm, hashes.SHA1): - return False - - return isinstance( - algorithm, - ( - hashes.SHA1, - hashes.SHA224, - hashes.SHA256, - hashes.SHA384, - hashes.SHA512, - ), - ) - - def rsa_padding_supported(self, padding: AsymmetricPadding) -> bool: - if isinstance(padding, PKCS1v15): - return True - elif isinstance(padding, PSS) and isinstance(padding._mgf, MGF1): - # SHA1 is permissible in MGF1 in FIPS even when SHA1 is blocked - # as signature algorithm. - if self._fips_enabled and isinstance( - padding._mgf._algorithm, hashes.SHA1 - ): - return True - else: - return self.hash_supported(padding._mgf._algorithm) - elif isinstance(padding, OAEP) and isinstance(padding._mgf, MGF1): - return self._oaep_hash_supported( - padding._mgf._algorithm - ) and self._oaep_hash_supported(padding._algorithm) - else: - return False - - def rsa_encryption_supported(self, padding: AsymmetricPadding) -> bool: - if self._fips_enabled and isinstance(padding, PKCS1v15): - return False - else: - return self.rsa_padding_supported(padding) - - def dsa_supported(self) -> bool: - return ( - not rust_openssl.CRYPTOGRAPHY_IS_BORINGSSL - and not self._fips_enabled - ) - - def dsa_hash_supported(self, algorithm: hashes.HashAlgorithm) -> bool: - if not self.dsa_supported(): - return False - return self.signature_hash_supported(algorithm) - - def cmac_algorithm_supported(self, algorithm) -> bool: - return self.cipher_supported( - algorithm, CBC(b"\x00" * algorithm.block_size) - ) - - def elliptic_curve_supported(self, curve: ec.EllipticCurve) -> bool: - if self._fips_enabled and not isinstance( - curve, self._fips_ecdh_curves - ): - return False - - return rust_openssl.ec.curve_supported(curve) - - def elliptic_curve_signature_algorithm_supported( - self, - signature_algorithm: ec.EllipticCurveSignatureAlgorithm, - curve: ec.EllipticCurve, - ) -> bool: - # We only support ECDSA right now. 
- if not isinstance(signature_algorithm, ec.ECDSA): - return False - - return self.elliptic_curve_supported(curve) and ( - isinstance(signature_algorithm.algorithm, asym_utils.Prehashed) - or self.hash_supported(signature_algorithm.algorithm) - ) - - def elliptic_curve_exchange_algorithm_supported( - self, algorithm: ec.ECDH, curve: ec.EllipticCurve - ) -> bool: - return self.elliptic_curve_supported(curve) and isinstance( - algorithm, ec.ECDH - ) - - def dh_supported(self) -> bool: - return not rust_openssl.CRYPTOGRAPHY_IS_BORINGSSL - - def dh_x942_serialization_supported(self) -> bool: - return self._lib.Cryptography_HAS_EVP_PKEY_DHX == 1 - - def x25519_supported(self) -> bool: - if self._fips_enabled: - return False - return True - - def x448_supported(self) -> bool: - if self._fips_enabled: - return False - return ( - not rust_openssl.CRYPTOGRAPHY_IS_LIBRESSL - and not rust_openssl.CRYPTOGRAPHY_IS_BORINGSSL - ) - - def ed25519_supported(self) -> bool: - if self._fips_enabled: - return False - return True - - def ed448_supported(self) -> bool: - if self._fips_enabled: - return False - return ( - not rust_openssl.CRYPTOGRAPHY_IS_LIBRESSL - and not rust_openssl.CRYPTOGRAPHY_IS_BORINGSSL - ) - - def ecdsa_deterministic_supported(self) -> bool: - return ( - rust_openssl.CRYPTOGRAPHY_OPENSSL_320_OR_GREATER - and not self._fips_enabled - ) - - def poly1305_supported(self) -> bool: - if self._fips_enabled: - return False - return True - - def pkcs7_supported(self) -> bool: - return not rust_openssl.CRYPTOGRAPHY_IS_BORINGSSL - - -backend = Backend() diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/__init__.py b/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/__init__.py deleted file mode 100644 index b509336..0000000 --- a/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/__pycache__/__init__.cpython-310.pyc b/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/__pycache__/__init__.cpython-310.pyc deleted file mode 100644 index b4aae34..0000000 Binary files a/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/__pycache__/__init__.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust.abi3.so b/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust.abi3.so deleted file mode 100755 index 7530c41..0000000 Binary files a/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust.abi3.so and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/__init__.pyi b/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/__init__.pyi deleted file mode 100644 index c0ea0a5..0000000 --- a/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/__init__.pyi +++ /dev/null @@ -1,24 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -import typing - -from cryptography.hazmat.primitives import padding - -def check_pkcs7_padding(data: bytes) -> bool: ... -def check_ansix923_padding(data: bytes) -> bool: ... 
- -class PKCS7PaddingContext(padding.PaddingContext): - def __init__(self, block_size: int) -> None: ... - def update(self, data: bytes) -> bytes: ... - def finalize(self) -> bytes: ... - -class ObjectIdentifier: - def __init__(self, val: str) -> None: ... - @property - def dotted_string(self) -> str: ... - @property - def _name(self) -> str: ... - -T = typing.TypeVar("T") diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/_openssl.pyi b/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/_openssl.pyi deleted file mode 100644 index 8010008..0000000 --- a/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/_openssl.pyi +++ /dev/null @@ -1,8 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -import typing - -lib = typing.Any -ffi = typing.Any diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/asn1.pyi b/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/asn1.pyi deleted file mode 100644 index 3b5f208..0000000 --- a/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/asn1.pyi +++ /dev/null @@ -1,7 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -def decode_dss_signature(signature: bytes) -> tuple[int, int]: ... -def encode_dss_signature(r: int, s: int) -> bytes: ... -def parse_spki_for_data(data: bytes) -> bytes: ... diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/exceptions.pyi b/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/exceptions.pyi deleted file mode 100644 index 09f46b1..0000000 --- a/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/exceptions.pyi +++ /dev/null @@ -1,17 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -class _Reasons: - BACKEND_MISSING_INTERFACE: _Reasons - UNSUPPORTED_HASH: _Reasons - UNSUPPORTED_CIPHER: _Reasons - UNSUPPORTED_PADDING: _Reasons - UNSUPPORTED_MGF: _Reasons - UNSUPPORTED_PUBLIC_KEY_ALGORITHM: _Reasons - UNSUPPORTED_ELLIPTIC_CURVE: _Reasons - UNSUPPORTED_SERIALIZATION: _Reasons - UNSUPPORTED_X509: _Reasons - UNSUPPORTED_EXCHANGE_ALGORITHM: _Reasons - UNSUPPORTED_DIFFIE_HELLMAN: _Reasons - UNSUPPORTED_MAC: _Reasons diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/ocsp.pyi b/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/ocsp.pyi deleted file mode 100644 index 5e02145..0000000 --- a/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/ocsp.pyi +++ /dev/null @@ -1,23 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from cryptography.hazmat.primitives import hashes -from cryptography.hazmat.primitives.asymmetric.types import PrivateKeyTypes -from cryptography.x509 import ocsp - -class OCSPRequest: ... -class OCSPResponse: ... -class OCSPSingleResponse: ... - -def load_der_ocsp_request(data: bytes) -> ocsp.OCSPRequest: ... -def load_der_ocsp_response(data: bytes) -> ocsp.OCSPResponse: ... 
-def create_ocsp_request( - builder: ocsp.OCSPRequestBuilder, -) -> ocsp.OCSPRequest: ... -def create_ocsp_response( - status: ocsp.OCSPResponseStatus, - builder: ocsp.OCSPResponseBuilder | None, - private_key: PrivateKeyTypes | None, - hash_algorithm: hashes.HashAlgorithm | None, -) -> ocsp.OCSPResponse: ... diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/__init__.pyi b/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/__init__.pyi deleted file mode 100644 index 1e66d33..0000000 --- a/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/__init__.pyi +++ /dev/null @@ -1,71 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -import typing - -from cryptography.hazmat.bindings._rust.openssl import ( - aead, - ciphers, - cmac, - dh, - dsa, - ec, - ed448, - ed25519, - hashes, - hmac, - kdf, - keys, - poly1305, - rsa, - x448, - x25519, -) - -__all__ = [ - "aead", - "ciphers", - "cmac", - "dh", - "dsa", - "ec", - "ed448", - "ed25519", - "hashes", - "hmac", - "kdf", - "keys", - "openssl_version", - "openssl_version_text", - "poly1305", - "raise_openssl_error", - "rsa", - "x448", - "x25519", -] - -CRYPTOGRAPHY_IS_LIBRESSL: bool -CRYPTOGRAPHY_IS_BORINGSSL: bool -CRYPTOGRAPHY_OPENSSL_300_OR_GREATER: bool -CRYPTOGRAPHY_OPENSSL_320_OR_GREATER: bool - -class Providers: ... - -_legacy_provider_loaded: bool -_providers: Providers - -def openssl_version() -> int: ... -def openssl_version_text() -> str: ... -def raise_openssl_error() -> typing.NoReturn: ... -def capture_error_stack() -> list[OpenSSLError]: ... -def is_fips_enabled() -> bool: ... -def enable_fips(providers: Providers) -> None: ... - -class OpenSSLError: - @property - def lib(self) -> int: ... - @property - def reason(self) -> int: ... - @property - def reason_text(self) -> bytes: ... diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/aead.pyi b/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/aead.pyi deleted file mode 100644 index 047f49d..0000000 --- a/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/aead.pyi +++ /dev/null @@ -1,103 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -class AESGCM: - def __init__(self, key: bytes) -> None: ... - @staticmethod - def generate_key(key_size: int) -> bytes: ... - def encrypt( - self, - nonce: bytes, - data: bytes, - associated_data: bytes | None, - ) -> bytes: ... - def decrypt( - self, - nonce: bytes, - data: bytes, - associated_data: bytes | None, - ) -> bytes: ... - -class ChaCha20Poly1305: - def __init__(self, key: bytes) -> None: ... - @staticmethod - def generate_key() -> bytes: ... - def encrypt( - self, - nonce: bytes, - data: bytes, - associated_data: bytes | None, - ) -> bytes: ... - def decrypt( - self, - nonce: bytes, - data: bytes, - associated_data: bytes | None, - ) -> bytes: ... - -class AESCCM: - def __init__(self, key: bytes, tag_length: int = 16) -> None: ... - @staticmethod - def generate_key(key_size: int) -> bytes: ... - def encrypt( - self, - nonce: bytes, - data: bytes, - associated_data: bytes | None, - ) -> bytes: ... 
- def decrypt( - self, - nonce: bytes, - data: bytes, - associated_data: bytes | None, - ) -> bytes: ... - -class AESSIV: - def __init__(self, key: bytes) -> None: ... - @staticmethod - def generate_key(key_size: int) -> bytes: ... - def encrypt( - self, - data: bytes, - associated_data: list[bytes] | None, - ) -> bytes: ... - def decrypt( - self, - data: bytes, - associated_data: list[bytes] | None, - ) -> bytes: ... - -class AESOCB3: - def __init__(self, key: bytes) -> None: ... - @staticmethod - def generate_key(key_size: int) -> bytes: ... - def encrypt( - self, - nonce: bytes, - data: bytes, - associated_data: bytes | None, - ) -> bytes: ... - def decrypt( - self, - nonce: bytes, - data: bytes, - associated_data: bytes | None, - ) -> bytes: ... - -class AESGCMSIV: - def __init__(self, key: bytes) -> None: ... - @staticmethod - def generate_key(key_size: int) -> bytes: ... - def encrypt( - self, - nonce: bytes, - data: bytes, - associated_data: bytes | None, - ) -> bytes: ... - def decrypt( - self, - nonce: bytes, - data: bytes, - associated_data: bytes | None, - ) -> bytes: ... diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/ciphers.pyi b/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/ciphers.pyi deleted file mode 100644 index 759f3b5..0000000 --- a/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/ciphers.pyi +++ /dev/null @@ -1,38 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -import typing - -from cryptography.hazmat.primitives import ciphers -from cryptography.hazmat.primitives.ciphers import modes - -@typing.overload -def create_encryption_ctx( - algorithm: ciphers.CipherAlgorithm, mode: modes.ModeWithAuthenticationTag -) -> ciphers.AEADEncryptionContext: ... -@typing.overload -def create_encryption_ctx( - algorithm: ciphers.CipherAlgorithm, mode: modes.Mode -) -> ciphers.CipherContext: ... -@typing.overload -def create_decryption_ctx( - algorithm: ciphers.CipherAlgorithm, mode: modes.ModeWithAuthenticationTag -) -> ciphers.AEADDecryptionContext: ... -@typing.overload -def create_decryption_ctx( - algorithm: ciphers.CipherAlgorithm, mode: modes.Mode -) -> ciphers.CipherContext: ... -def cipher_supported( - algorithm: ciphers.CipherAlgorithm, mode: modes.Mode -) -> bool: ... -def _advance( - ctx: ciphers.AEADEncryptionContext | ciphers.AEADDecryptionContext, n: int -) -> None: ... -def _advance_aad( - ctx: ciphers.AEADEncryptionContext | ciphers.AEADDecryptionContext, n: int -) -> None: ... - -class CipherContext: ... -class AEADEncryptionContext: ... -class AEADDecryptionContext: ... diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/cmac.pyi b/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/cmac.pyi deleted file mode 100644 index 9c03508..0000000 --- a/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/cmac.pyi +++ /dev/null @@ -1,18 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -import typing - -from cryptography.hazmat.primitives import ciphers - -class CMAC: - def __init__( - self, - algorithm: ciphers.BlockCipherAlgorithm, - backend: typing.Any = None, - ) -> None: ... 
- def update(self, data: bytes) -> None: ... - def finalize(self) -> bytes: ... - def verify(self, signature: bytes) -> None: ... - def copy(self) -> CMAC: ... diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/dh.pyi b/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/dh.pyi deleted file mode 100644 index 08733d7..0000000 --- a/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/dh.pyi +++ /dev/null @@ -1,51 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -import typing - -from cryptography.hazmat.primitives.asymmetric import dh - -MIN_MODULUS_SIZE: int - -class DHPrivateKey: ... -class DHPublicKey: ... -class DHParameters: ... - -class DHPrivateNumbers: - def __init__(self, x: int, public_numbers: DHPublicNumbers) -> None: ... - def private_key(self, backend: typing.Any = None) -> dh.DHPrivateKey: ... - @property - def x(self) -> int: ... - @property - def public_numbers(self) -> DHPublicNumbers: ... - -class DHPublicNumbers: - def __init__( - self, y: int, parameter_numbers: DHParameterNumbers - ) -> None: ... - def public_key(self, backend: typing.Any = None) -> dh.DHPublicKey: ... - @property - def y(self) -> int: ... - @property - def parameter_numbers(self) -> DHParameterNumbers: ... - -class DHParameterNumbers: - def __init__(self, p: int, g: int, q: int | None = None) -> None: ... - def parameters(self, backend: typing.Any = None) -> dh.DHParameters: ... - @property - def p(self) -> int: ... - @property - def g(self) -> int: ... - @property - def q(self) -> int | None: ... - -def generate_parameters( - generator: int, key_size: int, backend: typing.Any = None -) -> dh.DHParameters: ... -def from_pem_parameters( - data: bytes, backend: typing.Any = None -) -> dh.DHParameters: ... -def from_der_parameters( - data: bytes, backend: typing.Any = None -) -> dh.DHParameters: ... diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/dsa.pyi b/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/dsa.pyi deleted file mode 100644 index 0922a4c..0000000 --- a/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/dsa.pyi +++ /dev/null @@ -1,41 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -import typing - -from cryptography.hazmat.primitives.asymmetric import dsa - -class DSAPrivateKey: ... -class DSAPublicKey: ... -class DSAParameters: ... - -class DSAPrivateNumbers: - def __init__(self, x: int, public_numbers: DSAPublicNumbers) -> None: ... - @property - def x(self) -> int: ... - @property - def public_numbers(self) -> DSAPublicNumbers: ... - def private_key(self, backend: typing.Any = None) -> dsa.DSAPrivateKey: ... - -class DSAPublicNumbers: - def __init__( - self, y: int, parameter_numbers: DSAParameterNumbers - ) -> None: ... - @property - def y(self) -> int: ... - @property - def parameter_numbers(self) -> DSAParameterNumbers: ... - def public_key(self, backend: typing.Any = None) -> dsa.DSAPublicKey: ... - -class DSAParameterNumbers: - def __init__(self, p: int, q: int, g: int) -> None: ... - @property - def p(self) -> int: ... - @property - def q(self) -> int: ... - @property - def g(self) -> int: ... 
- def parameters(self, backend: typing.Any = None) -> dsa.DSAParameters: ... - -def generate_parameters(key_size: int) -> dsa.DSAParameters: ... diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/ec.pyi b/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/ec.pyi deleted file mode 100644 index 5c3b7bf..0000000 --- a/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/ec.pyi +++ /dev/null @@ -1,52 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -import typing - -from cryptography.hazmat.primitives.asymmetric import ec - -class ECPrivateKey: ... -class ECPublicKey: ... - -class EllipticCurvePrivateNumbers: - def __init__( - self, private_value: int, public_numbers: EllipticCurvePublicNumbers - ) -> None: ... - def private_key( - self, backend: typing.Any = None - ) -> ec.EllipticCurvePrivateKey: ... - @property - def private_value(self) -> int: ... - @property - def public_numbers(self) -> EllipticCurvePublicNumbers: ... - -class EllipticCurvePublicNumbers: - def __init__(self, x: int, y: int, curve: ec.EllipticCurve) -> None: ... - def public_key( - self, backend: typing.Any = None - ) -> ec.EllipticCurvePublicKey: ... - @property - def x(self) -> int: ... - @property - def y(self) -> int: ... - @property - def curve(self) -> ec.EllipticCurve: ... - def __eq__(self, other: object) -> bool: ... - -def curve_supported(curve: ec.EllipticCurve) -> bool: ... -def generate_private_key( - curve: ec.EllipticCurve, backend: typing.Any = None -) -> ec.EllipticCurvePrivateKey: ... -def from_private_numbers( - numbers: ec.EllipticCurvePrivateNumbers, -) -> ec.EllipticCurvePrivateKey: ... -def from_public_numbers( - numbers: ec.EllipticCurvePublicNumbers, -) -> ec.EllipticCurvePublicKey: ... -def from_public_bytes( - curve: ec.EllipticCurve, data: bytes -) -> ec.EllipticCurvePublicKey: ... -def derive_private_key( - private_value: int, curve: ec.EllipticCurve -) -> ec.EllipticCurvePrivateKey: ... diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/ed25519.pyi b/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/ed25519.pyi deleted file mode 100644 index 5233f9a..0000000 --- a/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/ed25519.pyi +++ /dev/null @@ -1,12 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from cryptography.hazmat.primitives.asymmetric import ed25519 - -class Ed25519PrivateKey: ... -class Ed25519PublicKey: ... - -def generate_key() -> ed25519.Ed25519PrivateKey: ... -def from_private_bytes(data: bytes) -> ed25519.Ed25519PrivateKey: ... -def from_public_bytes(data: bytes) -> ed25519.Ed25519PublicKey: ... diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/ed448.pyi b/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/ed448.pyi deleted file mode 100644 index 7a06520..0000000 --- a/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/ed448.pyi +++ /dev/null @@ -1,12 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. 
See the LICENSE file in the root of this repository -# for complete details. - -from cryptography.hazmat.primitives.asymmetric import ed448 - -class Ed448PrivateKey: ... -class Ed448PublicKey: ... - -def generate_key() -> ed448.Ed448PrivateKey: ... -def from_private_bytes(data: bytes) -> ed448.Ed448PrivateKey: ... -def from_public_bytes(data: bytes) -> ed448.Ed448PublicKey: ... diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/hashes.pyi b/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/hashes.pyi deleted file mode 100644 index ca5f42a..0000000 --- a/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/hashes.pyi +++ /dev/null @@ -1,17 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -import typing - -from cryptography.hazmat.primitives import hashes - -class Hash(hashes.HashContext): - def __init__( - self, algorithm: hashes.HashAlgorithm, backend: typing.Any = None - ) -> None: ... - @property - def algorithm(self) -> hashes.HashAlgorithm: ... - def update(self, data: bytes) -> None: ... - def finalize(self) -> bytes: ... - def copy(self) -> Hash: ... diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/hmac.pyi b/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/hmac.pyi deleted file mode 100644 index e38d9b5..0000000 --- a/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/hmac.pyi +++ /dev/null @@ -1,21 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -import typing - -from cryptography.hazmat.primitives import hashes - -class HMAC(hashes.HashContext): - def __init__( - self, - key: bytes, - algorithm: hashes.HashAlgorithm, - backend: typing.Any = None, - ) -> None: ... - @property - def algorithm(self) -> hashes.HashAlgorithm: ... - def update(self, data: bytes) -> None: ... - def finalize(self) -> bytes: ... - def verify(self, signature: bytes) -> None: ... - def copy(self) -> HMAC: ... diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/kdf.pyi b/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/kdf.pyi deleted file mode 100644 index 034a8fe..0000000 --- a/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/kdf.pyi +++ /dev/null @@ -1,22 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from cryptography.hazmat.primitives.hashes import HashAlgorithm - -def derive_pbkdf2_hmac( - key_material: bytes, - algorithm: HashAlgorithm, - salt: bytes, - iterations: int, - length: int, -) -> bytes: ... -def derive_scrypt( - key_material: bytes, - salt: bytes, - n: int, - r: int, - p: int, - max_mem: int, - length: int, -) -> bytes: ... 
diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/keys.pyi b/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/keys.pyi deleted file mode 100644 index 6815b7d..0000000 --- a/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/keys.pyi +++ /dev/null @@ -1,33 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -import typing - -from cryptography.hazmat.primitives.asymmetric.types import ( - PrivateKeyTypes, - PublicKeyTypes, -) - -def load_der_private_key( - data: bytes, - password: bytes | None, - backend: typing.Any = None, - *, - unsafe_skip_rsa_key_validation: bool = False, -) -> PrivateKeyTypes: ... -def load_pem_private_key( - data: bytes, - password: bytes | None, - backend: typing.Any = None, - *, - unsafe_skip_rsa_key_validation: bool = False, -) -> PrivateKeyTypes: ... -def load_der_public_key( - data: bytes, - backend: typing.Any = None, -) -> PublicKeyTypes: ... -def load_pem_public_key( - data: bytes, - backend: typing.Any = None, -) -> PublicKeyTypes: ... diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/poly1305.pyi b/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/poly1305.pyi deleted file mode 100644 index 2e9b0a9..0000000 --- a/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/poly1305.pyi +++ /dev/null @@ -1,13 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -class Poly1305: - def __init__(self, key: bytes) -> None: ... - @staticmethod - def generate_tag(key: bytes, data: bytes) -> bytes: ... - @staticmethod - def verify_tag(key: bytes, data: bytes, tag: bytes) -> None: ... - def update(self, data: bytes) -> None: ... - def finalize(self) -> bytes: ... - def verify(self, tag: bytes) -> None: ... diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/rsa.pyi b/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/rsa.pyi deleted file mode 100644 index ef7752d..0000000 --- a/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/rsa.pyi +++ /dev/null @@ -1,55 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -import typing - -from cryptography.hazmat.primitives.asymmetric import rsa - -class RSAPrivateKey: ... -class RSAPublicKey: ... - -class RSAPrivateNumbers: - def __init__( - self, - p: int, - q: int, - d: int, - dmp1: int, - dmq1: int, - iqmp: int, - public_numbers: RSAPublicNumbers, - ) -> None: ... - @property - def p(self) -> int: ... - @property - def q(self) -> int: ... - @property - def d(self) -> int: ... - @property - def dmp1(self) -> int: ... - @property - def dmq1(self) -> int: ... - @property - def iqmp(self) -> int: ... - @property - def public_numbers(self) -> RSAPublicNumbers: ... - def private_key( - self, - backend: typing.Any = None, - *, - unsafe_skip_rsa_key_validation: bool = False, - ) -> rsa.RSAPrivateKey: ... - -class RSAPublicNumbers: - def __init__(self, e: int, n: int) -> None: ... - @property - def n(self) -> int: ... - @property - def e(self) -> int: ... 
- def public_key(self, backend: typing.Any = None) -> rsa.RSAPublicKey: ... - -def generate_private_key( - public_exponent: int, - key_size: int, -) -> rsa.RSAPrivateKey: ... diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/x25519.pyi b/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/x25519.pyi deleted file mode 100644 index da0f3ec..0000000 --- a/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/x25519.pyi +++ /dev/null @@ -1,12 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from cryptography.hazmat.primitives.asymmetric import x25519 - -class X25519PrivateKey: ... -class X25519PublicKey: ... - -def generate_key() -> x25519.X25519PrivateKey: ... -def from_private_bytes(data: bytes) -> x25519.X25519PrivateKey: ... -def from_public_bytes(data: bytes) -> x25519.X25519PublicKey: ... diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/x448.pyi b/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/x448.pyi deleted file mode 100644 index e51cfeb..0000000 --- a/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/openssl/x448.pyi +++ /dev/null @@ -1,12 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from cryptography.hazmat.primitives.asymmetric import x448 - -class X448PrivateKey: ... -class X448PublicKey: ... - -def generate_key() -> x448.X448PrivateKey: ... -def from_private_bytes(data: bytes) -> x448.X448PrivateKey: ... -def from_public_bytes(data: bytes) -> x448.X448PublicKey: ... diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/pkcs12.pyi b/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/pkcs12.pyi deleted file mode 100644 index 40514c4..0000000 --- a/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/pkcs12.pyi +++ /dev/null @@ -1,46 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -import typing - -from cryptography import x509 -from cryptography.hazmat.primitives.asymmetric.types import PrivateKeyTypes -from cryptography.hazmat.primitives.serialization import ( - KeySerializationEncryption, -) -from cryptography.hazmat.primitives.serialization.pkcs12 import ( - PKCS12KeyAndCertificates, - PKCS12PrivateKeyTypes, -) - -class PKCS12Certificate: - def __init__( - self, cert: x509.Certificate, friendly_name: bytes | None - ) -> None: ... - @property - def friendly_name(self) -> bytes | None: ... - @property - def certificate(self) -> x509.Certificate: ... - -def load_key_and_certificates( - data: bytes, - password: bytes | None, - backend: typing.Any = None, -) -> tuple[ - PrivateKeyTypes | None, - x509.Certificate | None, - list[x509.Certificate], -]: ... -def load_pkcs12( - data: bytes, - password: bytes | None, - backend: typing.Any = None, -) -> PKCS12KeyAndCertificates: ... 
-def serialize_key_and_certificates( - name: bytes | None, - key: PKCS12PrivateKeyTypes | None, - cert: x509.Certificate | None, - cas: typing.Iterable[x509.Certificate | PKCS12Certificate] | None, - encryption_algorithm: KeySerializationEncryption, -) -> bytes: ... diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/pkcs7.pyi b/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/pkcs7.pyi deleted file mode 100644 index a72120a..0000000 --- a/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/pkcs7.pyi +++ /dev/null @@ -1,30 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -import typing - -from cryptography import x509 -from cryptography.hazmat.primitives import serialization -from cryptography.hazmat.primitives.serialization import pkcs7 - -def serialize_certificates( - certs: list[x509.Certificate], - encoding: serialization.Encoding, -) -> bytes: ... -def encrypt_and_serialize( - builder: pkcs7.PKCS7EnvelopeBuilder, - encoding: serialization.Encoding, - options: typing.Iterable[pkcs7.PKCS7Options], -) -> bytes: ... -def sign_and_serialize( - builder: pkcs7.PKCS7SignatureBuilder, - encoding: serialization.Encoding, - options: typing.Iterable[pkcs7.PKCS7Options], -) -> bytes: ... -def load_pem_pkcs7_certificates( - data: bytes, -) -> list[x509.Certificate]: ... -def load_der_pkcs7_certificates( - data: bytes, -) -> list[x509.Certificate]: ... diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/test_support.pyi b/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/test_support.pyi deleted file mode 100644 index a53ee25..0000000 --- a/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/test_support.pyi +++ /dev/null @@ -1,29 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from cryptography import x509 -from cryptography.hazmat.primitives import serialization -from cryptography.hazmat.primitives.serialization import pkcs7 - -class TestCertificate: - not_after_tag: int - not_before_tag: int - issuer_value_tags: list[int] - subject_value_tags: list[int] - -def test_parse_certificate(data: bytes) -> TestCertificate: ... -def pkcs7_decrypt( - encoding: serialization.Encoding, - msg: bytes, - pkey: serialization.pkcs7.PKCS7PrivateKeyTypes, - cert_recipient: x509.Certificate, - options: list[pkcs7.PKCS7Options], -) -> bytes: ... -def pkcs7_verify( - encoding: serialization.Encoding, - sig: bytes, - msg: bytes | None, - certs: list[x509.Certificate], - options: list[pkcs7.PKCS7Options], -) -> None: ... diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/x509.pyi b/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/x509.pyi deleted file mode 100644 index aa85657..0000000 --- a/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/_rust/x509.pyi +++ /dev/null @@ -1,108 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
- -import datetime -import typing - -from cryptography import x509 -from cryptography.hazmat.primitives import hashes -from cryptography.hazmat.primitives.asymmetric.padding import PSS, PKCS1v15 -from cryptography.hazmat.primitives.asymmetric.types import PrivateKeyTypes - -def load_pem_x509_certificate( - data: bytes, backend: typing.Any = None -) -> x509.Certificate: ... -def load_der_x509_certificate( - data: bytes, backend: typing.Any = None -) -> x509.Certificate: ... -def load_pem_x509_certificates( - data: bytes, -) -> list[x509.Certificate]: ... -def load_pem_x509_crl( - data: bytes, backend: typing.Any = None -) -> x509.CertificateRevocationList: ... -def load_der_x509_crl( - data: bytes, backend: typing.Any = None -) -> x509.CertificateRevocationList: ... -def load_pem_x509_csr( - data: bytes, backend: typing.Any = None -) -> x509.CertificateSigningRequest: ... -def load_der_x509_csr( - data: bytes, backend: typing.Any = None -) -> x509.CertificateSigningRequest: ... -def encode_name_bytes(name: x509.Name) -> bytes: ... -def encode_extension_value(extension: x509.ExtensionType) -> bytes: ... -def create_x509_certificate( - builder: x509.CertificateBuilder, - private_key: PrivateKeyTypes, - hash_algorithm: hashes.HashAlgorithm | None, - rsa_padding: PKCS1v15 | PSS | None, -) -> x509.Certificate: ... -def create_x509_csr( - builder: x509.CertificateSigningRequestBuilder, - private_key: PrivateKeyTypes, - hash_algorithm: hashes.HashAlgorithm | None, - rsa_padding: PKCS1v15 | PSS | None, -) -> x509.CertificateSigningRequest: ... -def create_x509_crl( - builder: x509.CertificateRevocationListBuilder, - private_key: PrivateKeyTypes, - hash_algorithm: hashes.HashAlgorithm | None, - rsa_padding: PKCS1v15 | PSS | None, -) -> x509.CertificateRevocationList: ... - -class Sct: ... -class Certificate: ... -class RevokedCertificate: ... -class CertificateRevocationList: ... -class CertificateSigningRequest: ... - -class PolicyBuilder: - def time(self, new_time: datetime.datetime) -> PolicyBuilder: ... - def store(self, new_store: Store) -> PolicyBuilder: ... - def max_chain_depth(self, new_max_chain_depth: int) -> PolicyBuilder: ... - def build_client_verifier(self) -> ClientVerifier: ... - def build_server_verifier( - self, subject: x509.verification.Subject - ) -> ServerVerifier: ... - -class VerifiedClient: - @property - def subjects(self) -> list[x509.GeneralName]: ... - @property - def chain(self) -> list[x509.Certificate]: ... - -class ClientVerifier: - @property - def validation_time(self) -> datetime.datetime: ... - @property - def store(self) -> Store: ... - @property - def max_chain_depth(self) -> int: ... - def verify( - self, - leaf: x509.Certificate, - intermediates: list[x509.Certificate], - ) -> VerifiedClient: ... - -class ServerVerifier: - @property - def subject(self) -> x509.verification.Subject: ... - @property - def validation_time(self) -> datetime.datetime: ... - @property - def store(self) -> Store: ... - @property - def max_chain_depth(self) -> int: ... - def verify( - self, - leaf: x509.Certificate, - intermediates: list[x509.Certificate], - ) -> list[x509.Certificate]: ... - -class Store: - def __init__(self, certs: list[x509.Certificate]) -> None: ... 
- -class VerificationError(Exception): - pass diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/openssl/__init__.py b/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/openssl/__init__.py deleted file mode 100644 index b509336..0000000 --- a/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/openssl/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/__init__.cpython-310.pyc b/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/__init__.cpython-310.pyc deleted file mode 100644 index 822893d..0000000 Binary files a/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/__init__.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/_conditional.cpython-310.pyc b/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/_conditional.cpython-310.pyc deleted file mode 100644 index 4c95eb4..0000000 Binary files a/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/_conditional.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/binding.cpython-310.pyc b/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/binding.cpython-310.pyc deleted file mode 100644 index e2545dd..0000000 Binary files a/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/openssl/__pycache__/binding.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/openssl/_conditional.py b/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/openssl/_conditional.py deleted file mode 100644 index 73c06f7..0000000 --- a/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/openssl/_conditional.py +++ /dev/null @@ -1,183 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
- -from __future__ import annotations - - -def cryptography_has_set_cert_cb() -> list[str]: - return [ - "SSL_CTX_set_cert_cb", - "SSL_set_cert_cb", - ] - - -def cryptography_has_ssl_st() -> list[str]: - return [ - "SSL_ST_BEFORE", - "SSL_ST_OK", - "SSL_ST_INIT", - "SSL_ST_RENEGOTIATE", - ] - - -def cryptography_has_tls_st() -> list[str]: - return [ - "TLS_ST_BEFORE", - "TLS_ST_OK", - ] - - -def cryptography_has_ssl_sigalgs() -> list[str]: - return [ - "SSL_CTX_set1_sigalgs_list", - ] - - -def cryptography_has_psk() -> list[str]: - return [ - "SSL_CTX_use_psk_identity_hint", - "SSL_CTX_set_psk_server_callback", - "SSL_CTX_set_psk_client_callback", - ] - - -def cryptography_has_psk_tlsv13() -> list[str]: - return [ - "SSL_CTX_set_psk_find_session_callback", - "SSL_CTX_set_psk_use_session_callback", - "Cryptography_SSL_SESSION_new", - "SSL_CIPHER_find", - "SSL_SESSION_set1_master_key", - "SSL_SESSION_set_cipher", - "SSL_SESSION_set_protocol_version", - ] - - -def cryptography_has_custom_ext() -> list[str]: - return [ - "SSL_CTX_add_client_custom_ext", - "SSL_CTX_add_server_custom_ext", - "SSL_extension_supported", - ] - - -def cryptography_has_tlsv13_functions() -> list[str]: - return [ - "SSL_VERIFY_POST_HANDSHAKE", - "SSL_CTX_set_ciphersuites", - "SSL_verify_client_post_handshake", - "SSL_CTX_set_post_handshake_auth", - "SSL_set_post_handshake_auth", - "SSL_SESSION_get_max_early_data", - "SSL_write_early_data", - "SSL_read_early_data", - "SSL_CTX_set_max_early_data", - ] - - -def cryptography_has_engine() -> list[str]: - return [ - "ENGINE_by_id", - "ENGINE_init", - "ENGINE_finish", - "ENGINE_get_default_RAND", - "ENGINE_set_default_RAND", - "ENGINE_unregister_RAND", - "ENGINE_ctrl_cmd", - "ENGINE_free", - "ENGINE_get_name", - "ENGINE_ctrl_cmd_string", - "ENGINE_load_builtin_engines", - "ENGINE_load_private_key", - "ENGINE_load_public_key", - "SSL_CTX_set_client_cert_engine", - ] - - -def cryptography_has_verified_chain() -> list[str]: - return [ - "SSL_get0_verified_chain", - ] - - -def cryptography_has_srtp() -> list[str]: - return [ - "SSL_CTX_set_tlsext_use_srtp", - "SSL_set_tlsext_use_srtp", - "SSL_get_selected_srtp_profile", - ] - - -def cryptography_has_op_no_renegotiation() -> list[str]: - return [ - "SSL_OP_NO_RENEGOTIATION", - ] - - -def cryptography_has_dtls_get_data_mtu() -> list[str]: - return [ - "DTLS_get_data_mtu", - ] - - -def cryptography_has_ssl_cookie() -> list[str]: - return [ - "SSL_OP_COOKIE_EXCHANGE", - "DTLSv1_listen", - "SSL_CTX_set_cookie_generate_cb", - "SSL_CTX_set_cookie_verify_cb", - ] - - -def cryptography_has_prime_checks() -> list[str]: - return [ - "BN_prime_checks_for_size", - ] - - -def cryptography_has_unexpected_eof_while_reading() -> list[str]: - return ["SSL_R_UNEXPECTED_EOF_WHILE_READING"] - - -def cryptography_has_ssl_op_ignore_unexpected_eof() -> list[str]: - return [ - "SSL_OP_IGNORE_UNEXPECTED_EOF", - ] - - -def cryptography_has_get_extms_support() -> list[str]: - return ["SSL_get_extms_support"] - - -# This is a mapping of -# {condition: function-returning-names-dependent-on-that-condition} so we can -# loop over them and delete unsupported names at runtime. It will be removed -# when cffi supports #if in cdef. We use functions instead of just a dict of -# lists so we can use coverage to measure which are used. 
-CONDITIONAL_NAMES = { - "Cryptography_HAS_SET_CERT_CB": cryptography_has_set_cert_cb, - "Cryptography_HAS_SSL_ST": cryptography_has_ssl_st, - "Cryptography_HAS_TLS_ST": cryptography_has_tls_st, - "Cryptography_HAS_SIGALGS": cryptography_has_ssl_sigalgs, - "Cryptography_HAS_PSK": cryptography_has_psk, - "Cryptography_HAS_PSK_TLSv1_3": cryptography_has_psk_tlsv13, - "Cryptography_HAS_CUSTOM_EXT": cryptography_has_custom_ext, - "Cryptography_HAS_TLSv1_3_FUNCTIONS": cryptography_has_tlsv13_functions, - "Cryptography_HAS_ENGINE": cryptography_has_engine, - "Cryptography_HAS_VERIFIED_CHAIN": cryptography_has_verified_chain, - "Cryptography_HAS_SRTP": cryptography_has_srtp, - "Cryptography_HAS_OP_NO_RENEGOTIATION": ( - cryptography_has_op_no_renegotiation - ), - "Cryptography_HAS_DTLS_GET_DATA_MTU": cryptography_has_dtls_get_data_mtu, - "Cryptography_HAS_SSL_COOKIE": cryptography_has_ssl_cookie, - "Cryptography_HAS_PRIME_CHECKS": cryptography_has_prime_checks, - "Cryptography_HAS_UNEXPECTED_EOF_WHILE_READING": ( - cryptography_has_unexpected_eof_while_reading - ), - "Cryptography_HAS_SSL_OP_IGNORE_UNEXPECTED_EOF": ( - cryptography_has_ssl_op_ignore_unexpected_eof - ), - "Cryptography_HAS_GET_EXTMS_SUPPORT": cryptography_has_get_extms_support, -} diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/openssl/binding.py b/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/openssl/binding.py deleted file mode 100644 index d4dfeef..0000000 --- a/env/lib/python3.10/site-packages/cryptography/hazmat/bindings/openssl/binding.py +++ /dev/null @@ -1,121 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -import os -import sys -import threading -import types -import typing -import warnings - -import cryptography -from cryptography.exceptions import InternalError -from cryptography.hazmat.bindings._rust import _openssl, openssl -from cryptography.hazmat.bindings.openssl._conditional import CONDITIONAL_NAMES - - -def _openssl_assert(ok: bool) -> None: - if not ok: - errors = openssl.capture_error_stack() - - raise InternalError( - "Unknown OpenSSL error. This error is commonly encountered when " - "another library is not cleaning up the OpenSSL error stack. If " - "you are using cryptography with another library that uses " - "OpenSSL try disabling it before reporting a bug. Otherwise " - "please file an issue at https://github.com/pyca/cryptography/" - "issues with information on how to reproduce " - f"this. ({errors!r})", - errors, - ) - - -def build_conditional_library( - lib: typing.Any, - conditional_names: dict[str, typing.Callable[[], list[str]]], -) -> typing.Any: - conditional_lib = types.ModuleType("lib") - conditional_lib._original_lib = lib # type: ignore[attr-defined] - excluded_names = set() - for condition, names_cb in conditional_names.items(): - if not getattr(lib, condition): - excluded_names.update(names_cb()) - - for attr in dir(lib): - if attr not in excluded_names: - setattr(conditional_lib, attr, getattr(lib, attr)) - - return conditional_lib - - -class Binding: - """ - OpenSSL API wrapper. 
- """ - - lib: typing.ClassVar = None - ffi = _openssl.ffi - _lib_loaded = False - _init_lock = threading.Lock() - - def __init__(self) -> None: - self._ensure_ffi_initialized() - - @classmethod - def _ensure_ffi_initialized(cls) -> None: - with cls._init_lock: - if not cls._lib_loaded: - cls.lib = build_conditional_library( - _openssl.lib, CONDITIONAL_NAMES - ) - cls._lib_loaded = True - - @classmethod - def init_static_locks(cls) -> None: - cls._ensure_ffi_initialized() - - -def _verify_package_version(version: str) -> None: - # Occasionally we run into situations where the version of the Python - # package does not match the version of the shared object that is loaded. - # This may occur in environments where multiple versions of cryptography - # are installed and available in the python path. To avoid errors cropping - # up later this code checks that the currently imported package and the - # shared object that were loaded have the same version and raise an - # ImportError if they do not - so_package_version = _openssl.ffi.string( - _openssl.lib.CRYPTOGRAPHY_PACKAGE_VERSION - ) - if version.encode("ascii") != so_package_version: - raise ImportError( - "The version of cryptography does not match the loaded " - "shared object. This can happen if you have multiple copies of " - "cryptography installed in your Python path. Please try creating " - "a new virtual environment to resolve this issue. " - f"Loaded python version: {version}, " - f"shared object version: {so_package_version}" - ) - - _openssl_assert( - _openssl.lib.OpenSSL_version_num() == openssl.openssl_version(), - ) - - -_verify_package_version(cryptography.__version__) - -Binding.init_static_locks() - -if ( - sys.platform == "win32" - and os.environ.get("PROCESSOR_ARCHITEW6432") is not None -): - warnings.warn( - "You are using cryptography on a 32-bit Python on a 64-bit Windows " - "Operating System. Cryptography will be significantly faster if you " - "switch to using a 64-bit Python.", - UserWarning, - stacklevel=2, - ) diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/decrepit/__init__.py b/env/lib/python3.10/site-packages/cryptography/hazmat/decrepit/__init__.py deleted file mode 100644 index 41d7318..0000000 --- a/env/lib/python3.10/site-packages/cryptography/hazmat/decrepit/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/decrepit/__pycache__/__init__.cpython-310.pyc b/env/lib/python3.10/site-packages/cryptography/hazmat/decrepit/__pycache__/__init__.cpython-310.pyc deleted file mode 100644 index 5187231..0000000 Binary files a/env/lib/python3.10/site-packages/cryptography/hazmat/decrepit/__pycache__/__init__.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/decrepit/ciphers/__init__.py b/env/lib/python3.10/site-packages/cryptography/hazmat/decrepit/ciphers/__init__.py deleted file mode 100644 index 41d7318..0000000 --- a/env/lib/python3.10/site-packages/cryptography/hazmat/decrepit/ciphers/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
- -from __future__ import annotations diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/decrepit/ciphers/__pycache__/__init__.cpython-310.pyc b/env/lib/python3.10/site-packages/cryptography/hazmat/decrepit/ciphers/__pycache__/__init__.cpython-310.pyc deleted file mode 100644 index 677b95d..0000000 Binary files a/env/lib/python3.10/site-packages/cryptography/hazmat/decrepit/ciphers/__pycache__/__init__.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/decrepit/ciphers/__pycache__/algorithms.cpython-310.pyc b/env/lib/python3.10/site-packages/cryptography/hazmat/decrepit/ciphers/__pycache__/algorithms.cpython-310.pyc deleted file mode 100644 index 8445eae..0000000 Binary files a/env/lib/python3.10/site-packages/cryptography/hazmat/decrepit/ciphers/__pycache__/algorithms.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/decrepit/ciphers/algorithms.py b/env/lib/python3.10/site-packages/cryptography/hazmat/decrepit/ciphers/algorithms.py deleted file mode 100644 index a7d4aa3..0000000 --- a/env/lib/python3.10/site-packages/cryptography/hazmat/decrepit/ciphers/algorithms.py +++ /dev/null @@ -1,107 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -from cryptography.hazmat.primitives._cipheralgorithm import ( - BlockCipherAlgorithm, - CipherAlgorithm, - _verify_key_size, -) - - -class ARC4(CipherAlgorithm): - name = "RC4" - key_sizes = frozenset([40, 56, 64, 80, 128, 160, 192, 256]) - - def __init__(self, key: bytes): - self.key = _verify_key_size(self, key) - - @property - def key_size(self) -> int: - return len(self.key) * 8 - - -class TripleDES(BlockCipherAlgorithm): - name = "3DES" - block_size = 64 - key_sizes = frozenset([64, 128, 192]) - - def __init__(self, key: bytes): - if len(key) == 8: - key += key + key - elif len(key) == 16: - key += key[:8] - self.key = _verify_key_size(self, key) - - @property - def key_size(self) -> int: - return len(self.key) * 8 - - -class Blowfish(BlockCipherAlgorithm): - name = "Blowfish" - block_size = 64 - key_sizes = frozenset(range(32, 449, 8)) - - def __init__(self, key: bytes): - self.key = _verify_key_size(self, key) - - @property - def key_size(self) -> int: - return len(self.key) * 8 - - -class CAST5(BlockCipherAlgorithm): - name = "CAST5" - block_size = 64 - key_sizes = frozenset(range(40, 129, 8)) - - def __init__(self, key: bytes): - self.key = _verify_key_size(self, key) - - @property - def key_size(self) -> int: - return len(self.key) * 8 - - -class SEED(BlockCipherAlgorithm): - name = "SEED" - block_size = 128 - key_sizes = frozenset([128]) - - def __init__(self, key: bytes): - self.key = _verify_key_size(self, key) - - @property - def key_size(self) -> int: - return len(self.key) * 8 - - -class IDEA(BlockCipherAlgorithm): - name = "IDEA" - block_size = 64 - key_sizes = frozenset([128]) - - def __init__(self, key: bytes): - self.key = _verify_key_size(self, key) - - @property - def key_size(self) -> int: - return len(self.key) * 8 - - -# This class only allows RC2 with a 128-bit key. No support for -# effective key bits or other key sizes is provided. 
-class RC2(BlockCipherAlgorithm): - name = "RC2" - block_size = 64 - key_sizes = frozenset([128]) - - def __init__(self, key: bytes): - self.key = _verify_key_size(self, key) - - @property - def key_size(self) -> int: - return len(self.key) * 8 diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/__init__.py b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/__init__.py deleted file mode 100644 index b509336..0000000 --- a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/__pycache__/__init__.cpython-310.pyc b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/__pycache__/__init__.cpython-310.pyc deleted file mode 100644 index cd1e755..0000000 Binary files a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/__pycache__/__init__.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/__pycache__/_asymmetric.cpython-310.pyc b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/__pycache__/_asymmetric.cpython-310.pyc deleted file mode 100644 index 1484e71..0000000 Binary files a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/__pycache__/_asymmetric.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/__pycache__/_cipheralgorithm.cpython-310.pyc b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/__pycache__/_cipheralgorithm.cpython-310.pyc deleted file mode 100644 index 522a848..0000000 Binary files a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/__pycache__/_cipheralgorithm.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/__pycache__/_serialization.cpython-310.pyc b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/__pycache__/_serialization.cpython-310.pyc deleted file mode 100644 index dfb47a6..0000000 Binary files a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/__pycache__/_serialization.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/__pycache__/cmac.cpython-310.pyc b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/__pycache__/cmac.cpython-310.pyc deleted file mode 100644 index 0172b72..0000000 Binary files a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/__pycache__/cmac.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/__pycache__/constant_time.cpython-310.pyc b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/__pycache__/constant_time.cpython-310.pyc deleted file mode 100644 index 425bab9..0000000 Binary files a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/__pycache__/constant_time.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/__pycache__/hashes.cpython-310.pyc b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/__pycache__/hashes.cpython-310.pyc deleted file mode 100644 index fbb5dbb..0000000 Binary files 
a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/__pycache__/hashes.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/__pycache__/hmac.cpython-310.pyc b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/__pycache__/hmac.cpython-310.pyc deleted file mode 100644 index d408c7a..0000000 Binary files a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/__pycache__/hmac.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/__pycache__/keywrap.cpython-310.pyc b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/__pycache__/keywrap.cpython-310.pyc deleted file mode 100644 index da4136a..0000000 Binary files a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/__pycache__/keywrap.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/__pycache__/padding.cpython-310.pyc b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/__pycache__/padding.cpython-310.pyc deleted file mode 100644 index 23bd41a..0000000 Binary files a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/__pycache__/padding.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/__pycache__/poly1305.cpython-310.pyc b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/__pycache__/poly1305.cpython-310.pyc deleted file mode 100644 index 8353684..0000000 Binary files a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/__pycache__/poly1305.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/_asymmetric.py b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/_asymmetric.py deleted file mode 100644 index ea55ffd..0000000 --- a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/_asymmetric.py +++ /dev/null @@ -1,19 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -import abc - -# This exists to break an import cycle. It is normally accessible from the -# asymmetric padding module. - - -class AsymmetricPadding(metaclass=abc.ABCMeta): - @property - @abc.abstractmethod - def name(self) -> str: - """ - A string naming this padding (e.g. "PSS", "PKCS1"). - """ diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/_cipheralgorithm.py b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/_cipheralgorithm.py deleted file mode 100644 index 588a616..0000000 --- a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/_cipheralgorithm.py +++ /dev/null @@ -1,58 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -import abc - -from cryptography import utils - -# This exists to break an import cycle. It is normally accessible from the -# ciphers module. - - -class CipherAlgorithm(metaclass=abc.ABCMeta): - @property - @abc.abstractmethod - def name(self) -> str: - """ - A string naming this mode (e.g. "AES", "Camellia"). 
- """ - - @property - @abc.abstractmethod - def key_sizes(self) -> frozenset[int]: - """ - Valid key sizes for this algorithm in bits - """ - - @property - @abc.abstractmethod - def key_size(self) -> int: - """ - The size of the key being used as an integer in bits (e.g. 128, 256). - """ - - -class BlockCipherAlgorithm(CipherAlgorithm): - key: bytes - - @property - @abc.abstractmethod - def block_size(self) -> int: - """ - The size of a block as an integer in bits (e.g. 64, 128). - """ - - -def _verify_key_size(algorithm: CipherAlgorithm, key: bytes) -> bytes: - # Verify that the key is instance of bytes - utils._check_byteslike("key", key) - - # Verify that the key size matches the expected key size - if len(key) * 8 not in algorithm.key_sizes: - raise ValueError( - f"Invalid key size ({len(key) * 8}) for {algorithm.name}." - ) - return key diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/_serialization.py b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/_serialization.py deleted file mode 100644 index 4615772..0000000 --- a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/_serialization.py +++ /dev/null @@ -1,169 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -import abc - -from cryptography import utils -from cryptography.hazmat.primitives.hashes import HashAlgorithm - -# This exists to break an import cycle. These classes are normally accessible -# from the serialization module. - - -class PBES(utils.Enum): - PBESv1SHA1And3KeyTripleDESCBC = "PBESv1 using SHA1 and 3-Key TripleDES" - PBESv2SHA256AndAES256CBC = "PBESv2 using SHA256 PBKDF2 and AES256 CBC" - - -class Encoding(utils.Enum): - PEM = "PEM" - DER = "DER" - OpenSSH = "OpenSSH" - Raw = "Raw" - X962 = "ANSI X9.62" - SMIME = "S/MIME" - - -class PrivateFormat(utils.Enum): - PKCS8 = "PKCS8" - TraditionalOpenSSL = "TraditionalOpenSSL" - Raw = "Raw" - OpenSSH = "OpenSSH" - PKCS12 = "PKCS12" - - def encryption_builder(self) -> KeySerializationEncryptionBuilder: - if self not in (PrivateFormat.OpenSSH, PrivateFormat.PKCS12): - raise ValueError( - "encryption_builder only supported with PrivateFormat.OpenSSH" - " and PrivateFormat.PKCS12" - ) - return KeySerializationEncryptionBuilder(self) - - -class PublicFormat(utils.Enum): - SubjectPublicKeyInfo = "X.509 subjectPublicKeyInfo with PKCS#1" - PKCS1 = "Raw PKCS#1" - OpenSSH = "OpenSSH" - Raw = "Raw" - CompressedPoint = "X9.62 Compressed Point" - UncompressedPoint = "X9.62 Uncompressed Point" - - -class ParameterFormat(utils.Enum): - PKCS3 = "PKCS3" - - -class KeySerializationEncryption(metaclass=abc.ABCMeta): - pass - - -class BestAvailableEncryption(KeySerializationEncryption): - def __init__(self, password: bytes): - if not isinstance(password, bytes) or len(password) == 0: - raise ValueError("Password must be 1 or more bytes.") - - self.password = password - - -class NoEncryption(KeySerializationEncryption): - pass - - -class KeySerializationEncryptionBuilder: - def __init__( - self, - format: PrivateFormat, - *, - _kdf_rounds: int | None = None, - _hmac_hash: HashAlgorithm | None = None, - _key_cert_algorithm: PBES | None = None, - ) -> None: - self._format = format - - self._kdf_rounds = _kdf_rounds - self._hmac_hash = _hmac_hash - self._key_cert_algorithm = _key_cert_algorithm - - def kdf_rounds(self, rounds: int) -> 
KeySerializationEncryptionBuilder: - if self._kdf_rounds is not None: - raise ValueError("kdf_rounds already set") - - if not isinstance(rounds, int): - raise TypeError("kdf_rounds must be an integer") - - if rounds < 1: - raise ValueError("kdf_rounds must be a positive integer") - - return KeySerializationEncryptionBuilder( - self._format, - _kdf_rounds=rounds, - _hmac_hash=self._hmac_hash, - _key_cert_algorithm=self._key_cert_algorithm, - ) - - def hmac_hash( - self, algorithm: HashAlgorithm - ) -> KeySerializationEncryptionBuilder: - if self._format is not PrivateFormat.PKCS12: - raise TypeError( - "hmac_hash only supported with PrivateFormat.PKCS12" - ) - - if self._hmac_hash is not None: - raise ValueError("hmac_hash already set") - return KeySerializationEncryptionBuilder( - self._format, - _kdf_rounds=self._kdf_rounds, - _hmac_hash=algorithm, - _key_cert_algorithm=self._key_cert_algorithm, - ) - - def key_cert_algorithm( - self, algorithm: PBES - ) -> KeySerializationEncryptionBuilder: - if self._format is not PrivateFormat.PKCS12: - raise TypeError( - "key_cert_algorithm only supported with " - "PrivateFormat.PKCS12" - ) - if self._key_cert_algorithm is not None: - raise ValueError("key_cert_algorithm already set") - return KeySerializationEncryptionBuilder( - self._format, - _kdf_rounds=self._kdf_rounds, - _hmac_hash=self._hmac_hash, - _key_cert_algorithm=algorithm, - ) - - def build(self, password: bytes) -> KeySerializationEncryption: - if not isinstance(password, bytes) or len(password) == 0: - raise ValueError("Password must be 1 or more bytes.") - - return _KeySerializationEncryption( - self._format, - password, - kdf_rounds=self._kdf_rounds, - hmac_hash=self._hmac_hash, - key_cert_algorithm=self._key_cert_algorithm, - ) - - -class _KeySerializationEncryption(KeySerializationEncryption): - def __init__( - self, - format: PrivateFormat, - password: bytes, - *, - kdf_rounds: int | None, - hmac_hash: HashAlgorithm | None, - key_cert_algorithm: PBES | None, - ): - self._format = format - self.password = password - - self._kdf_rounds = kdf_rounds - self._hmac_hash = hmac_hash - self._key_cert_algorithm = key_cert_algorithm diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/__init__.py b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/__init__.py deleted file mode 100644 index b509336..0000000 --- a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
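The `_serialization.py` module deleted above defines the `KeySerializationEncryptionBuilder` used to configure private-key encryption during export. For reference only, a minimal sketch of how that builder is typically used with the installed `cryptography` package; the key, passphrase, and round count below are purely illustrative and are not part of this diff:

```python
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import ed25519

# Illustrative key; any private key type that supports OpenSSH serialization works.
key = ed25519.Ed25519PrivateKey.generate()

# Configure OpenSSH private-key encryption with an explicit KDF round count.
# (hmac_hash and key_cert_algorithm are only accepted for PrivateFormat.PKCS12.)
encryption = (
    serialization.PrivateFormat.OpenSSH.encryption_builder()
    .kdf_rounds(30)
    .build(b"example-passphrase")
)

pem = key.private_bytes(
    encoding=serialization.Encoding.PEM,
    format=serialization.PrivateFormat.OpenSSH,
    encryption_algorithm=encryption,
)
print(pem.decode())
```

As the deleted source shows, `encryption_builder()` is only available for `PrivateFormat.OpenSSH` and `PrivateFormat.PKCS12`, and `build()` requires a non-empty byte-string password.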
diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/__init__.cpython-310.pyc b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/__init__.cpython-310.pyc deleted file mode 100644 index a230043..0000000 Binary files a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/__init__.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/dh.cpython-310.pyc b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/dh.cpython-310.pyc deleted file mode 100644 index ce50b7b..0000000 Binary files a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/dh.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/dsa.cpython-310.pyc b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/dsa.cpython-310.pyc deleted file mode 100644 index 8a446b5..0000000 Binary files a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/dsa.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ec.cpython-310.pyc b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ec.cpython-310.pyc deleted file mode 100644 index 2509929..0000000 Binary files a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ec.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ed25519.cpython-310.pyc b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ed25519.cpython-310.pyc deleted file mode 100644 index 3b29574..0000000 Binary files a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ed25519.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ed448.cpython-310.pyc b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ed448.cpython-310.pyc deleted file mode 100644 index 6f31556..0000000 Binary files a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/ed448.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/padding.cpython-310.pyc b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/padding.cpython-310.pyc deleted file mode 100644 index 72088ce..0000000 Binary files a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/padding.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/rsa.cpython-310.pyc b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/rsa.cpython-310.pyc deleted file mode 100644 index ebf52dd..0000000 Binary files a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/rsa.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/types.cpython-310.pyc 
b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/types.cpython-310.pyc deleted file mode 100644 index 525ea43..0000000 Binary files a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/types.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/utils.cpython-310.pyc b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/utils.cpython-310.pyc deleted file mode 100644 index b389ceb..0000000 Binary files a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/utils.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/x25519.cpython-310.pyc b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/x25519.cpython-310.pyc deleted file mode 100644 index c499ca4..0000000 Binary files a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/x25519.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/x448.cpython-310.pyc b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/x448.cpython-310.pyc deleted file mode 100644 index f5ceb1f..0000000 Binary files a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/__pycache__/x448.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/dh.py b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/dh.py deleted file mode 100644 index 31c9748..0000000 --- a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/dh.py +++ /dev/null @@ -1,135 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -import abc - -from cryptography.hazmat.bindings._rust import openssl as rust_openssl -from cryptography.hazmat.primitives import _serialization - -generate_parameters = rust_openssl.dh.generate_parameters - - -DHPrivateNumbers = rust_openssl.dh.DHPrivateNumbers -DHPublicNumbers = rust_openssl.dh.DHPublicNumbers -DHParameterNumbers = rust_openssl.dh.DHParameterNumbers - - -class DHParameters(metaclass=abc.ABCMeta): - @abc.abstractmethod - def generate_private_key(self) -> DHPrivateKey: - """ - Generates and returns a DHPrivateKey. - """ - - @abc.abstractmethod - def parameter_bytes( - self, - encoding: _serialization.Encoding, - format: _serialization.ParameterFormat, - ) -> bytes: - """ - Returns the parameters serialized as bytes. - """ - - @abc.abstractmethod - def parameter_numbers(self) -> DHParameterNumbers: - """ - Returns a DHParameterNumbers. - """ - - -DHParametersWithSerialization = DHParameters -DHParameters.register(rust_openssl.dh.DHParameters) - - -class DHPublicKey(metaclass=abc.ABCMeta): - @property - @abc.abstractmethod - def key_size(self) -> int: - """ - The bit length of the prime modulus. - """ - - @abc.abstractmethod - def parameters(self) -> DHParameters: - """ - The DHParameters object associated with this public key. - """ - - @abc.abstractmethod - def public_numbers(self) -> DHPublicNumbers: - """ - Returns a DHPublicNumbers. 
- """ - - @abc.abstractmethod - def public_bytes( - self, - encoding: _serialization.Encoding, - format: _serialization.PublicFormat, - ) -> bytes: - """ - Returns the key serialized as bytes. - """ - - @abc.abstractmethod - def __eq__(self, other: object) -> bool: - """ - Checks equality. - """ - - -DHPublicKeyWithSerialization = DHPublicKey -DHPublicKey.register(rust_openssl.dh.DHPublicKey) - - -class DHPrivateKey(metaclass=abc.ABCMeta): - @property - @abc.abstractmethod - def key_size(self) -> int: - """ - The bit length of the prime modulus. - """ - - @abc.abstractmethod - def public_key(self) -> DHPublicKey: - """ - The DHPublicKey associated with this private key. - """ - - @abc.abstractmethod - def parameters(self) -> DHParameters: - """ - The DHParameters object associated with this private key. - """ - - @abc.abstractmethod - def exchange(self, peer_public_key: DHPublicKey) -> bytes: - """ - Given peer's DHPublicKey, carry out the key exchange and - return shared key as bytes. - """ - - @abc.abstractmethod - def private_numbers(self) -> DHPrivateNumbers: - """ - Returns a DHPrivateNumbers. - """ - - @abc.abstractmethod - def private_bytes( - self, - encoding: _serialization.Encoding, - format: _serialization.PrivateFormat, - encryption_algorithm: _serialization.KeySerializationEncryption, - ) -> bytes: - """ - Returns the key serialized as bytes. - """ - - -DHPrivateKeyWithSerialization = DHPrivateKey -DHPrivateKey.register(rust_openssl.dh.DHPrivateKey) diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/dsa.py b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/dsa.py deleted file mode 100644 index 6dd34c0..0000000 --- a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/dsa.py +++ /dev/null @@ -1,154 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -import abc -import typing - -from cryptography.hazmat.bindings._rust import openssl as rust_openssl -from cryptography.hazmat.primitives import _serialization, hashes -from cryptography.hazmat.primitives.asymmetric import utils as asym_utils - - -class DSAParameters(metaclass=abc.ABCMeta): - @abc.abstractmethod - def generate_private_key(self) -> DSAPrivateKey: - """ - Generates and returns a DSAPrivateKey. - """ - - @abc.abstractmethod - def parameter_numbers(self) -> DSAParameterNumbers: - """ - Returns a DSAParameterNumbers. - """ - - -DSAParametersWithNumbers = DSAParameters -DSAParameters.register(rust_openssl.dsa.DSAParameters) - - -class DSAPrivateKey(metaclass=abc.ABCMeta): - @property - @abc.abstractmethod - def key_size(self) -> int: - """ - The bit length of the prime modulus. - """ - - @abc.abstractmethod - def public_key(self) -> DSAPublicKey: - """ - The DSAPublicKey associated with this private key. - """ - - @abc.abstractmethod - def parameters(self) -> DSAParameters: - """ - The DSAParameters object associated with this private key. - """ - - @abc.abstractmethod - def sign( - self, - data: bytes, - algorithm: asym_utils.Prehashed | hashes.HashAlgorithm, - ) -> bytes: - """ - Signs the data - """ - - @abc.abstractmethod - def private_numbers(self) -> DSAPrivateNumbers: - """ - Returns a DSAPrivateNumbers. 
- """ - - @abc.abstractmethod - def private_bytes( - self, - encoding: _serialization.Encoding, - format: _serialization.PrivateFormat, - encryption_algorithm: _serialization.KeySerializationEncryption, - ) -> bytes: - """ - Returns the key serialized as bytes. - """ - - -DSAPrivateKeyWithSerialization = DSAPrivateKey -DSAPrivateKey.register(rust_openssl.dsa.DSAPrivateKey) - - -class DSAPublicKey(metaclass=abc.ABCMeta): - @property - @abc.abstractmethod - def key_size(self) -> int: - """ - The bit length of the prime modulus. - """ - - @abc.abstractmethod - def parameters(self) -> DSAParameters: - """ - The DSAParameters object associated with this public key. - """ - - @abc.abstractmethod - def public_numbers(self) -> DSAPublicNumbers: - """ - Returns a DSAPublicNumbers. - """ - - @abc.abstractmethod - def public_bytes( - self, - encoding: _serialization.Encoding, - format: _serialization.PublicFormat, - ) -> bytes: - """ - Returns the key serialized as bytes. - """ - - @abc.abstractmethod - def verify( - self, - signature: bytes, - data: bytes, - algorithm: asym_utils.Prehashed | hashes.HashAlgorithm, - ) -> None: - """ - Verifies the signature of the data. - """ - - @abc.abstractmethod - def __eq__(self, other: object) -> bool: - """ - Checks equality. - """ - - -DSAPublicKeyWithSerialization = DSAPublicKey -DSAPublicKey.register(rust_openssl.dsa.DSAPublicKey) - -DSAPrivateNumbers = rust_openssl.dsa.DSAPrivateNumbers -DSAPublicNumbers = rust_openssl.dsa.DSAPublicNumbers -DSAParameterNumbers = rust_openssl.dsa.DSAParameterNumbers - - -def generate_parameters( - key_size: int, backend: typing.Any = None -) -> DSAParameters: - if key_size not in (1024, 2048, 3072, 4096): - raise ValueError("Key size must be 1024, 2048, 3072, or 4096 bits.") - - return rust_openssl.dsa.generate_parameters(key_size) - - -def generate_private_key( - key_size: int, backend: typing.Any = None -) -> DSAPrivateKey: - parameters = generate_parameters(key_size) - return parameters.generate_private_key() diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/ec.py b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/ec.py deleted file mode 100644 index da1fbea..0000000 --- a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/ec.py +++ /dev/null @@ -1,403 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
- -from __future__ import annotations - -import abc -import typing - -from cryptography import utils -from cryptography.exceptions import UnsupportedAlgorithm, _Reasons -from cryptography.hazmat._oid import ObjectIdentifier -from cryptography.hazmat.bindings._rust import openssl as rust_openssl -from cryptography.hazmat.primitives import _serialization, hashes -from cryptography.hazmat.primitives.asymmetric import utils as asym_utils - - -class EllipticCurveOID: - SECP192R1 = ObjectIdentifier("1.2.840.10045.3.1.1") - SECP224R1 = ObjectIdentifier("1.3.132.0.33") - SECP256K1 = ObjectIdentifier("1.3.132.0.10") - SECP256R1 = ObjectIdentifier("1.2.840.10045.3.1.7") - SECP384R1 = ObjectIdentifier("1.3.132.0.34") - SECP521R1 = ObjectIdentifier("1.3.132.0.35") - BRAINPOOLP256R1 = ObjectIdentifier("1.3.36.3.3.2.8.1.1.7") - BRAINPOOLP384R1 = ObjectIdentifier("1.3.36.3.3.2.8.1.1.11") - BRAINPOOLP512R1 = ObjectIdentifier("1.3.36.3.3.2.8.1.1.13") - SECT163K1 = ObjectIdentifier("1.3.132.0.1") - SECT163R2 = ObjectIdentifier("1.3.132.0.15") - SECT233K1 = ObjectIdentifier("1.3.132.0.26") - SECT233R1 = ObjectIdentifier("1.3.132.0.27") - SECT283K1 = ObjectIdentifier("1.3.132.0.16") - SECT283R1 = ObjectIdentifier("1.3.132.0.17") - SECT409K1 = ObjectIdentifier("1.3.132.0.36") - SECT409R1 = ObjectIdentifier("1.3.132.0.37") - SECT571K1 = ObjectIdentifier("1.3.132.0.38") - SECT571R1 = ObjectIdentifier("1.3.132.0.39") - - -class EllipticCurve(metaclass=abc.ABCMeta): - @property - @abc.abstractmethod - def name(self) -> str: - """ - The name of the curve. e.g. secp256r1. - """ - - @property - @abc.abstractmethod - def key_size(self) -> int: - """ - Bit size of a secret scalar for the curve. - """ - - -class EllipticCurveSignatureAlgorithm(metaclass=abc.ABCMeta): - @property - @abc.abstractmethod - def algorithm( - self, - ) -> asym_utils.Prehashed | hashes.HashAlgorithm: - """ - The digest algorithm used with this signature. - """ - - -class EllipticCurvePrivateKey(metaclass=abc.ABCMeta): - @abc.abstractmethod - def exchange( - self, algorithm: ECDH, peer_public_key: EllipticCurvePublicKey - ) -> bytes: - """ - Performs a key exchange operation using the provided algorithm with the - provided peer's public key. - """ - - @abc.abstractmethod - def public_key(self) -> EllipticCurvePublicKey: - """ - The EllipticCurvePublicKey for this private key. - """ - - @property - @abc.abstractmethod - def curve(self) -> EllipticCurve: - """ - The EllipticCurve that this key is on. - """ - - @property - @abc.abstractmethod - def key_size(self) -> int: - """ - Bit size of a secret scalar for the curve. - """ - - @abc.abstractmethod - def sign( - self, - data: bytes, - signature_algorithm: EllipticCurveSignatureAlgorithm, - ) -> bytes: - """ - Signs the data - """ - - @abc.abstractmethod - def private_numbers(self) -> EllipticCurvePrivateNumbers: - """ - Returns an EllipticCurvePrivateNumbers. - """ - - @abc.abstractmethod - def private_bytes( - self, - encoding: _serialization.Encoding, - format: _serialization.PrivateFormat, - encryption_algorithm: _serialization.KeySerializationEncryption, - ) -> bytes: - """ - Returns the key serialized as bytes. - """ - - -EllipticCurvePrivateKeyWithSerialization = EllipticCurvePrivateKey -EllipticCurvePrivateKey.register(rust_openssl.ec.ECPrivateKey) - - -class EllipticCurvePublicKey(metaclass=abc.ABCMeta): - @property - @abc.abstractmethod - def curve(self) -> EllipticCurve: - """ - The EllipticCurve that this key is on. 
- """ - - @property - @abc.abstractmethod - def key_size(self) -> int: - """ - Bit size of a secret scalar for the curve. - """ - - @abc.abstractmethod - def public_numbers(self) -> EllipticCurvePublicNumbers: - """ - Returns an EllipticCurvePublicNumbers. - """ - - @abc.abstractmethod - def public_bytes( - self, - encoding: _serialization.Encoding, - format: _serialization.PublicFormat, - ) -> bytes: - """ - Returns the key serialized as bytes. - """ - - @abc.abstractmethod - def verify( - self, - signature: bytes, - data: bytes, - signature_algorithm: EllipticCurveSignatureAlgorithm, - ) -> None: - """ - Verifies the signature of the data. - """ - - @classmethod - def from_encoded_point( - cls, curve: EllipticCurve, data: bytes - ) -> EllipticCurvePublicKey: - utils._check_bytes("data", data) - - if len(data) == 0: - raise ValueError("data must not be an empty byte string") - - if data[0] not in [0x02, 0x03, 0x04]: - raise ValueError("Unsupported elliptic curve point type") - - return rust_openssl.ec.from_public_bytes(curve, data) - - @abc.abstractmethod - def __eq__(self, other: object) -> bool: - """ - Checks equality. - """ - - -EllipticCurvePublicKeyWithSerialization = EllipticCurvePublicKey -EllipticCurvePublicKey.register(rust_openssl.ec.ECPublicKey) - -EllipticCurvePrivateNumbers = rust_openssl.ec.EllipticCurvePrivateNumbers -EllipticCurvePublicNumbers = rust_openssl.ec.EllipticCurvePublicNumbers - - -class SECT571R1(EllipticCurve): - name = "sect571r1" - key_size = 570 - - -class SECT409R1(EllipticCurve): - name = "sect409r1" - key_size = 409 - - -class SECT283R1(EllipticCurve): - name = "sect283r1" - key_size = 283 - - -class SECT233R1(EllipticCurve): - name = "sect233r1" - key_size = 233 - - -class SECT163R2(EllipticCurve): - name = "sect163r2" - key_size = 163 - - -class SECT571K1(EllipticCurve): - name = "sect571k1" - key_size = 571 - - -class SECT409K1(EllipticCurve): - name = "sect409k1" - key_size = 409 - - -class SECT283K1(EllipticCurve): - name = "sect283k1" - key_size = 283 - - -class SECT233K1(EllipticCurve): - name = "sect233k1" - key_size = 233 - - -class SECT163K1(EllipticCurve): - name = "sect163k1" - key_size = 163 - - -class SECP521R1(EllipticCurve): - name = "secp521r1" - key_size = 521 - - -class SECP384R1(EllipticCurve): - name = "secp384r1" - key_size = 384 - - -class SECP256R1(EllipticCurve): - name = "secp256r1" - key_size = 256 - - -class SECP256K1(EllipticCurve): - name = "secp256k1" - key_size = 256 - - -class SECP224R1(EllipticCurve): - name = "secp224r1" - key_size = 224 - - -class SECP192R1(EllipticCurve): - name = "secp192r1" - key_size = 192 - - -class BrainpoolP256R1(EllipticCurve): - name = "brainpoolP256r1" - key_size = 256 - - -class BrainpoolP384R1(EllipticCurve): - name = "brainpoolP384r1" - key_size = 384 - - -class BrainpoolP512R1(EllipticCurve): - name = "brainpoolP512r1" - key_size = 512 - - -_CURVE_TYPES: dict[str, EllipticCurve] = { - "prime192v1": SECP192R1(), - "prime256v1": SECP256R1(), - "secp192r1": SECP192R1(), - "secp224r1": SECP224R1(), - "secp256r1": SECP256R1(), - "secp384r1": SECP384R1(), - "secp521r1": SECP521R1(), - "secp256k1": SECP256K1(), - "sect163k1": SECT163K1(), - "sect233k1": SECT233K1(), - "sect283k1": SECT283K1(), - "sect409k1": SECT409K1(), - "sect571k1": SECT571K1(), - "sect163r2": SECT163R2(), - "sect233r1": SECT233R1(), - "sect283r1": SECT283R1(), - "sect409r1": SECT409R1(), - "sect571r1": SECT571R1(), - "brainpoolP256r1": BrainpoolP256R1(), - "brainpoolP384r1": BrainpoolP384R1(), - "brainpoolP512r1": 
BrainpoolP512R1(), -} - - -class ECDSA(EllipticCurveSignatureAlgorithm): - def __init__( - self, - algorithm: asym_utils.Prehashed | hashes.HashAlgorithm, - deterministic_signing: bool = False, - ): - from cryptography.hazmat.backends.openssl.backend import backend - - if ( - deterministic_signing - and not backend.ecdsa_deterministic_supported() - ): - raise UnsupportedAlgorithm( - "ECDSA with deterministic signature (RFC 6979) is not " - "supported by this version of OpenSSL.", - _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM, - ) - self._algorithm = algorithm - self._deterministic_signing = deterministic_signing - - @property - def algorithm( - self, - ) -> asym_utils.Prehashed | hashes.HashAlgorithm: - return self._algorithm - - @property - def deterministic_signing( - self, - ) -> bool: - return self._deterministic_signing - - -generate_private_key = rust_openssl.ec.generate_private_key - - -def derive_private_key( - private_value: int, - curve: EllipticCurve, - backend: typing.Any = None, -) -> EllipticCurvePrivateKey: - if not isinstance(private_value, int): - raise TypeError("private_value must be an integer type.") - - if private_value <= 0: - raise ValueError("private_value must be a positive integer.") - - return rust_openssl.ec.derive_private_key(private_value, curve) - - -class ECDH: - pass - - -_OID_TO_CURVE = { - EllipticCurveOID.SECP192R1: SECP192R1, - EllipticCurveOID.SECP224R1: SECP224R1, - EllipticCurveOID.SECP256K1: SECP256K1, - EllipticCurveOID.SECP256R1: SECP256R1, - EllipticCurveOID.SECP384R1: SECP384R1, - EllipticCurveOID.SECP521R1: SECP521R1, - EllipticCurveOID.BRAINPOOLP256R1: BrainpoolP256R1, - EllipticCurveOID.BRAINPOOLP384R1: BrainpoolP384R1, - EllipticCurveOID.BRAINPOOLP512R1: BrainpoolP512R1, - EllipticCurveOID.SECT163K1: SECT163K1, - EllipticCurveOID.SECT163R2: SECT163R2, - EllipticCurveOID.SECT233K1: SECT233K1, - EllipticCurveOID.SECT233R1: SECT233R1, - EllipticCurveOID.SECT283K1: SECT283K1, - EllipticCurveOID.SECT283R1: SECT283R1, - EllipticCurveOID.SECT409K1: SECT409K1, - EllipticCurveOID.SECT409R1: SECT409R1, - EllipticCurveOID.SECT571K1: SECT571K1, - EllipticCurveOID.SECT571R1: SECT571R1, -} - - -def get_curve_for_oid(oid: ObjectIdentifier) -> type[EllipticCurve]: - try: - return _OID_TO_CURVE[oid] - except KeyError: - raise LookupError( - "The provided object identifier has no matching elliptic " - "curve class" - ) diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/ed25519.py b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/ed25519.py deleted file mode 100644 index 3a26185..0000000 --- a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/ed25519.py +++ /dev/null @@ -1,116 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
- -from __future__ import annotations - -import abc - -from cryptography.exceptions import UnsupportedAlgorithm, _Reasons -from cryptography.hazmat.bindings._rust import openssl as rust_openssl -from cryptography.hazmat.primitives import _serialization - - -class Ed25519PublicKey(metaclass=abc.ABCMeta): - @classmethod - def from_public_bytes(cls, data: bytes) -> Ed25519PublicKey: - from cryptography.hazmat.backends.openssl.backend import backend - - if not backend.ed25519_supported(): - raise UnsupportedAlgorithm( - "ed25519 is not supported by this version of OpenSSL.", - _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM, - ) - - return rust_openssl.ed25519.from_public_bytes(data) - - @abc.abstractmethod - def public_bytes( - self, - encoding: _serialization.Encoding, - format: _serialization.PublicFormat, - ) -> bytes: - """ - The serialized bytes of the public key. - """ - - @abc.abstractmethod - def public_bytes_raw(self) -> bytes: - """ - The raw bytes of the public key. - Equivalent to public_bytes(Raw, Raw). - """ - - @abc.abstractmethod - def verify(self, signature: bytes, data: bytes) -> None: - """ - Verify the signature. - """ - - @abc.abstractmethod - def __eq__(self, other: object) -> bool: - """ - Checks equality. - """ - - -Ed25519PublicKey.register(rust_openssl.ed25519.Ed25519PublicKey) - - -class Ed25519PrivateKey(metaclass=abc.ABCMeta): - @classmethod - def generate(cls) -> Ed25519PrivateKey: - from cryptography.hazmat.backends.openssl.backend import backend - - if not backend.ed25519_supported(): - raise UnsupportedAlgorithm( - "ed25519 is not supported by this version of OpenSSL.", - _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM, - ) - - return rust_openssl.ed25519.generate_key() - - @classmethod - def from_private_bytes(cls, data: bytes) -> Ed25519PrivateKey: - from cryptography.hazmat.backends.openssl.backend import backend - - if not backend.ed25519_supported(): - raise UnsupportedAlgorithm( - "ed25519 is not supported by this version of OpenSSL.", - _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM, - ) - - return rust_openssl.ed25519.from_private_bytes(data) - - @abc.abstractmethod - def public_key(self) -> Ed25519PublicKey: - """ - The Ed25519PublicKey derived from the private key. - """ - - @abc.abstractmethod - def private_bytes( - self, - encoding: _serialization.Encoding, - format: _serialization.PrivateFormat, - encryption_algorithm: _serialization.KeySerializationEncryption, - ) -> bytes: - """ - The serialized bytes of the private key. - """ - - @abc.abstractmethod - def private_bytes_raw(self) -> bytes: - """ - The raw bytes of the private key. - Equivalent to private_bytes(Raw, Raw, NoEncryption()). - """ - - @abc.abstractmethod - def sign(self, data: bytes) -> bytes: - """ - Signs the data. - """ - - -Ed25519PrivateKey.register(rust_openssl.ed25519.Ed25519PrivateKey) diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/ed448.py b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/ed448.py deleted file mode 100644 index 78c82c4..0000000 --- a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/ed448.py +++ /dev/null @@ -1,118 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
- -from __future__ import annotations - -import abc - -from cryptography.exceptions import UnsupportedAlgorithm, _Reasons -from cryptography.hazmat.bindings._rust import openssl as rust_openssl -from cryptography.hazmat.primitives import _serialization - - -class Ed448PublicKey(metaclass=abc.ABCMeta): - @classmethod - def from_public_bytes(cls, data: bytes) -> Ed448PublicKey: - from cryptography.hazmat.backends.openssl.backend import backend - - if not backend.ed448_supported(): - raise UnsupportedAlgorithm( - "ed448 is not supported by this version of OpenSSL.", - _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM, - ) - - return rust_openssl.ed448.from_public_bytes(data) - - @abc.abstractmethod - def public_bytes( - self, - encoding: _serialization.Encoding, - format: _serialization.PublicFormat, - ) -> bytes: - """ - The serialized bytes of the public key. - """ - - @abc.abstractmethod - def public_bytes_raw(self) -> bytes: - """ - The raw bytes of the public key. - Equivalent to public_bytes(Raw, Raw). - """ - - @abc.abstractmethod - def verify(self, signature: bytes, data: bytes) -> None: - """ - Verify the signature. - """ - - @abc.abstractmethod - def __eq__(self, other: object) -> bool: - """ - Checks equality. - """ - - -if hasattr(rust_openssl, "ed448"): - Ed448PublicKey.register(rust_openssl.ed448.Ed448PublicKey) - - -class Ed448PrivateKey(metaclass=abc.ABCMeta): - @classmethod - def generate(cls) -> Ed448PrivateKey: - from cryptography.hazmat.backends.openssl.backend import backend - - if not backend.ed448_supported(): - raise UnsupportedAlgorithm( - "ed448 is not supported by this version of OpenSSL.", - _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM, - ) - - return rust_openssl.ed448.generate_key() - - @classmethod - def from_private_bytes(cls, data: bytes) -> Ed448PrivateKey: - from cryptography.hazmat.backends.openssl.backend import backend - - if not backend.ed448_supported(): - raise UnsupportedAlgorithm( - "ed448 is not supported by this version of OpenSSL.", - _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM, - ) - - return rust_openssl.ed448.from_private_bytes(data) - - @abc.abstractmethod - def public_key(self) -> Ed448PublicKey: - """ - The Ed448PublicKey derived from the private key. - """ - - @abc.abstractmethod - def sign(self, data: bytes) -> bytes: - """ - Signs the data. - """ - - @abc.abstractmethod - def private_bytes( - self, - encoding: _serialization.Encoding, - format: _serialization.PrivateFormat, - encryption_algorithm: _serialization.KeySerializationEncryption, - ) -> bytes: - """ - The serialized bytes of the private key. - """ - - @abc.abstractmethod - def private_bytes_raw(self) -> bytes: - """ - The raw bytes of the private key. - Equivalent to private_bytes(Raw, Raw, NoEncryption()). - """ - - -if hasattr(rust_openssl, "x448"): - Ed448PrivateKey.register(rust_openssl.ed448.Ed448PrivateKey) diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/padding.py b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/padding.py deleted file mode 100644 index b4babf4..0000000 --- a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/padding.py +++ /dev/null @@ -1,113 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
- -from __future__ import annotations - -import abc - -from cryptography.hazmat.primitives import hashes -from cryptography.hazmat.primitives._asymmetric import ( - AsymmetricPadding as AsymmetricPadding, -) -from cryptography.hazmat.primitives.asymmetric import rsa - - -class PKCS1v15(AsymmetricPadding): - name = "EMSA-PKCS1-v1_5" - - -class _MaxLength: - "Sentinel value for `MAX_LENGTH`." - - -class _Auto: - "Sentinel value for `AUTO`." - - -class _DigestLength: - "Sentinel value for `DIGEST_LENGTH`." - - -class PSS(AsymmetricPadding): - MAX_LENGTH = _MaxLength() - AUTO = _Auto() - DIGEST_LENGTH = _DigestLength() - name = "EMSA-PSS" - _salt_length: int | _MaxLength | _Auto | _DigestLength - - def __init__( - self, - mgf: MGF, - salt_length: int | _MaxLength | _Auto | _DigestLength, - ) -> None: - self._mgf = mgf - - if not isinstance( - salt_length, (int, _MaxLength, _Auto, _DigestLength) - ): - raise TypeError( - "salt_length must be an integer, MAX_LENGTH, " - "DIGEST_LENGTH, or AUTO" - ) - - if isinstance(salt_length, int) and salt_length < 0: - raise ValueError("salt_length must be zero or greater.") - - self._salt_length = salt_length - - @property - def mgf(self) -> MGF: - return self._mgf - - -class OAEP(AsymmetricPadding): - name = "EME-OAEP" - - def __init__( - self, - mgf: MGF, - algorithm: hashes.HashAlgorithm, - label: bytes | None, - ): - if not isinstance(algorithm, hashes.HashAlgorithm): - raise TypeError("Expected instance of hashes.HashAlgorithm.") - - self._mgf = mgf - self._algorithm = algorithm - self._label = label - - @property - def algorithm(self) -> hashes.HashAlgorithm: - return self._algorithm - - @property - def mgf(self) -> MGF: - return self._mgf - - -class MGF(metaclass=abc.ABCMeta): - _algorithm: hashes.HashAlgorithm - - -class MGF1(MGF): - MAX_LENGTH = _MaxLength() - - def __init__(self, algorithm: hashes.HashAlgorithm): - if not isinstance(algorithm, hashes.HashAlgorithm): - raise TypeError("Expected instance of hashes.HashAlgorithm.") - - self._algorithm = algorithm - - -def calculate_max_pss_salt_length( - key: rsa.RSAPrivateKey | rsa.RSAPublicKey, - hash_algorithm: hashes.HashAlgorithm, -) -> int: - if not isinstance(key, (rsa.RSAPrivateKey, rsa.RSAPublicKey)): - raise TypeError("key must be an RSA public or private key") - # bit length - 1 per RFC 3447 - emlen = (key.key_size + 6) // 8 - salt_length = emlen - hash_algorithm.digest_size - 2 - assert salt_length >= 0 - return salt_length diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/rsa.py b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/rsa.py deleted file mode 100644 index 7a387b5..0000000 --- a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/rsa.py +++ /dev/null @@ -1,260 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
- -from __future__ import annotations - -import abc -import typing -from math import gcd - -from cryptography.hazmat.bindings._rust import openssl as rust_openssl -from cryptography.hazmat.primitives import _serialization, hashes -from cryptography.hazmat.primitives._asymmetric import AsymmetricPadding -from cryptography.hazmat.primitives.asymmetric import utils as asym_utils - - -class RSAPrivateKey(metaclass=abc.ABCMeta): - @abc.abstractmethod - def decrypt(self, ciphertext: bytes, padding: AsymmetricPadding) -> bytes: - """ - Decrypts the provided ciphertext. - """ - - @property - @abc.abstractmethod - def key_size(self) -> int: - """ - The bit length of the public modulus. - """ - - @abc.abstractmethod - def public_key(self) -> RSAPublicKey: - """ - The RSAPublicKey associated with this private key. - """ - - @abc.abstractmethod - def sign( - self, - data: bytes, - padding: AsymmetricPadding, - algorithm: asym_utils.Prehashed | hashes.HashAlgorithm, - ) -> bytes: - """ - Signs the data. - """ - - @abc.abstractmethod - def private_numbers(self) -> RSAPrivateNumbers: - """ - Returns an RSAPrivateNumbers. - """ - - @abc.abstractmethod - def private_bytes( - self, - encoding: _serialization.Encoding, - format: _serialization.PrivateFormat, - encryption_algorithm: _serialization.KeySerializationEncryption, - ) -> bytes: - """ - Returns the key serialized as bytes. - """ - - -RSAPrivateKeyWithSerialization = RSAPrivateKey -RSAPrivateKey.register(rust_openssl.rsa.RSAPrivateKey) - - -class RSAPublicKey(metaclass=abc.ABCMeta): - @abc.abstractmethod - def encrypt(self, plaintext: bytes, padding: AsymmetricPadding) -> bytes: - """ - Encrypts the given plaintext. - """ - - @property - @abc.abstractmethod - def key_size(self) -> int: - """ - The bit length of the public modulus. - """ - - @abc.abstractmethod - def public_numbers(self) -> RSAPublicNumbers: - """ - Returns an RSAPublicNumbers - """ - - @abc.abstractmethod - def public_bytes( - self, - encoding: _serialization.Encoding, - format: _serialization.PublicFormat, - ) -> bytes: - """ - Returns the key serialized as bytes. - """ - - @abc.abstractmethod - def verify( - self, - signature: bytes, - data: bytes, - padding: AsymmetricPadding, - algorithm: asym_utils.Prehashed | hashes.HashAlgorithm, - ) -> None: - """ - Verifies the signature of the data. - """ - - @abc.abstractmethod - def recover_data_from_signature( - self, - signature: bytes, - padding: AsymmetricPadding, - algorithm: hashes.HashAlgorithm | None, - ) -> bytes: - """ - Recovers the original data from the signature. - """ - - @abc.abstractmethod - def __eq__(self, other: object) -> bool: - """ - Checks equality. - """ - - -RSAPublicKeyWithSerialization = RSAPublicKey -RSAPublicKey.register(rust_openssl.rsa.RSAPublicKey) - -RSAPrivateNumbers = rust_openssl.rsa.RSAPrivateNumbers -RSAPublicNumbers = rust_openssl.rsa.RSAPublicNumbers - - -def generate_private_key( - public_exponent: int, - key_size: int, - backend: typing.Any = None, -) -> RSAPrivateKey: - _verify_rsa_parameters(public_exponent, key_size) - return rust_openssl.rsa.generate_private_key(public_exponent, key_size) - - -def _verify_rsa_parameters(public_exponent: int, key_size: int) -> None: - if public_exponent not in (3, 65537): - raise ValueError( - "public_exponent must be either 3 (for legacy compatibility) or " - "65537. Almost everyone should choose 65537 here!" 
- ) - - if key_size < 1024: - raise ValueError("key_size must be at least 1024-bits.") - - -def _modinv(e: int, m: int) -> int: - """ - Modular Multiplicative Inverse. Returns x such that: (x*e) mod m == 1 - """ - x1, x2 = 1, 0 - a, b = e, m - while b > 0: - q, r = divmod(a, b) - xn = x1 - q * x2 - a, b, x1, x2 = b, r, x2, xn - return x1 % m - - -def rsa_crt_iqmp(p: int, q: int) -> int: - """ - Compute the CRT (q ** -1) % p value from RSA primes p and q. - """ - return _modinv(q, p) - - -def rsa_crt_dmp1(private_exponent: int, p: int) -> int: - """ - Compute the CRT private_exponent % (p - 1) value from the RSA - private_exponent (d) and p. - """ - return private_exponent % (p - 1) - - -def rsa_crt_dmq1(private_exponent: int, q: int) -> int: - """ - Compute the CRT private_exponent % (q - 1) value from the RSA - private_exponent (d) and q. - """ - return private_exponent % (q - 1) - - -def rsa_recover_private_exponent(e: int, p: int, q: int) -> int: - """ - Compute the RSA private_exponent (d) given the public exponent (e) - and the RSA primes p and q. - - This uses the Carmichael totient function to generate the - smallest possible working value of the private exponent. - """ - # This lambda_n is the Carmichael totient function. - # The original RSA paper uses the Euler totient function - # here: phi_n = (p - 1) * (q - 1) - # Either version of the private exponent will work, but the - # one generated by the older formulation may be larger - # than necessary. (lambda_n always divides phi_n) - # - # TODO: Replace with lcm(p - 1, q - 1) once the minimum - # supported Python version is >= 3.9. - lambda_n = (p - 1) * (q - 1) // gcd(p - 1, q - 1) - return _modinv(e, lambda_n) - - -# Controls the number of iterations rsa_recover_prime_factors will perform -# to obtain the prime factors. Each iteration increments by 2 so the actual -# maximum attempts is half this number. -_MAX_RECOVERY_ATTEMPTS = 1000 - - -def rsa_recover_prime_factors(n: int, e: int, d: int) -> tuple[int, int]: - """ - Compute factors p and q from the private exponent d. We assume that n has - no more than two factors. This function is adapted from code in PyCrypto. - """ - # See 8.2.2(i) in Handbook of Applied Cryptography. - ktot = d * e - 1 - # The quantity d*e-1 is a multiple of phi(n), even, - # and can be represented as t*2^s. - t = ktot - while t % 2 == 0: - t = t // 2 - # Cycle through all multiplicative inverses in Zn. - # The algorithm is non-deterministic, but there is a 50% chance - # any candidate a leads to successful factoring. - # See "Digitalized Signatures and Public Key Functions as Intractable - # as Factorization", M. Rabin, 1979 - spotted = False - a = 2 - while not spotted and a < _MAX_RECOVERY_ATTEMPTS: - k = t - # Cycle through all values a^{t*2^i}=a^k - while k < ktot: - cand = pow(a, k, n) - # Check if a^k is a non-trivial root of unity (mod n) - if cand != 1 and cand != (n - 1) and pow(cand, 2, n) == 1: - # We have found a number such that (cand-1)(cand+1)=0 (mod n). - # Either of the terms divides n. - p = gcd(cand + 1, n) - spotted = True - break - k *= 2 - # This value was not any good... let's try another! - a += 2 - if not spotted: - raise ValueError("Unable to compute factors p and q from exponent d.") - # Found ! 
- q, r = divmod(n, p) - assert r == 0 - p, q = sorted((p, q), reverse=True) - return (p, q) diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/types.py b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/types.py deleted file mode 100644 index 1fe4eaf..0000000 --- a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/types.py +++ /dev/null @@ -1,111 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -import typing - -from cryptography import utils -from cryptography.hazmat.primitives.asymmetric import ( - dh, - dsa, - ec, - ed448, - ed25519, - rsa, - x448, - x25519, -) - -# Every asymmetric key type -PublicKeyTypes = typing.Union[ - dh.DHPublicKey, - dsa.DSAPublicKey, - rsa.RSAPublicKey, - ec.EllipticCurvePublicKey, - ed25519.Ed25519PublicKey, - ed448.Ed448PublicKey, - x25519.X25519PublicKey, - x448.X448PublicKey, -] -PUBLIC_KEY_TYPES = PublicKeyTypes -utils.deprecated( - PUBLIC_KEY_TYPES, - __name__, - "Use PublicKeyTypes instead", - utils.DeprecatedIn40, - name="PUBLIC_KEY_TYPES", -) -# Every asymmetric key type -PrivateKeyTypes = typing.Union[ - dh.DHPrivateKey, - ed25519.Ed25519PrivateKey, - ed448.Ed448PrivateKey, - rsa.RSAPrivateKey, - dsa.DSAPrivateKey, - ec.EllipticCurvePrivateKey, - x25519.X25519PrivateKey, - x448.X448PrivateKey, -] -PRIVATE_KEY_TYPES = PrivateKeyTypes -utils.deprecated( - PRIVATE_KEY_TYPES, - __name__, - "Use PrivateKeyTypes instead", - utils.DeprecatedIn40, - name="PRIVATE_KEY_TYPES", -) -# Just the key types we allow to be used for x509 signing. This mirrors -# the certificate public key types -CertificateIssuerPrivateKeyTypes = typing.Union[ - ed25519.Ed25519PrivateKey, - ed448.Ed448PrivateKey, - rsa.RSAPrivateKey, - dsa.DSAPrivateKey, - ec.EllipticCurvePrivateKey, -] -CERTIFICATE_PRIVATE_KEY_TYPES = CertificateIssuerPrivateKeyTypes -utils.deprecated( - CERTIFICATE_PRIVATE_KEY_TYPES, - __name__, - "Use CertificateIssuerPrivateKeyTypes instead", - utils.DeprecatedIn40, - name="CERTIFICATE_PRIVATE_KEY_TYPES", -) -# Just the key types we allow to be used for x509 signing. This mirrors -# the certificate private key types -CertificateIssuerPublicKeyTypes = typing.Union[ - dsa.DSAPublicKey, - rsa.RSAPublicKey, - ec.EllipticCurvePublicKey, - ed25519.Ed25519PublicKey, - ed448.Ed448PublicKey, -] -CERTIFICATE_ISSUER_PUBLIC_KEY_TYPES = CertificateIssuerPublicKeyTypes -utils.deprecated( - CERTIFICATE_ISSUER_PUBLIC_KEY_TYPES, - __name__, - "Use CertificateIssuerPublicKeyTypes instead", - utils.DeprecatedIn40, - name="CERTIFICATE_ISSUER_PUBLIC_KEY_TYPES", -) -# This type removes DHPublicKey. x448/x25519 can be a public key -# but cannot be used in signing so they are allowed here. 
-CertificatePublicKeyTypes = typing.Union[ - dsa.DSAPublicKey, - rsa.RSAPublicKey, - ec.EllipticCurvePublicKey, - ed25519.Ed25519PublicKey, - ed448.Ed448PublicKey, - x25519.X25519PublicKey, - x448.X448PublicKey, -] -CERTIFICATE_PUBLIC_KEY_TYPES = CertificatePublicKeyTypes -utils.deprecated( - CERTIFICATE_PUBLIC_KEY_TYPES, - __name__, - "Use CertificatePublicKeyTypes instead", - utils.DeprecatedIn40, - name="CERTIFICATE_PUBLIC_KEY_TYPES", -) diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/utils.py b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/utils.py deleted file mode 100644 index 826b956..0000000 --- a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/utils.py +++ /dev/null @@ -1,24 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -from cryptography.hazmat.bindings._rust import asn1 -from cryptography.hazmat.primitives import hashes - -decode_dss_signature = asn1.decode_dss_signature -encode_dss_signature = asn1.encode_dss_signature - - -class Prehashed: - def __init__(self, algorithm: hashes.HashAlgorithm): - if not isinstance(algorithm, hashes.HashAlgorithm): - raise TypeError("Expected instance of HashAlgorithm.") - - self._algorithm = algorithm - self._digest_size = algorithm.digest_size - - @property - def digest_size(self) -> int: - return self._digest_size diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/x25519.py b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/x25519.py deleted file mode 100644 index 0cfa36e..0000000 --- a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/x25519.py +++ /dev/null @@ -1,109 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -import abc - -from cryptography.exceptions import UnsupportedAlgorithm, _Reasons -from cryptography.hazmat.bindings._rust import openssl as rust_openssl -from cryptography.hazmat.primitives import _serialization - - -class X25519PublicKey(metaclass=abc.ABCMeta): - @classmethod - def from_public_bytes(cls, data: bytes) -> X25519PublicKey: - from cryptography.hazmat.backends.openssl.backend import backend - - if not backend.x25519_supported(): - raise UnsupportedAlgorithm( - "X25519 is not supported by this version of OpenSSL.", - _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM, - ) - - return rust_openssl.x25519.from_public_bytes(data) - - @abc.abstractmethod - def public_bytes( - self, - encoding: _serialization.Encoding, - format: _serialization.PublicFormat, - ) -> bytes: - """ - The serialized bytes of the public key. - """ - - @abc.abstractmethod - def public_bytes_raw(self) -> bytes: - """ - The raw bytes of the public key. - Equivalent to public_bytes(Raw, Raw). - """ - - @abc.abstractmethod - def __eq__(self, other: object) -> bool: - """ - Checks equality. 
- """ - - -X25519PublicKey.register(rust_openssl.x25519.X25519PublicKey) - - -class X25519PrivateKey(metaclass=abc.ABCMeta): - @classmethod - def generate(cls) -> X25519PrivateKey: - from cryptography.hazmat.backends.openssl.backend import backend - - if not backend.x25519_supported(): - raise UnsupportedAlgorithm( - "X25519 is not supported by this version of OpenSSL.", - _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM, - ) - return rust_openssl.x25519.generate_key() - - @classmethod - def from_private_bytes(cls, data: bytes) -> X25519PrivateKey: - from cryptography.hazmat.backends.openssl.backend import backend - - if not backend.x25519_supported(): - raise UnsupportedAlgorithm( - "X25519 is not supported by this version of OpenSSL.", - _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM, - ) - - return rust_openssl.x25519.from_private_bytes(data) - - @abc.abstractmethod - def public_key(self) -> X25519PublicKey: - """ - Returns the public key associated with this private key - """ - - @abc.abstractmethod - def private_bytes( - self, - encoding: _serialization.Encoding, - format: _serialization.PrivateFormat, - encryption_algorithm: _serialization.KeySerializationEncryption, - ) -> bytes: - """ - The serialized bytes of the private key. - """ - - @abc.abstractmethod - def private_bytes_raw(self) -> bytes: - """ - The raw bytes of the private key. - Equivalent to private_bytes(Raw, Raw, NoEncryption()). - """ - - @abc.abstractmethod - def exchange(self, peer_public_key: X25519PublicKey) -> bytes: - """ - Performs a key exchange operation using the provided peer's public key. - """ - - -X25519PrivateKey.register(rust_openssl.x25519.X25519PrivateKey) diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/x448.py b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/x448.py deleted file mode 100644 index 86086ab..0000000 --- a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/asymmetric/x448.py +++ /dev/null @@ -1,112 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -import abc - -from cryptography.exceptions import UnsupportedAlgorithm, _Reasons -from cryptography.hazmat.bindings._rust import openssl as rust_openssl -from cryptography.hazmat.primitives import _serialization - - -class X448PublicKey(metaclass=abc.ABCMeta): - @classmethod - def from_public_bytes(cls, data: bytes) -> X448PublicKey: - from cryptography.hazmat.backends.openssl.backend import backend - - if not backend.x448_supported(): - raise UnsupportedAlgorithm( - "X448 is not supported by this version of OpenSSL.", - _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM, - ) - - return rust_openssl.x448.from_public_bytes(data) - - @abc.abstractmethod - def public_bytes( - self, - encoding: _serialization.Encoding, - format: _serialization.PublicFormat, - ) -> bytes: - """ - The serialized bytes of the public key. - """ - - @abc.abstractmethod - def public_bytes_raw(self) -> bytes: - """ - The raw bytes of the public key. - Equivalent to public_bytes(Raw, Raw). - """ - - @abc.abstractmethod - def __eq__(self, other: object) -> bool: - """ - Checks equality. 
- """ - - -if hasattr(rust_openssl, "x448"): - X448PublicKey.register(rust_openssl.x448.X448PublicKey) - - -class X448PrivateKey(metaclass=abc.ABCMeta): - @classmethod - def generate(cls) -> X448PrivateKey: - from cryptography.hazmat.backends.openssl.backend import backend - - if not backend.x448_supported(): - raise UnsupportedAlgorithm( - "X448 is not supported by this version of OpenSSL.", - _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM, - ) - - return rust_openssl.x448.generate_key() - - @classmethod - def from_private_bytes(cls, data: bytes) -> X448PrivateKey: - from cryptography.hazmat.backends.openssl.backend import backend - - if not backend.x448_supported(): - raise UnsupportedAlgorithm( - "X448 is not supported by this version of OpenSSL.", - _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM, - ) - - return rust_openssl.x448.from_private_bytes(data) - - @abc.abstractmethod - def public_key(self) -> X448PublicKey: - """ - Returns the public key associated with this private key - """ - - @abc.abstractmethod - def private_bytes( - self, - encoding: _serialization.Encoding, - format: _serialization.PrivateFormat, - encryption_algorithm: _serialization.KeySerializationEncryption, - ) -> bytes: - """ - The serialized bytes of the private key. - """ - - @abc.abstractmethod - def private_bytes_raw(self) -> bytes: - """ - The raw bytes of the private key. - Equivalent to private_bytes(Raw, Raw, NoEncryption()). - """ - - @abc.abstractmethod - def exchange(self, peer_public_key: X448PublicKey) -> bytes: - """ - Performs a key exchange operation using the provided peer's public key. - """ - - -if hasattr(rust_openssl, "x448"): - X448PrivateKey.register(rust_openssl.x448.X448PrivateKey) diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/ciphers/__init__.py b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/ciphers/__init__.py deleted file mode 100644 index 10c15d0..0000000 --- a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/ciphers/__init__.py +++ /dev/null @@ -1,27 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
- -from __future__ import annotations - -from cryptography.hazmat.primitives._cipheralgorithm import ( - BlockCipherAlgorithm, - CipherAlgorithm, -) -from cryptography.hazmat.primitives.ciphers.base import ( - AEADCipherContext, - AEADDecryptionContext, - AEADEncryptionContext, - Cipher, - CipherContext, -) - -__all__ = [ - "AEADCipherContext", - "AEADDecryptionContext", - "AEADEncryptionContext", - "BlockCipherAlgorithm", - "Cipher", - "CipherAlgorithm", - "CipherContext", -] diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/__init__.cpython-310.pyc b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/__init__.cpython-310.pyc deleted file mode 100644 index 931006a..0000000 Binary files a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/__init__.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/aead.cpython-310.pyc b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/aead.cpython-310.pyc deleted file mode 100644 index fd5d1f7..0000000 Binary files a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/aead.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/algorithms.cpython-310.pyc b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/algorithms.cpython-310.pyc deleted file mode 100644 index e33735c..0000000 Binary files a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/algorithms.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/base.cpython-310.pyc b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/base.cpython-310.pyc deleted file mode 100644 index f59afca..0000000 Binary files a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/base.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/modes.cpython-310.pyc b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/modes.cpython-310.pyc deleted file mode 100644 index 67abe30..0000000 Binary files a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/ciphers/__pycache__/modes.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/ciphers/aead.py b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/ciphers/aead.py deleted file mode 100644 index c8a582d..0000000 --- a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/ciphers/aead.py +++ /dev/null @@ -1,23 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
- -from __future__ import annotations - -from cryptography.hazmat.bindings._rust import openssl as rust_openssl - -__all__ = [ - "AESCCM", - "AESGCM", - "AESGCMSIV", - "AESOCB3", - "AESSIV", - "ChaCha20Poly1305", -] - -AESGCM = rust_openssl.aead.AESGCM -ChaCha20Poly1305 = rust_openssl.aead.ChaCha20Poly1305 -AESCCM = rust_openssl.aead.AESCCM -AESSIV = rust_openssl.aead.AESSIV -AESOCB3 = rust_openssl.aead.AESOCB3 -AESGCMSIV = rust_openssl.aead.AESGCMSIV diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/ciphers/algorithms.py b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/ciphers/algorithms.py deleted file mode 100644 index 1051ba3..0000000 --- a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/ciphers/algorithms.py +++ /dev/null @@ -1,177 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -from cryptography import utils -from cryptography.hazmat.decrepit.ciphers.algorithms import ( - ARC4 as ARC4, -) -from cryptography.hazmat.decrepit.ciphers.algorithms import ( - CAST5 as CAST5, -) -from cryptography.hazmat.decrepit.ciphers.algorithms import ( - IDEA as IDEA, -) -from cryptography.hazmat.decrepit.ciphers.algorithms import ( - SEED as SEED, -) -from cryptography.hazmat.decrepit.ciphers.algorithms import ( - Blowfish as Blowfish, -) -from cryptography.hazmat.decrepit.ciphers.algorithms import ( - TripleDES as TripleDES, -) -from cryptography.hazmat.primitives._cipheralgorithm import _verify_key_size -from cryptography.hazmat.primitives.ciphers import ( - BlockCipherAlgorithm, - CipherAlgorithm, -) - - -class AES(BlockCipherAlgorithm): - name = "AES" - block_size = 128 - # 512 added to support AES-256-XTS, which uses 512-bit keys - key_sizes = frozenset([128, 192, 256, 512]) - - def __init__(self, key: bytes): - self.key = _verify_key_size(self, key) - - @property - def key_size(self) -> int: - return len(self.key) * 8 - - -class AES128(BlockCipherAlgorithm): - name = "AES" - block_size = 128 - key_sizes = frozenset([128]) - key_size = 128 - - def __init__(self, key: bytes): - self.key = _verify_key_size(self, key) - - -class AES256(BlockCipherAlgorithm): - name = "AES" - block_size = 128 - key_sizes = frozenset([256]) - key_size = 256 - - def __init__(self, key: bytes): - self.key = _verify_key_size(self, key) - - -class Camellia(BlockCipherAlgorithm): - name = "camellia" - block_size = 128 - key_sizes = frozenset([128, 192, 256]) - - def __init__(self, key: bytes): - self.key = _verify_key_size(self, key) - - @property - def key_size(self) -> int: - return len(self.key) * 8 - - -utils.deprecated( - ARC4, - __name__, - "ARC4 has been moved to " - "cryptography.hazmat.decrepit.ciphers.algorithms.ARC4 and " - "will be removed from this module in 48.0.0.", - utils.DeprecatedIn43, - name="ARC4", -) - - -utils.deprecated( - TripleDES, - __name__, - "TripleDES has been moved to " - "cryptography.hazmat.decrepit.ciphers.algorithms.TripleDES and " - "will be removed from this module in 48.0.0.", - utils.DeprecatedIn43, - name="TripleDES", -) - -utils.deprecated( - Blowfish, - __name__, - "Blowfish has been moved to " - "cryptography.hazmat.decrepit.ciphers.algorithms.Blowfish and " - "will be removed from this module in 45.0.0.", - utils.DeprecatedIn37, - name="Blowfish", -) - - -utils.deprecated( - CAST5, - __name__, - "CAST5 has been moved to " - 
"cryptography.hazmat.decrepit.ciphers.algorithms.CAST5 and " - "will be removed from this module in 45.0.0.", - utils.DeprecatedIn37, - name="CAST5", -) - - -utils.deprecated( - IDEA, - __name__, - "IDEA has been moved to " - "cryptography.hazmat.decrepit.ciphers.algorithms.IDEA and " - "will be removed from this module in 45.0.0.", - utils.DeprecatedIn37, - name="IDEA", -) - - -utils.deprecated( - SEED, - __name__, - "SEED has been moved to " - "cryptography.hazmat.decrepit.ciphers.algorithms.SEED and " - "will be removed from this module in 45.0.0.", - utils.DeprecatedIn37, - name="SEED", -) - - -class ChaCha20(CipherAlgorithm): - name = "ChaCha20" - key_sizes = frozenset([256]) - - def __init__(self, key: bytes, nonce: bytes): - self.key = _verify_key_size(self, key) - utils._check_byteslike("nonce", nonce) - - if len(nonce) != 16: - raise ValueError("nonce must be 128-bits (16 bytes)") - - self._nonce = nonce - - @property - def nonce(self) -> bytes: - return self._nonce - - @property - def key_size(self) -> int: - return len(self.key) * 8 - - -class SM4(BlockCipherAlgorithm): - name = "SM4" - block_size = 128 - key_sizes = frozenset([128]) - - def __init__(self, key: bytes): - self.key = _verify_key_size(self, key) - - @property - def key_size(self) -> int: - return len(self.key) * 8 diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/ciphers/base.py b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/ciphers/base.py deleted file mode 100644 index ebfa805..0000000 --- a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/ciphers/base.py +++ /dev/null @@ -1,145 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -import abc -import typing - -from cryptography.hazmat.bindings._rust import openssl as rust_openssl -from cryptography.hazmat.primitives._cipheralgorithm import CipherAlgorithm -from cryptography.hazmat.primitives.ciphers import modes - - -class CipherContext(metaclass=abc.ABCMeta): - @abc.abstractmethod - def update(self, data: bytes) -> bytes: - """ - Processes the provided bytes through the cipher and returns the results - as bytes. - """ - - @abc.abstractmethod - def update_into(self, data: bytes, buf: bytes) -> int: - """ - Processes the provided bytes and writes the resulting data into the - provided buffer. Returns the number of bytes written. - """ - - @abc.abstractmethod - def finalize(self) -> bytes: - """ - Returns the results of processing the final block as bytes. - """ - - @abc.abstractmethod - def reset_nonce(self, nonce: bytes) -> None: - """ - Resets the nonce for the cipher context to the provided value. - Raises an exception if it does not support reset or if the - provided nonce does not have a valid length. - """ - - -class AEADCipherContext(CipherContext, metaclass=abc.ABCMeta): - @abc.abstractmethod - def authenticate_additional_data(self, data: bytes) -> None: - """ - Authenticates the provided bytes. - """ - - -class AEADDecryptionContext(AEADCipherContext, metaclass=abc.ABCMeta): - @abc.abstractmethod - def finalize_with_tag(self, tag: bytes) -> bytes: - """ - Returns the results of processing the final block as bytes and allows - delayed passing of the authentication tag. 
- """ - - -class AEADEncryptionContext(AEADCipherContext, metaclass=abc.ABCMeta): - @property - @abc.abstractmethod - def tag(self) -> bytes: - """ - Returns tag bytes. This is only available after encryption is - finalized. - """ - - -Mode = typing.TypeVar( - "Mode", bound=typing.Optional[modes.Mode], covariant=True -) - - -class Cipher(typing.Generic[Mode]): - def __init__( - self, - algorithm: CipherAlgorithm, - mode: Mode, - backend: typing.Any = None, - ) -> None: - if not isinstance(algorithm, CipherAlgorithm): - raise TypeError("Expected interface of CipherAlgorithm.") - - if mode is not None: - # mypy needs this assert to narrow the type from our generic - # type. Maybe it won't some time in the future. - assert isinstance(mode, modes.Mode) - mode.validate_for_algorithm(algorithm) - - self.algorithm = algorithm - self.mode = mode - - @typing.overload - def encryptor( - self: Cipher[modes.ModeWithAuthenticationTag], - ) -> AEADEncryptionContext: ... - - @typing.overload - def encryptor( - self: _CIPHER_TYPE, - ) -> CipherContext: ... - - def encryptor(self): - if isinstance(self.mode, modes.ModeWithAuthenticationTag): - if self.mode.tag is not None: - raise ValueError( - "Authentication tag must be None when encrypting." - ) - - return rust_openssl.ciphers.create_encryption_ctx( - self.algorithm, self.mode - ) - - @typing.overload - def decryptor( - self: Cipher[modes.ModeWithAuthenticationTag], - ) -> AEADDecryptionContext: ... - - @typing.overload - def decryptor( - self: _CIPHER_TYPE, - ) -> CipherContext: ... - - def decryptor(self): - return rust_openssl.ciphers.create_decryption_ctx( - self.algorithm, self.mode - ) - - -_CIPHER_TYPE = Cipher[ - typing.Union[ - modes.ModeWithNonce, - modes.ModeWithTweak, - None, - modes.ECB, - modes.ModeWithInitializationVector, - ] -] - -CipherContext.register(rust_openssl.ciphers.CipherContext) -AEADEncryptionContext.register(rust_openssl.ciphers.AEADEncryptionContext) -AEADDecryptionContext.register(rust_openssl.ciphers.AEADDecryptionContext) diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/ciphers/modes.py b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/ciphers/modes.py deleted file mode 100644 index 1dd2cc1..0000000 --- a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/ciphers/modes.py +++ /dev/null @@ -1,268 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -import abc - -from cryptography import utils -from cryptography.exceptions import UnsupportedAlgorithm, _Reasons -from cryptography.hazmat.primitives._cipheralgorithm import ( - BlockCipherAlgorithm, - CipherAlgorithm, -) -from cryptography.hazmat.primitives.ciphers import algorithms - - -class Mode(metaclass=abc.ABCMeta): - @property - @abc.abstractmethod - def name(self) -> str: - """ - A string naming this mode (e.g. "ECB", "CBC"). - """ - - @abc.abstractmethod - def validate_for_algorithm(self, algorithm: CipherAlgorithm) -> None: - """ - Checks that all the necessary invariants of this (mode, algorithm) - combination are met. - """ - - -class ModeWithInitializationVector(Mode, metaclass=abc.ABCMeta): - @property - @abc.abstractmethod - def initialization_vector(self) -> bytes: - """ - The value of the initialization vector for this mode as bytes. 
- """ - - -class ModeWithTweak(Mode, metaclass=abc.ABCMeta): - @property - @abc.abstractmethod - def tweak(self) -> bytes: - """ - The value of the tweak for this mode as bytes. - """ - - -class ModeWithNonce(Mode, metaclass=abc.ABCMeta): - @property - @abc.abstractmethod - def nonce(self) -> bytes: - """ - The value of the nonce for this mode as bytes. - """ - - -class ModeWithAuthenticationTag(Mode, metaclass=abc.ABCMeta): - @property - @abc.abstractmethod - def tag(self) -> bytes | None: - """ - The value of the tag supplied to the constructor of this mode. - """ - - -def _check_aes_key_length(self: Mode, algorithm: CipherAlgorithm) -> None: - if algorithm.key_size > 256 and algorithm.name == "AES": - raise ValueError( - "Only 128, 192, and 256 bit keys are allowed for this AES mode" - ) - - -def _check_iv_length( - self: ModeWithInitializationVector, algorithm: BlockCipherAlgorithm -) -> None: - iv_len = len(self.initialization_vector) - if iv_len * 8 != algorithm.block_size: - raise ValueError(f"Invalid IV size ({iv_len}) for {self.name}.") - - -def _check_nonce_length( - nonce: bytes, name: str, algorithm: CipherAlgorithm -) -> None: - if not isinstance(algorithm, BlockCipherAlgorithm): - raise UnsupportedAlgorithm( - f"{name} requires a block cipher algorithm", - _Reasons.UNSUPPORTED_CIPHER, - ) - if len(nonce) * 8 != algorithm.block_size: - raise ValueError(f"Invalid nonce size ({len(nonce)}) for {name}.") - - -def _check_iv_and_key_length( - self: ModeWithInitializationVector, algorithm: CipherAlgorithm -) -> None: - if not isinstance(algorithm, BlockCipherAlgorithm): - raise UnsupportedAlgorithm( - f"{self} requires a block cipher algorithm", - _Reasons.UNSUPPORTED_CIPHER, - ) - _check_aes_key_length(self, algorithm) - _check_iv_length(self, algorithm) - - -class CBC(ModeWithInitializationVector): - name = "CBC" - - def __init__(self, initialization_vector: bytes): - utils._check_byteslike("initialization_vector", initialization_vector) - self._initialization_vector = initialization_vector - - @property - def initialization_vector(self) -> bytes: - return self._initialization_vector - - validate_for_algorithm = _check_iv_and_key_length - - -class XTS(ModeWithTweak): - name = "XTS" - - def __init__(self, tweak: bytes): - utils._check_byteslike("tweak", tweak) - - if len(tweak) != 16: - raise ValueError("tweak must be 128-bits (16 bytes)") - - self._tweak = tweak - - @property - def tweak(self) -> bytes: - return self._tweak - - def validate_for_algorithm(self, algorithm: CipherAlgorithm) -> None: - if isinstance(algorithm, (algorithms.AES128, algorithms.AES256)): - raise TypeError( - "The AES128 and AES256 classes do not support XTS, please use " - "the standard AES class instead." 
- ) - - if algorithm.key_size not in (256, 512): - raise ValueError( - "The XTS specification requires a 256-bit key for AES-128-XTS" - " and 512-bit key for AES-256-XTS" - ) - - -class ECB(Mode): - name = "ECB" - - validate_for_algorithm = _check_aes_key_length - - -class OFB(ModeWithInitializationVector): - name = "OFB" - - def __init__(self, initialization_vector: bytes): - utils._check_byteslike("initialization_vector", initialization_vector) - self._initialization_vector = initialization_vector - - @property - def initialization_vector(self) -> bytes: - return self._initialization_vector - - validate_for_algorithm = _check_iv_and_key_length - - -class CFB(ModeWithInitializationVector): - name = "CFB" - - def __init__(self, initialization_vector: bytes): - utils._check_byteslike("initialization_vector", initialization_vector) - self._initialization_vector = initialization_vector - - @property - def initialization_vector(self) -> bytes: - return self._initialization_vector - - validate_for_algorithm = _check_iv_and_key_length - - -class CFB8(ModeWithInitializationVector): - name = "CFB8" - - def __init__(self, initialization_vector: bytes): - utils._check_byteslike("initialization_vector", initialization_vector) - self._initialization_vector = initialization_vector - - @property - def initialization_vector(self) -> bytes: - return self._initialization_vector - - validate_for_algorithm = _check_iv_and_key_length - - -class CTR(ModeWithNonce): - name = "CTR" - - def __init__(self, nonce: bytes): - utils._check_byteslike("nonce", nonce) - self._nonce = nonce - - @property - def nonce(self) -> bytes: - return self._nonce - - def validate_for_algorithm(self, algorithm: CipherAlgorithm) -> None: - _check_aes_key_length(self, algorithm) - _check_nonce_length(self.nonce, self.name, algorithm) - - -class GCM(ModeWithInitializationVector, ModeWithAuthenticationTag): - name = "GCM" - _MAX_ENCRYPTED_BYTES = (2**39 - 256) // 8 - _MAX_AAD_BYTES = (2**64) // 8 - - def __init__( - self, - initialization_vector: bytes, - tag: bytes | None = None, - min_tag_length: int = 16, - ): - # OpenSSL 3.0.0 constrains GCM IVs to [64, 1024] bits inclusive - # This is a sane limit anyway so we'll enforce it here. - utils._check_byteslike("initialization_vector", initialization_vector) - if len(initialization_vector) < 8 or len(initialization_vector) > 128: - raise ValueError( - "initialization_vector must be between 8 and 128 bytes (64 " - "and 1024 bits)." - ) - self._initialization_vector = initialization_vector - if tag is not None: - utils._check_bytes("tag", tag) - if min_tag_length < 4: - raise ValueError("min_tag_length must be >= 4") - if len(tag) < min_tag_length: - raise ValueError( - f"Authentication tag must be {min_tag_length} bytes or " - "longer." - ) - self._tag = tag - self._min_tag_length = min_tag_length - - @property - def tag(self) -> bytes | None: - return self._tag - - @property - def initialization_vector(self) -> bytes: - return self._initialization_vector - - def validate_for_algorithm(self, algorithm: CipherAlgorithm) -> None: - _check_aes_key_length(self, algorithm) - if not isinstance(algorithm, BlockCipherAlgorithm): - raise UnsupportedAlgorithm( - "GCM requires a block cipher algorithm", - _Reasons.UNSUPPORTED_CIPHER, - ) - block_size_bytes = algorithm.block_size // 8 - if self._tag is not None and len(self._tag) > block_size_bytes: - raise ValueError( - f"Authentication tag cannot be more than {block_size_bytes} " - "bytes." 
- ) diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/cmac.py b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/cmac.py deleted file mode 100644 index 2c67ce2..0000000 --- a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/cmac.py +++ /dev/null @@ -1,10 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -from cryptography.hazmat.bindings._rust import openssl as rust_openssl - -__all__ = ["CMAC"] -CMAC = rust_openssl.cmac.CMAC diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/constant_time.py b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/constant_time.py deleted file mode 100644 index 3975c71..0000000 --- a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/constant_time.py +++ /dev/null @@ -1,14 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -import hmac - - -def bytes_eq(a: bytes, b: bytes) -> bool: - if not isinstance(a, bytes) or not isinstance(b, bytes): - raise TypeError("a and b must be bytes.") - - return hmac.compare_digest(a, b) diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/hashes.py b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/hashes.py deleted file mode 100644 index b819e39..0000000 --- a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/hashes.py +++ /dev/null @@ -1,242 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -import abc - -from cryptography.hazmat.bindings._rust import openssl as rust_openssl - -__all__ = [ - "MD5", - "SHA1", - "SHA3_224", - "SHA3_256", - "SHA3_384", - "SHA3_512", - "SHA224", - "SHA256", - "SHA384", - "SHA512", - "SHA512_224", - "SHA512_256", - "SHAKE128", - "SHAKE256", - "SM3", - "BLAKE2b", - "BLAKE2s", - "ExtendableOutputFunction", - "Hash", - "HashAlgorithm", - "HashContext", -] - - -class HashAlgorithm(metaclass=abc.ABCMeta): - @property - @abc.abstractmethod - def name(self) -> str: - """ - A string naming this algorithm (e.g. "sha256", "md5"). - """ - - @property - @abc.abstractmethod - def digest_size(self) -> int: - """ - The size of the resulting digest in bytes. - """ - - @property - @abc.abstractmethod - def block_size(self) -> int | None: - """ - The internal block size of the hash function, or None if the hash - function does not use blocks internally (e.g. SHA3). - """ - - -class HashContext(metaclass=abc.ABCMeta): - @property - @abc.abstractmethod - def algorithm(self) -> HashAlgorithm: - """ - A HashAlgorithm that will be used by this context. - """ - - @abc.abstractmethod - def update(self, data: bytes) -> None: - """ - Processes the provided bytes through the hash. - """ - - @abc.abstractmethod - def finalize(self) -> bytes: - """ - Finalizes the hash context and returns the hash digest as bytes. - """ - - @abc.abstractmethod - def copy(self) -> HashContext: - """ - Return a HashContext that is a copy of the current context. 
- """ - - -Hash = rust_openssl.hashes.Hash -HashContext.register(Hash) - - -class ExtendableOutputFunction(metaclass=abc.ABCMeta): - """ - An interface for extendable output functions. - """ - - -class SHA1(HashAlgorithm): - name = "sha1" - digest_size = 20 - block_size = 64 - - -class SHA512_224(HashAlgorithm): # noqa: N801 - name = "sha512-224" - digest_size = 28 - block_size = 128 - - -class SHA512_256(HashAlgorithm): # noqa: N801 - name = "sha512-256" - digest_size = 32 - block_size = 128 - - -class SHA224(HashAlgorithm): - name = "sha224" - digest_size = 28 - block_size = 64 - - -class SHA256(HashAlgorithm): - name = "sha256" - digest_size = 32 - block_size = 64 - - -class SHA384(HashAlgorithm): - name = "sha384" - digest_size = 48 - block_size = 128 - - -class SHA512(HashAlgorithm): - name = "sha512" - digest_size = 64 - block_size = 128 - - -class SHA3_224(HashAlgorithm): # noqa: N801 - name = "sha3-224" - digest_size = 28 - block_size = None - - -class SHA3_256(HashAlgorithm): # noqa: N801 - name = "sha3-256" - digest_size = 32 - block_size = None - - -class SHA3_384(HashAlgorithm): # noqa: N801 - name = "sha3-384" - digest_size = 48 - block_size = None - - -class SHA3_512(HashAlgorithm): # noqa: N801 - name = "sha3-512" - digest_size = 64 - block_size = None - - -class SHAKE128(HashAlgorithm, ExtendableOutputFunction): - name = "shake128" - block_size = None - - def __init__(self, digest_size: int): - if not isinstance(digest_size, int): - raise TypeError("digest_size must be an integer") - - if digest_size < 1: - raise ValueError("digest_size must be a positive integer") - - self._digest_size = digest_size - - @property - def digest_size(self) -> int: - return self._digest_size - - -class SHAKE256(HashAlgorithm, ExtendableOutputFunction): - name = "shake256" - block_size = None - - def __init__(self, digest_size: int): - if not isinstance(digest_size, int): - raise TypeError("digest_size must be an integer") - - if digest_size < 1: - raise ValueError("digest_size must be a positive integer") - - self._digest_size = digest_size - - @property - def digest_size(self) -> int: - return self._digest_size - - -class MD5(HashAlgorithm): - name = "md5" - digest_size = 16 - block_size = 64 - - -class BLAKE2b(HashAlgorithm): - name = "blake2b" - _max_digest_size = 64 - _min_digest_size = 1 - block_size = 128 - - def __init__(self, digest_size: int): - if digest_size != 64: - raise ValueError("Digest size must be 64") - - self._digest_size = digest_size - - @property - def digest_size(self) -> int: - return self._digest_size - - -class BLAKE2s(HashAlgorithm): - name = "blake2s" - block_size = 64 - _max_digest_size = 32 - _min_digest_size = 1 - - def __init__(self, digest_size: int): - if digest_size != 32: - raise ValueError("Digest size must be 32") - - self._digest_size = digest_size - - @property - def digest_size(self) -> int: - return self._digest_size - - -class SM3(HashAlgorithm): - name = "sm3" - digest_size = 32 - block_size = 64 diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/hmac.py b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/hmac.py deleted file mode 100644 index a9442d5..0000000 --- a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/hmac.py +++ /dev/null @@ -1,13 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
- -from __future__ import annotations - -from cryptography.hazmat.bindings._rust import openssl as rust_openssl -from cryptography.hazmat.primitives import hashes - -__all__ = ["HMAC"] - -HMAC = rust_openssl.hmac.HMAC -hashes.HashContext.register(HMAC) diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/__init__.py b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/__init__.py deleted file mode 100644 index 79bb459..0000000 --- a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/__init__.py +++ /dev/null @@ -1,23 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -import abc - - -class KeyDerivationFunction(metaclass=abc.ABCMeta): - @abc.abstractmethod - def derive(self, key_material: bytes) -> bytes: - """ - Deterministically generates and returns a new key based on the existing - key material. - """ - - @abc.abstractmethod - def verify(self, key_material: bytes, expected_key: bytes) -> None: - """ - Checks whether the key generated by the key material matches the - expected derived key. Raises an exception if they do not match. - """ diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/__init__.cpython-310.pyc b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/__init__.cpython-310.pyc deleted file mode 100644 index e5d5b09..0000000 Binary files a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/__init__.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/concatkdf.cpython-310.pyc b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/concatkdf.cpython-310.pyc deleted file mode 100644 index a28e29d..0000000 Binary files a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/concatkdf.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/hkdf.cpython-310.pyc b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/hkdf.cpython-310.pyc deleted file mode 100644 index 56b8a5c..0000000 Binary files a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/hkdf.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/kbkdf.cpython-310.pyc b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/kbkdf.cpython-310.pyc deleted file mode 100644 index 9fa6534..0000000 Binary files a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/kbkdf.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/pbkdf2.cpython-310.pyc b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/pbkdf2.cpython-310.pyc deleted file mode 100644 index 0905694..0000000 Binary files a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/pbkdf2.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/scrypt.cpython-310.pyc 
b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/scrypt.cpython-310.pyc deleted file mode 100644 index 47d9f42..0000000 Binary files a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/scrypt.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/x963kdf.cpython-310.pyc b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/x963kdf.cpython-310.pyc deleted file mode 100644 index 1682a0b..0000000 Binary files a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/__pycache__/x963kdf.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/concatkdf.py b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/concatkdf.py deleted file mode 100644 index 96d9d4c..0000000 --- a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/concatkdf.py +++ /dev/null @@ -1,124 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -import typing - -from cryptography import utils -from cryptography.exceptions import AlreadyFinalized, InvalidKey -from cryptography.hazmat.primitives import constant_time, hashes, hmac -from cryptography.hazmat.primitives.kdf import KeyDerivationFunction - - -def _int_to_u32be(n: int) -> bytes: - return n.to_bytes(length=4, byteorder="big") - - -def _common_args_checks( - algorithm: hashes.HashAlgorithm, - length: int, - otherinfo: bytes | None, -) -> None: - max_length = algorithm.digest_size * (2**32 - 1) - if length > max_length: - raise ValueError(f"Cannot derive keys larger than {max_length} bits.") - if otherinfo is not None: - utils._check_bytes("otherinfo", otherinfo) - - -def _concatkdf_derive( - key_material: bytes, - length: int, - auxfn: typing.Callable[[], hashes.HashContext], - otherinfo: bytes, -) -> bytes: - utils._check_byteslike("key_material", key_material) - output = [b""] - outlen = 0 - counter = 1 - - while length > outlen: - h = auxfn() - h.update(_int_to_u32be(counter)) - h.update(key_material) - h.update(otherinfo) - output.append(h.finalize()) - outlen += len(output[-1]) - counter += 1 - - return b"".join(output)[:length] - - -class ConcatKDFHash(KeyDerivationFunction): - def __init__( - self, - algorithm: hashes.HashAlgorithm, - length: int, - otherinfo: bytes | None, - backend: typing.Any = None, - ): - _common_args_checks(algorithm, length, otherinfo) - self._algorithm = algorithm - self._length = length - self._otherinfo: bytes = otherinfo if otherinfo is not None else b"" - - self._used = False - - def _hash(self) -> hashes.Hash: - return hashes.Hash(self._algorithm) - - def derive(self, key_material: bytes) -> bytes: - if self._used: - raise AlreadyFinalized - self._used = True - return _concatkdf_derive( - key_material, self._length, self._hash, self._otherinfo - ) - - def verify(self, key_material: bytes, expected_key: bytes) -> None: - if not constant_time.bytes_eq(self.derive(key_material), expected_key): - raise InvalidKey - - -class ConcatKDFHMAC(KeyDerivationFunction): - def __init__( - self, - algorithm: hashes.HashAlgorithm, - length: int, - salt: bytes | None, - otherinfo: bytes | None, - backend: typing.Any = None, - ): - _common_args_checks(algorithm, length, otherinfo) - self._algorithm = algorithm 
- self._length = length - self._otherinfo: bytes = otherinfo if otherinfo is not None else b"" - - if algorithm.block_size is None: - raise TypeError(f"{algorithm.name} is unsupported for ConcatKDF") - - if salt is None: - salt = b"\x00" * algorithm.block_size - else: - utils._check_bytes("salt", salt) - - self._salt = salt - - self._used = False - - def _hmac(self) -> hmac.HMAC: - return hmac.HMAC(self._salt, self._algorithm) - - def derive(self, key_material: bytes) -> bytes: - if self._used: - raise AlreadyFinalized - self._used = True - return _concatkdf_derive( - key_material, self._length, self._hmac, self._otherinfo - ) - - def verify(self, key_material: bytes, expected_key: bytes) -> None: - if not constant_time.bytes_eq(self.derive(key_material), expected_key): - raise InvalidKey diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/hkdf.py b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/hkdf.py deleted file mode 100644 index ee562d2..0000000 --- a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/hkdf.py +++ /dev/null @@ -1,101 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -import typing - -from cryptography import utils -from cryptography.exceptions import AlreadyFinalized, InvalidKey -from cryptography.hazmat.primitives import constant_time, hashes, hmac -from cryptography.hazmat.primitives.kdf import KeyDerivationFunction - - -class HKDF(KeyDerivationFunction): - def __init__( - self, - algorithm: hashes.HashAlgorithm, - length: int, - salt: bytes | None, - info: bytes | None, - backend: typing.Any = None, - ): - self._algorithm = algorithm - - if salt is None: - salt = b"\x00" * self._algorithm.digest_size - else: - utils._check_bytes("salt", salt) - - self._salt = salt - - self._hkdf_expand = HKDFExpand(self._algorithm, length, info) - - def _extract(self, key_material: bytes) -> bytes: - h = hmac.HMAC(self._salt, self._algorithm) - h.update(key_material) - return h.finalize() - - def derive(self, key_material: bytes) -> bytes: - utils._check_byteslike("key_material", key_material) - return self._hkdf_expand.derive(self._extract(key_material)) - - def verify(self, key_material: bytes, expected_key: bytes) -> None: - if not constant_time.bytes_eq(self.derive(key_material), expected_key): - raise InvalidKey - - -class HKDFExpand(KeyDerivationFunction): - def __init__( - self, - algorithm: hashes.HashAlgorithm, - length: int, - info: bytes | None, - backend: typing.Any = None, - ): - self._algorithm = algorithm - - max_length = 255 * algorithm.digest_size - - if length > max_length: - raise ValueError( - f"Cannot derive keys larger than {max_length} octets." 
- ) - - self._length = length - - if info is None: - info = b"" - else: - utils._check_bytes("info", info) - - self._info = info - - self._used = False - - def _expand(self, key_material: bytes) -> bytes: - output = [b""] - counter = 1 - - while self._algorithm.digest_size * (len(output) - 1) < self._length: - h = hmac.HMAC(key_material, self._algorithm) - h.update(output[-1]) - h.update(self._info) - h.update(bytes([counter])) - output.append(h.finalize()) - counter += 1 - - return b"".join(output)[: self._length] - - def derive(self, key_material: bytes) -> bytes: - utils._check_byteslike("key_material", key_material) - if self._used: - raise AlreadyFinalized - - self._used = True - return self._expand(key_material) - - def verify(self, key_material: bytes, expected_key: bytes) -> None: - if not constant_time.bytes_eq(self.derive(key_material), expected_key): - raise InvalidKey diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/kbkdf.py b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/kbkdf.py deleted file mode 100644 index 802b484..0000000 --- a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/kbkdf.py +++ /dev/null @@ -1,302 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -import typing - -from cryptography import utils -from cryptography.exceptions import ( - AlreadyFinalized, - InvalidKey, - UnsupportedAlgorithm, - _Reasons, -) -from cryptography.hazmat.primitives import ( - ciphers, - cmac, - constant_time, - hashes, - hmac, -) -from cryptography.hazmat.primitives.kdf import KeyDerivationFunction - - -class Mode(utils.Enum): - CounterMode = "ctr" - - -class CounterLocation(utils.Enum): - BeforeFixed = "before_fixed" - AfterFixed = "after_fixed" - MiddleFixed = "middle_fixed" - - -class _KBKDFDeriver: - def __init__( - self, - prf: typing.Callable, - mode: Mode, - length: int, - rlen: int, - llen: int | None, - location: CounterLocation, - break_location: int | None, - label: bytes | None, - context: bytes | None, - fixed: bytes | None, - ): - assert callable(prf) - - if not isinstance(mode, Mode): - raise TypeError("mode must be of type Mode") - - if not isinstance(location, CounterLocation): - raise TypeError("location must be of type CounterLocation") - - if break_location is None and location is CounterLocation.MiddleFixed: - raise ValueError("Please specify a break_location") - - if ( - break_location is not None - and location != CounterLocation.MiddleFixed - ): - raise ValueError( - "break_location is ignored when location is not" - " CounterLocation.MiddleFixed" - ) - - if break_location is not None and not isinstance(break_location, int): - raise TypeError("break_location must be an integer") - - if break_location is not None and break_location < 0: - raise ValueError("break_location must be a positive integer") - - if (label or context) and fixed: - raise ValueError( - "When supplying fixed data, label and context are ignored." 
- ) - - if rlen is None or not self._valid_byte_length(rlen): - raise ValueError("rlen must be between 1 and 4") - - if llen is None and fixed is None: - raise ValueError("Please specify an llen") - - if llen is not None and not isinstance(llen, int): - raise TypeError("llen must be an integer") - - if llen == 0: - raise ValueError("llen must be non-zero") - - if label is None: - label = b"" - - if context is None: - context = b"" - - utils._check_bytes("label", label) - utils._check_bytes("context", context) - self._prf = prf - self._mode = mode - self._length = length - self._rlen = rlen - self._llen = llen - self._location = location - self._break_location = break_location - self._label = label - self._context = context - self._used = False - self._fixed_data = fixed - - @staticmethod - def _valid_byte_length(value: int) -> bool: - if not isinstance(value, int): - raise TypeError("value must be of type int") - - value_bin = utils.int_to_bytes(1, value) - if not 1 <= len(value_bin) <= 4: - return False - return True - - def derive(self, key_material: bytes, prf_output_size: int) -> bytes: - if self._used: - raise AlreadyFinalized - - utils._check_byteslike("key_material", key_material) - self._used = True - - # inverse floor division (equivalent to ceiling) - rounds = -(-self._length // prf_output_size) - - output = [b""] - - # For counter mode, the number of iterations shall not be - # larger than 2^r-1, where r <= 32 is the binary length of the counter - # This ensures that the counter values used as an input to the - # PRF will not repeat during a particular call to the KDF function. - r_bin = utils.int_to_bytes(1, self._rlen) - if rounds > pow(2, len(r_bin) * 8) - 1: - raise ValueError("There are too many iterations.") - - fixed = self._generate_fixed_input() - - if self._location == CounterLocation.BeforeFixed: - data_before_ctr = b"" - data_after_ctr = fixed - elif self._location == CounterLocation.AfterFixed: - data_before_ctr = fixed - data_after_ctr = b"" - else: - if isinstance( - self._break_location, int - ) and self._break_location > len(fixed): - raise ValueError("break_location offset > len(fixed)") - data_before_ctr = fixed[: self._break_location] - data_after_ctr = fixed[self._break_location :] - - for i in range(1, rounds + 1): - h = self._prf(key_material) - - counter = utils.int_to_bytes(i, self._rlen) - input_data = data_before_ctr + counter + data_after_ctr - - h.update(input_data) - - output.append(h.finalize()) - - return b"".join(output)[: self._length] - - def _generate_fixed_input(self) -> bytes: - if self._fixed_data and isinstance(self._fixed_data, bytes): - return self._fixed_data - - l_val = utils.int_to_bytes(self._length * 8, self._llen) - - return b"".join([self._label, b"\x00", self._context, l_val]) - - -class KBKDFHMAC(KeyDerivationFunction): - def __init__( - self, - algorithm: hashes.HashAlgorithm, - mode: Mode, - length: int, - rlen: int, - llen: int | None, - location: CounterLocation, - label: bytes | None, - context: bytes | None, - fixed: bytes | None, - backend: typing.Any = None, - *, - break_location: int | None = None, - ): - if not isinstance(algorithm, hashes.HashAlgorithm): - raise UnsupportedAlgorithm( - "Algorithm supplied is not a supported hash algorithm.", - _Reasons.UNSUPPORTED_HASH, - ) - - from cryptography.hazmat.backends.openssl.backend import ( - backend as ossl, - ) - - if not ossl.hmac_supported(algorithm): - raise UnsupportedAlgorithm( - "Algorithm supplied is not a supported hmac algorithm.", - _Reasons.UNSUPPORTED_HASH, - 
) - - self._algorithm = algorithm - - self._deriver = _KBKDFDeriver( - self._prf, - mode, - length, - rlen, - llen, - location, - break_location, - label, - context, - fixed, - ) - - def _prf(self, key_material: bytes) -> hmac.HMAC: - return hmac.HMAC(key_material, self._algorithm) - - def derive(self, key_material: bytes) -> bytes: - return self._deriver.derive(key_material, self._algorithm.digest_size) - - def verify(self, key_material: bytes, expected_key: bytes) -> None: - if not constant_time.bytes_eq(self.derive(key_material), expected_key): - raise InvalidKey - - -class KBKDFCMAC(KeyDerivationFunction): - def __init__( - self, - algorithm, - mode: Mode, - length: int, - rlen: int, - llen: int | None, - location: CounterLocation, - label: bytes | None, - context: bytes | None, - fixed: bytes | None, - backend: typing.Any = None, - *, - break_location: int | None = None, - ): - if not issubclass( - algorithm, ciphers.BlockCipherAlgorithm - ) or not issubclass(algorithm, ciphers.CipherAlgorithm): - raise UnsupportedAlgorithm( - "Algorithm supplied is not a supported cipher algorithm.", - _Reasons.UNSUPPORTED_CIPHER, - ) - - self._algorithm = algorithm - self._cipher: ciphers.BlockCipherAlgorithm | None = None - - self._deriver = _KBKDFDeriver( - self._prf, - mode, - length, - rlen, - llen, - location, - break_location, - label, - context, - fixed, - ) - - def _prf(self, _: bytes) -> cmac.CMAC: - assert self._cipher is not None - - return cmac.CMAC(self._cipher) - - def derive(self, key_material: bytes) -> bytes: - self._cipher = self._algorithm(key_material) - - assert self._cipher is not None - - from cryptography.hazmat.backends.openssl.backend import ( - backend as ossl, - ) - - if not ossl.cmac_algorithm_supported(self._cipher): - raise UnsupportedAlgorithm( - "Algorithm supplied is not a supported cipher algorithm.", - _Reasons.UNSUPPORTED_CIPHER, - ) - - return self._deriver.derive(key_material, self._cipher.block_size // 8) - - def verify(self, key_material: bytes, expected_key: bytes) -> None: - if not constant_time.bytes_eq(self.derive(key_material), expected_key): - raise InvalidKey diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/pbkdf2.py b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/pbkdf2.py deleted file mode 100644 index 82689eb..0000000 --- a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/pbkdf2.py +++ /dev/null @@ -1,62 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
- -from __future__ import annotations - -import typing - -from cryptography import utils -from cryptography.exceptions import ( - AlreadyFinalized, - InvalidKey, - UnsupportedAlgorithm, - _Reasons, -) -from cryptography.hazmat.bindings._rust import openssl as rust_openssl -from cryptography.hazmat.primitives import constant_time, hashes -from cryptography.hazmat.primitives.kdf import KeyDerivationFunction - - -class PBKDF2HMAC(KeyDerivationFunction): - def __init__( - self, - algorithm: hashes.HashAlgorithm, - length: int, - salt: bytes, - iterations: int, - backend: typing.Any = None, - ): - from cryptography.hazmat.backends.openssl.backend import ( - backend as ossl, - ) - - if not ossl.pbkdf2_hmac_supported(algorithm): - raise UnsupportedAlgorithm( - f"{algorithm.name} is not supported for PBKDF2.", - _Reasons.UNSUPPORTED_HASH, - ) - self._used = False - self._algorithm = algorithm - self._length = length - utils._check_bytes("salt", salt) - self._salt = salt - self._iterations = iterations - - def derive(self, key_material: bytes) -> bytes: - if self._used: - raise AlreadyFinalized("PBKDF2 instances can only be used once.") - self._used = True - - return rust_openssl.kdf.derive_pbkdf2_hmac( - key_material, - self._algorithm, - self._salt, - self._iterations, - self._length, - ) - - def verify(self, key_material: bytes, expected_key: bytes) -> None: - derived_key = self.derive(key_material) - if not constant_time.bytes_eq(derived_key, expected_key): - raise InvalidKey("Keys do not match.") diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/scrypt.py b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/scrypt.py deleted file mode 100644 index 05a4f67..0000000 --- a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/scrypt.py +++ /dev/null @@ -1,80 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
- -from __future__ import annotations - -import sys -import typing - -from cryptography import utils -from cryptography.exceptions import ( - AlreadyFinalized, - InvalidKey, - UnsupportedAlgorithm, -) -from cryptography.hazmat.bindings._rust import openssl as rust_openssl -from cryptography.hazmat.primitives import constant_time -from cryptography.hazmat.primitives.kdf import KeyDerivationFunction - -# This is used by the scrypt tests to skip tests that require more memory -# than the MEM_LIMIT -_MEM_LIMIT = sys.maxsize // 2 - - -class Scrypt(KeyDerivationFunction): - def __init__( - self, - salt: bytes, - length: int, - n: int, - r: int, - p: int, - backend: typing.Any = None, - ): - from cryptography.hazmat.backends.openssl.backend import ( - backend as ossl, - ) - - if not ossl.scrypt_supported(): - raise UnsupportedAlgorithm( - "This version of OpenSSL does not support scrypt" - ) - self._length = length - utils._check_bytes("salt", salt) - if n < 2 or (n & (n - 1)) != 0: - raise ValueError("n must be greater than 1 and be a power of 2.") - - if r < 1: - raise ValueError("r must be greater than or equal to 1.") - - if p < 1: - raise ValueError("p must be greater than or equal to 1.") - - self._used = False - self._salt = salt - self._n = n - self._r = r - self._p = p - - def derive(self, key_material: bytes) -> bytes: - if self._used: - raise AlreadyFinalized("Scrypt instances can only be used once.") - self._used = True - - utils._check_byteslike("key_material", key_material) - - return rust_openssl.kdf.derive_scrypt( - key_material, - self._salt, - self._n, - self._r, - self._p, - _MEM_LIMIT, - self._length, - ) - - def verify(self, key_material: bytes, expected_key: bytes) -> None: - derived_key = self.derive(key_material) - if not constant_time.bytes_eq(derived_key, expected_key): - raise InvalidKey("Keys do not match.") diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/x963kdf.py b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/x963kdf.py deleted file mode 100644 index 6e38366..0000000 --- a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/kdf/x963kdf.py +++ /dev/null @@ -1,61 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
- -from __future__ import annotations - -import typing - -from cryptography import utils -from cryptography.exceptions import AlreadyFinalized, InvalidKey -from cryptography.hazmat.primitives import constant_time, hashes -from cryptography.hazmat.primitives.kdf import KeyDerivationFunction - - -def _int_to_u32be(n: int) -> bytes: - return n.to_bytes(length=4, byteorder="big") - - -class X963KDF(KeyDerivationFunction): - def __init__( - self, - algorithm: hashes.HashAlgorithm, - length: int, - sharedinfo: bytes | None, - backend: typing.Any = None, - ): - max_len = algorithm.digest_size * (2**32 - 1) - if length > max_len: - raise ValueError(f"Cannot derive keys larger than {max_len} bits.") - if sharedinfo is not None: - utils._check_bytes("sharedinfo", sharedinfo) - - self._algorithm = algorithm - self._length = length - self._sharedinfo = sharedinfo - self._used = False - - def derive(self, key_material: bytes) -> bytes: - if self._used: - raise AlreadyFinalized - self._used = True - utils._check_byteslike("key_material", key_material) - output = [b""] - outlen = 0 - counter = 1 - - while self._length > outlen: - h = hashes.Hash(self._algorithm) - h.update(key_material) - h.update(_int_to_u32be(counter)) - if self._sharedinfo is not None: - h.update(self._sharedinfo) - output.append(h.finalize()) - outlen += len(output[-1]) - counter += 1 - - return b"".join(output)[: self._length] - - def verify(self, key_material: bytes, expected_key: bytes) -> None: - if not constant_time.bytes_eq(self.derive(key_material), expected_key): - raise InvalidKey diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/keywrap.py b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/keywrap.py deleted file mode 100644 index b93d87d..0000000 --- a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/keywrap.py +++ /dev/null @@ -1,177 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details.
- -from __future__ import annotations - -import typing - -from cryptography.hazmat.primitives.ciphers import Cipher -from cryptography.hazmat.primitives.ciphers.algorithms import AES -from cryptography.hazmat.primitives.ciphers.modes import ECB -from cryptography.hazmat.primitives.constant_time import bytes_eq - - -def _wrap_core( - wrapping_key: bytes, - a: bytes, - r: list[bytes], -) -> bytes: - # RFC 3394 Key Wrap - 2.2.1 (index method) - encryptor = Cipher(AES(wrapping_key), ECB()).encryptor() - n = len(r) - for j in range(6): - for i in range(n): - # every encryption operation is a discrete 16 byte chunk (because - # AES has a 128-bit block size) and since we're using ECB it is - # safe to reuse the encryptor for the entire operation - b = encryptor.update(a + r[i]) - a = ( - int.from_bytes(b[:8], byteorder="big") ^ ((n * j) + i + 1) - ).to_bytes(length=8, byteorder="big") - r[i] = b[-8:] - - assert encryptor.finalize() == b"" - - return a + b"".join(r) - - -def aes_key_wrap( - wrapping_key: bytes, - key_to_wrap: bytes, - backend: typing.Any = None, -) -> bytes: - if len(wrapping_key) not in [16, 24, 32]: - raise ValueError("The wrapping key must be a valid AES key length") - - if len(key_to_wrap) < 16: - raise ValueError("The key to wrap must be at least 16 bytes") - - if len(key_to_wrap) % 8 != 0: - raise ValueError("The key to wrap must be a multiple of 8 bytes") - - a = b"\xa6\xa6\xa6\xa6\xa6\xa6\xa6\xa6" - r = [key_to_wrap[i : i + 8] for i in range(0, len(key_to_wrap), 8)] - return _wrap_core(wrapping_key, a, r) - - -def _unwrap_core( - wrapping_key: bytes, - a: bytes, - r: list[bytes], -) -> tuple[bytes, list[bytes]]: - # Implement RFC 3394 Key Unwrap - 2.2.2 (index method) - decryptor = Cipher(AES(wrapping_key), ECB()).decryptor() - n = len(r) - for j in reversed(range(6)): - for i in reversed(range(n)): - atr = ( - int.from_bytes(a, byteorder="big") ^ ((n * j) + i + 1) - ).to_bytes(length=8, byteorder="big") + r[i] - # every decryption operation is a discrete 16 byte chunk so - # it is safe to reuse the decryptor for the entire operation - b = decryptor.update(atr) - a = b[:8] - r[i] = b[-8:] - - assert decryptor.finalize() == b"" - return a, r - - -def aes_key_wrap_with_padding( - wrapping_key: bytes, - key_to_wrap: bytes, - backend: typing.Any = None, -) -> bytes: - if len(wrapping_key) not in [16, 24, 32]: - raise ValueError("The wrapping key must be a valid AES key length") - - aiv = b"\xa6\x59\x59\xa6" + len(key_to_wrap).to_bytes( - length=4, byteorder="big" - ) - # pad the key to wrap if necessary - pad = (8 - (len(key_to_wrap) % 8)) % 8 - key_to_wrap = key_to_wrap + b"\x00" * pad - if len(key_to_wrap) == 8: - # RFC 5649 - 4.1 - exactly 8 octets after padding - encryptor = Cipher(AES(wrapping_key), ECB()).encryptor() - b = encryptor.update(aiv + key_to_wrap) - assert encryptor.finalize() == b"" - return b - else: - r = [key_to_wrap[i : i + 8] for i in range(0, len(key_to_wrap), 8)] - return _wrap_core(wrapping_key, aiv, r) - - -def aes_key_unwrap_with_padding( - wrapping_key: bytes, - wrapped_key: bytes, - backend: typing.Any = None, -) -> bytes: - if len(wrapped_key) < 16: - raise InvalidUnwrap("Must be at least 16 bytes") - - if len(wrapping_key) not in [16, 24, 32]: - raise ValueError("The wrapping key must be a valid AES key length") - - if len(wrapped_key) == 16: - # RFC 5649 - 4.2 - exactly two 64-bit blocks - decryptor = Cipher(AES(wrapping_key), ECB()).decryptor() - out = decryptor.update(wrapped_key) - assert decryptor.finalize() == b"" - a = out[:8] - data = 
out[8:] - n = 1 - else: - r = [wrapped_key[i : i + 8] for i in range(0, len(wrapped_key), 8)] - encrypted_aiv = r.pop(0) - n = len(r) - a, r = _unwrap_core(wrapping_key, encrypted_aiv, r) - data = b"".join(r) - - # 1) Check that MSB(32,A) = A65959A6. - # 2) Check that 8*(n-1) < LSB(32,A) <= 8*n. If so, let - # MLI = LSB(32,A). - # 3) Let b = (8*n)-MLI, and then check that the rightmost b octets of - # the output data are zero. - mli = int.from_bytes(a[4:], byteorder="big") - b = (8 * n) - mli - if ( - not bytes_eq(a[:4], b"\xa6\x59\x59\xa6") - or not 8 * (n - 1) < mli <= 8 * n - or (b != 0 and not bytes_eq(data[-b:], b"\x00" * b)) - ): - raise InvalidUnwrap() - - if b == 0: - return data - else: - return data[:-b] - - -def aes_key_unwrap( - wrapping_key: bytes, - wrapped_key: bytes, - backend: typing.Any = None, -) -> bytes: - if len(wrapped_key) < 24: - raise InvalidUnwrap("Must be at least 24 bytes") - - if len(wrapped_key) % 8 != 0: - raise InvalidUnwrap("The wrapped key must be a multiple of 8 bytes") - - if len(wrapping_key) not in [16, 24, 32]: - raise ValueError("The wrapping key must be a valid AES key length") - - aiv = b"\xa6\xa6\xa6\xa6\xa6\xa6\xa6\xa6" - r = [wrapped_key[i : i + 8] for i in range(0, len(wrapped_key), 8)] - a = r.pop(0) - a, r = _unwrap_core(wrapping_key, a, r) - if not bytes_eq(a, aiv): - raise InvalidUnwrap() - - return b"".join(r) - - -class InvalidUnwrap(Exception): - pass diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/padding.py b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/padding.py deleted file mode 100644 index d1ca775..0000000 --- a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/padding.py +++ /dev/null @@ -1,204 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -import abc -import typing - -from cryptography import utils -from cryptography.exceptions import AlreadyFinalized -from cryptography.hazmat.bindings._rust import ( - PKCS7PaddingContext, - check_ansix923_padding, - check_pkcs7_padding, -) - - -class PaddingContext(metaclass=abc.ABCMeta): - @abc.abstractmethod - def update(self, data: bytes) -> bytes: - """ - Pads the provided bytes and returns any available data as bytes. - """ - - @abc.abstractmethod - def finalize(self) -> bytes: - """ - Finalize the padding, returns bytes. 
- """ - - -def _byte_padding_check(block_size: int) -> None: - if not (0 <= block_size <= 2040): - raise ValueError("block_size must be in range(0, 2041).") - - if block_size % 8 != 0: - raise ValueError("block_size must be a multiple of 8.") - - -def _byte_padding_update( - buffer_: bytes | None, data: bytes, block_size: int -) -> tuple[bytes, bytes]: - if buffer_ is None: - raise AlreadyFinalized("Context was already finalized.") - - utils._check_byteslike("data", data) - - buffer_ += bytes(data) - - finished_blocks = len(buffer_) // (block_size // 8) - - result = buffer_[: finished_blocks * (block_size // 8)] - buffer_ = buffer_[finished_blocks * (block_size // 8) :] - - return buffer_, result - - -def _byte_padding_pad( - buffer_: bytes | None, - block_size: int, - paddingfn: typing.Callable[[int], bytes], -) -> bytes: - if buffer_ is None: - raise AlreadyFinalized("Context was already finalized.") - - pad_size = block_size // 8 - len(buffer_) - return buffer_ + paddingfn(pad_size) - - -def _byte_unpadding_update( - buffer_: bytes | None, data: bytes, block_size: int -) -> tuple[bytes, bytes]: - if buffer_ is None: - raise AlreadyFinalized("Context was already finalized.") - - utils._check_byteslike("data", data) - - buffer_ += bytes(data) - - finished_blocks = max(len(buffer_) // (block_size // 8) - 1, 0) - - result = buffer_[: finished_blocks * (block_size // 8)] - buffer_ = buffer_[finished_blocks * (block_size // 8) :] - - return buffer_, result - - -def _byte_unpadding_check( - buffer_: bytes | None, - block_size: int, - checkfn: typing.Callable[[bytes], int], -) -> bytes: - if buffer_ is None: - raise AlreadyFinalized("Context was already finalized.") - - if len(buffer_) != block_size // 8: - raise ValueError("Invalid padding bytes.") - - valid = checkfn(buffer_) - - if not valid: - raise ValueError("Invalid padding bytes.") - - pad_size = buffer_[-1] - return buffer_[:-pad_size] - - -class PKCS7: - def __init__(self, block_size: int): - _byte_padding_check(block_size) - self.block_size = block_size - - def padder(self) -> PaddingContext: - return PKCS7PaddingContext(self.block_size) - - def unpadder(self) -> PaddingContext: - return _PKCS7UnpaddingContext(self.block_size) - - -class _PKCS7UnpaddingContext(PaddingContext): - _buffer: bytes | None - - def __init__(self, block_size: int): - self.block_size = block_size - # TODO: more copies than necessary, we should use zero-buffer (#193) - self._buffer = b"" - - def update(self, data: bytes) -> bytes: - self._buffer, result = _byte_unpadding_update( - self._buffer, data, self.block_size - ) - return result - - def finalize(self) -> bytes: - result = _byte_unpadding_check( - self._buffer, self.block_size, check_pkcs7_padding - ) - self._buffer = None - return result - - -PaddingContext.register(PKCS7PaddingContext) - - -class ANSIX923: - def __init__(self, block_size: int): - _byte_padding_check(block_size) - self.block_size = block_size - - def padder(self) -> PaddingContext: - return _ANSIX923PaddingContext(self.block_size) - - def unpadder(self) -> PaddingContext: - return _ANSIX923UnpaddingContext(self.block_size) - - -class _ANSIX923PaddingContext(PaddingContext): - _buffer: bytes | None - - def __init__(self, block_size: int): - self.block_size = block_size - # TODO: more copies than necessary, we should use zero-buffer (#193) - self._buffer = b"" - - def update(self, data: bytes) -> bytes: - self._buffer, result = _byte_padding_update( - self._buffer, data, self.block_size - ) - return result - - def _padding(self, size: 
int) -> bytes: - return bytes([0]) * (size - 1) + bytes([size]) - - def finalize(self) -> bytes: - result = _byte_padding_pad( - self._buffer, self.block_size, self._padding - ) - self._buffer = None - return result - - -class _ANSIX923UnpaddingContext(PaddingContext): - _buffer: bytes | None - - def __init__(self, block_size: int): - self.block_size = block_size - # TODO: more copies than necessary, we should use zero-buffer (#193) - self._buffer = b"" - - def update(self, data: bytes) -> bytes: - self._buffer, result = _byte_unpadding_update( - self._buffer, data, self.block_size - ) - return result - - def finalize(self) -> bytes: - result = _byte_unpadding_check( - self._buffer, - self.block_size, - check_ansix923_padding, - ) - self._buffer = None - return result diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/poly1305.py b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/poly1305.py deleted file mode 100644 index 7f5a77a..0000000 --- a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/poly1305.py +++ /dev/null @@ -1,11 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -from cryptography.hazmat.bindings._rust import openssl as rust_openssl - -__all__ = ["Poly1305"] - -Poly1305 = rust_openssl.poly1305.Poly1305 diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/serialization/__init__.py b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/serialization/__init__.py deleted file mode 100644 index 07b2264..0000000 --- a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/serialization/__init__.py +++ /dev/null @@ -1,63 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
- -from __future__ import annotations - -from cryptography.hazmat.primitives._serialization import ( - BestAvailableEncryption, - Encoding, - KeySerializationEncryption, - NoEncryption, - ParameterFormat, - PrivateFormat, - PublicFormat, - _KeySerializationEncryption, -) -from cryptography.hazmat.primitives.serialization.base import ( - load_der_parameters, - load_der_private_key, - load_der_public_key, - load_pem_parameters, - load_pem_private_key, - load_pem_public_key, -) -from cryptography.hazmat.primitives.serialization.ssh import ( - SSHCertificate, - SSHCertificateBuilder, - SSHCertificateType, - SSHCertPrivateKeyTypes, - SSHCertPublicKeyTypes, - SSHPrivateKeyTypes, - SSHPublicKeyTypes, - load_ssh_private_key, - load_ssh_public_identity, - load_ssh_public_key, -) - -__all__ = [ - "BestAvailableEncryption", - "Encoding", - "KeySerializationEncryption", - "NoEncryption", - "ParameterFormat", - "PrivateFormat", - "PublicFormat", - "SSHCertPrivateKeyTypes", - "SSHCertPublicKeyTypes", - "SSHCertificate", - "SSHCertificateBuilder", - "SSHCertificateType", - "SSHPrivateKeyTypes", - "SSHPublicKeyTypes", - "_KeySerializationEncryption", - "load_der_parameters", - "load_der_private_key", - "load_der_public_key", - "load_pem_parameters", - "load_pem_private_key", - "load_pem_public_key", - "load_ssh_private_key", - "load_ssh_public_identity", - "load_ssh_public_key", -] diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/__init__.cpython-310.pyc b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/__init__.cpython-310.pyc deleted file mode 100644 index 8cb74bd..0000000 Binary files a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/__init__.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/base.cpython-310.pyc b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/base.cpython-310.pyc deleted file mode 100644 index d93441e..0000000 Binary files a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/base.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/pkcs12.cpython-310.pyc b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/pkcs12.cpython-310.pyc deleted file mode 100644 index 1dea87a..0000000 Binary files a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/pkcs12.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/pkcs7.cpython-310.pyc b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/pkcs7.cpython-310.pyc deleted file mode 100644 index c8ec003..0000000 Binary files a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/pkcs7.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/ssh.cpython-310.pyc b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/ssh.cpython-310.pyc deleted file mode 100644 index b7e1c47..0000000 Binary files a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/serialization/__pycache__/ssh.cpython-310.pyc and 
/dev/null differ diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/serialization/base.py b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/serialization/base.py deleted file mode 100644 index e7c998b..0000000 --- a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/serialization/base.py +++ /dev/null @@ -1,14 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from cryptography.hazmat.bindings._rust import openssl as rust_openssl - -load_pem_private_key = rust_openssl.keys.load_pem_private_key -load_der_private_key = rust_openssl.keys.load_der_private_key - -load_pem_public_key = rust_openssl.keys.load_pem_public_key -load_der_public_key = rust_openssl.keys.load_der_public_key - -load_pem_parameters = rust_openssl.dh.from_pem_parameters -load_der_parameters = rust_openssl.dh.from_der_parameters diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/serialization/pkcs12.py b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/serialization/pkcs12.py deleted file mode 100644 index 549e1f9..0000000 --- a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/serialization/pkcs12.py +++ /dev/null @@ -1,156 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -import typing - -from cryptography import x509 -from cryptography.hazmat.bindings._rust import pkcs12 as rust_pkcs12 -from cryptography.hazmat.primitives import serialization -from cryptography.hazmat.primitives._serialization import PBES as PBES -from cryptography.hazmat.primitives.asymmetric import ( - dsa, - ec, - ed448, - ed25519, - rsa, -) -from cryptography.hazmat.primitives.asymmetric.types import PrivateKeyTypes - -__all__ = [ - "PBES", - "PKCS12Certificate", - "PKCS12KeyAndCertificates", - "PKCS12PrivateKeyTypes", - "load_key_and_certificates", - "load_pkcs12", - "serialize_key_and_certificates", -] - -PKCS12PrivateKeyTypes = typing.Union[ - rsa.RSAPrivateKey, - dsa.DSAPrivateKey, - ec.EllipticCurvePrivateKey, - ed25519.Ed25519PrivateKey, - ed448.Ed448PrivateKey, -] - - -PKCS12Certificate = rust_pkcs12.PKCS12Certificate - - -class PKCS12KeyAndCertificates: - def __init__( - self, - key: PrivateKeyTypes | None, - cert: PKCS12Certificate | None, - additional_certs: list[PKCS12Certificate], - ): - if key is not None and not isinstance( - key, - ( - rsa.RSAPrivateKey, - dsa.DSAPrivateKey, - ec.EllipticCurvePrivateKey, - ed25519.Ed25519PrivateKey, - ed448.Ed448PrivateKey, - ), - ): - raise TypeError( - "Key must be RSA, DSA, EllipticCurve, ED25519, or ED448" - " private key, or None." 
- ) - if cert is not None and not isinstance(cert, PKCS12Certificate): - raise TypeError("cert must be a PKCS12Certificate object or None") - if not all( - isinstance(add_cert, PKCS12Certificate) - for add_cert in additional_certs - ): - raise TypeError( - "all values in additional_certs must be PKCS12Certificate" - " objects" - ) - self._key = key - self._cert = cert - self._additional_certs = additional_certs - - @property - def key(self) -> PrivateKeyTypes | None: - return self._key - - @property - def cert(self) -> PKCS12Certificate | None: - return self._cert - - @property - def additional_certs(self) -> list[PKCS12Certificate]: - return self._additional_certs - - def __eq__(self, other: object) -> bool: - if not isinstance(other, PKCS12KeyAndCertificates): - return NotImplemented - - return ( - self.key == other.key - and self.cert == other.cert - and self.additional_certs == other.additional_certs - ) - - def __hash__(self) -> int: - return hash((self.key, self.cert, tuple(self.additional_certs))) - - def __repr__(self) -> str: - fmt = ( - "" - ) - return fmt.format(self.key, self.cert, self.additional_certs) - - -load_key_and_certificates = rust_pkcs12.load_key_and_certificates -load_pkcs12 = rust_pkcs12.load_pkcs12 - - -_PKCS12CATypes = typing.Union[ - x509.Certificate, - PKCS12Certificate, -] - - -def serialize_key_and_certificates( - name: bytes | None, - key: PKCS12PrivateKeyTypes | None, - cert: x509.Certificate | None, - cas: typing.Iterable[_PKCS12CATypes] | None, - encryption_algorithm: serialization.KeySerializationEncryption, -) -> bytes: - if key is not None and not isinstance( - key, - ( - rsa.RSAPrivateKey, - dsa.DSAPrivateKey, - ec.EllipticCurvePrivateKey, - ed25519.Ed25519PrivateKey, - ed448.Ed448PrivateKey, - ), - ): - raise TypeError( - "Key must be RSA, DSA, EllipticCurve, ED25519, or ED448" - " private key, or None." - ) - - if not isinstance( - encryption_algorithm, serialization.KeySerializationEncryption - ): - raise TypeError( - "Key encryption algorithm must be a " - "KeySerializationEncryption instance" - ) - - if key is None and cert is None and not cas: - raise ValueError("You must supply at least one of key, cert, or cas") - - return rust_pkcs12.serialize_key_and_certificates( - name, key, cert, cas, encryption_algorithm - ) diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/serialization/pkcs7.py b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/serialization/pkcs7.py deleted file mode 100644 index 97ea9db..0000000 --- a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/serialization/pkcs7.py +++ /dev/null @@ -1,336 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
- -from __future__ import annotations - -import email.base64mime -import email.generator -import email.message -import email.policy -import io -import typing - -from cryptography import utils, x509 -from cryptography.exceptions import UnsupportedAlgorithm, _Reasons -from cryptography.hazmat.bindings._rust import pkcs7 as rust_pkcs7 -from cryptography.hazmat.primitives import hashes, serialization -from cryptography.hazmat.primitives.asymmetric import ec, padding, rsa -from cryptography.utils import _check_byteslike - -load_pem_pkcs7_certificates = rust_pkcs7.load_pem_pkcs7_certificates - -load_der_pkcs7_certificates = rust_pkcs7.load_der_pkcs7_certificates - -serialize_certificates = rust_pkcs7.serialize_certificates - -PKCS7HashTypes = typing.Union[ - hashes.SHA224, - hashes.SHA256, - hashes.SHA384, - hashes.SHA512, -] - -PKCS7PrivateKeyTypes = typing.Union[ - rsa.RSAPrivateKey, ec.EllipticCurvePrivateKey -] - - -class PKCS7Options(utils.Enum): - Text = "Add text/plain MIME type" - Binary = "Don't translate input data into canonical MIME format" - DetachedSignature = "Don't embed data in the PKCS7 structure" - NoCapabilities = "Don't embed SMIME capabilities" - NoAttributes = "Don't embed authenticatedAttributes" - NoCerts = "Don't embed signer certificate" - - -class PKCS7SignatureBuilder: - def __init__( - self, - data: bytes | None = None, - signers: list[ - tuple[ - x509.Certificate, - PKCS7PrivateKeyTypes, - PKCS7HashTypes, - padding.PSS | padding.PKCS1v15 | None, - ] - ] = [], - additional_certs: list[x509.Certificate] = [], - ): - self._data = data - self._signers = signers - self._additional_certs = additional_certs - - def set_data(self, data: bytes) -> PKCS7SignatureBuilder: - _check_byteslike("data", data) - if self._data is not None: - raise ValueError("data may only be set once") - - return PKCS7SignatureBuilder(data, self._signers) - - def add_signer( - self, - certificate: x509.Certificate, - private_key: PKCS7PrivateKeyTypes, - hash_algorithm: PKCS7HashTypes, - *, - rsa_padding: padding.PSS | padding.PKCS1v15 | None = None, - ) -> PKCS7SignatureBuilder: - if not isinstance( - hash_algorithm, - ( - hashes.SHA224, - hashes.SHA256, - hashes.SHA384, - hashes.SHA512, - ), - ): - raise TypeError( - "hash_algorithm must be one of hashes.SHA224, " - "SHA256, SHA384, or SHA512" - ) - if not isinstance(certificate, x509.Certificate): - raise TypeError("certificate must be a x509.Certificate") - - if not isinstance( - private_key, (rsa.RSAPrivateKey, ec.EllipticCurvePrivateKey) - ): - raise TypeError("Only RSA & EC keys are supported at this time.") - - if rsa_padding is not None: - if not isinstance(rsa_padding, (padding.PSS, padding.PKCS1v15)): - raise TypeError("Padding must be PSS or PKCS1v15") - if not isinstance(private_key, rsa.RSAPrivateKey): - raise TypeError("Padding is only supported for RSA keys") - - return PKCS7SignatureBuilder( - self._data, - [ - *self._signers, - (certificate, private_key, hash_algorithm, rsa_padding), - ], - ) - - def add_certificate( - self, certificate: x509.Certificate - ) -> PKCS7SignatureBuilder: - if not isinstance(certificate, x509.Certificate): - raise TypeError("certificate must be a x509.Certificate") - - return PKCS7SignatureBuilder( - self._data, self._signers, [*self._additional_certs, certificate] - ) - - def sign( - self, - encoding: serialization.Encoding, - options: typing.Iterable[PKCS7Options], - backend: typing.Any = None, - ) -> bytes: - if len(self._signers) == 0: - raise ValueError("Must have at least one signer") - if 
self._data is None: - raise ValueError("You must add data to sign") - options = list(options) - if not all(isinstance(x, PKCS7Options) for x in options): - raise ValueError("options must be from the PKCS7Options enum") - if encoding not in ( - serialization.Encoding.PEM, - serialization.Encoding.DER, - serialization.Encoding.SMIME, - ): - raise ValueError( - "Must be PEM, DER, or SMIME from the Encoding enum" - ) - - # Text is a meaningless option unless it is accompanied by - # DetachedSignature - if ( - PKCS7Options.Text in options - and PKCS7Options.DetachedSignature not in options - ): - raise ValueError( - "When passing the Text option you must also pass " - "DetachedSignature" - ) - - if PKCS7Options.Text in options and encoding in ( - serialization.Encoding.DER, - serialization.Encoding.PEM, - ): - raise ValueError( - "The Text option is only available for SMIME serialization" - ) - - # No attributes implies no capabilities so we'll error if you try to - # pass both. - if ( - PKCS7Options.NoAttributes in options - and PKCS7Options.NoCapabilities in options - ): - raise ValueError( - "NoAttributes is a superset of NoCapabilities. Do not pass " - "both values." - ) - - return rust_pkcs7.sign_and_serialize(self, encoding, options) - - -class PKCS7EnvelopeBuilder: - def __init__( - self, - *, - _data: bytes | None = None, - _recipients: list[x509.Certificate] | None = None, - ): - from cryptography.hazmat.backends.openssl.backend import ( - backend as ossl, - ) - - if not ossl.rsa_encryption_supported(padding=padding.PKCS1v15()): - raise UnsupportedAlgorithm( - "RSA with PKCS1 v1.5 padding is not supported by this version" - " of OpenSSL.", - _Reasons.UNSUPPORTED_PADDING, - ) - self._data = _data - self._recipients = _recipients if _recipients is not None else [] - - def set_data(self, data: bytes) -> PKCS7EnvelopeBuilder: - _check_byteslike("data", data) - if self._data is not None: - raise ValueError("data may only be set once") - - return PKCS7EnvelopeBuilder(_data=data, _recipients=self._recipients) - - def add_recipient( - self, - certificate: x509.Certificate, - ) -> PKCS7EnvelopeBuilder: - if not isinstance(certificate, x509.Certificate): - raise TypeError("certificate must be a x509.Certificate") - - if not isinstance(certificate.public_key(), rsa.RSAPublicKey): - raise TypeError("Only RSA keys are supported at this time.") - - return PKCS7EnvelopeBuilder( - _data=self._data, - _recipients=[ - *self._recipients, - certificate, - ], - ) - - def encrypt( - self, - encoding: serialization.Encoding, - options: typing.Iterable[PKCS7Options], - ) -> bytes: - if len(self._recipients) == 0: - raise ValueError("Must have at least one recipient") - if self._data is None: - raise ValueError("You must add data to encrypt") - options = list(options) - if not all(isinstance(x, PKCS7Options) for x in options): - raise ValueError("options must be from the PKCS7Options enum") - if encoding not in ( - serialization.Encoding.PEM, - serialization.Encoding.DER, - serialization.Encoding.SMIME, - ): - raise ValueError( - "Must be PEM, DER, or SMIME from the Encoding enum" - ) - - # Only allow options that make sense for encryption - if any( - opt not in [PKCS7Options.Text, PKCS7Options.Binary] - for opt in options - ): - raise ValueError( - "Only the following options are supported for encryption: " - "Text, Binary" - ) - elif PKCS7Options.Text in options and PKCS7Options.Binary in options: - # OpenSSL accepts both options at the same time, but ignores Text. 
- # We fail defensively to avoid unexpected outputs. - raise ValueError( - "Cannot use Binary and Text options at the same time" - ) - - return rust_pkcs7.encrypt_and_serialize(self, encoding, options) - - -def _smime_signed_encode( - data: bytes, signature: bytes, micalg: str, text_mode: bool -) -> bytes: - # This function works pretty hard to replicate what OpenSSL does - # precisely. For good and for ill. - - m = email.message.Message() - m.add_header("MIME-Version", "1.0") - m.add_header( - "Content-Type", - "multipart/signed", - protocol="application/x-pkcs7-signature", - micalg=micalg, - ) - - m.preamble = "This is an S/MIME signed message\n" - - msg_part = OpenSSLMimePart() - msg_part.set_payload(data) - if text_mode: - msg_part.add_header("Content-Type", "text/plain") - m.attach(msg_part) - - sig_part = email.message.MIMEPart() - sig_part.add_header( - "Content-Type", "application/x-pkcs7-signature", name="smime.p7s" - ) - sig_part.add_header("Content-Transfer-Encoding", "base64") - sig_part.add_header( - "Content-Disposition", "attachment", filename="smime.p7s" - ) - sig_part.set_payload( - email.base64mime.body_encode(signature, maxlinelen=65) - ) - del sig_part["MIME-Version"] - m.attach(sig_part) - - fp = io.BytesIO() - g = email.generator.BytesGenerator( - fp, - maxheaderlen=0, - mangle_from_=False, - policy=m.policy.clone(linesep="\r\n"), - ) - g.flatten(m) - return fp.getvalue() - - -def _smime_enveloped_encode(data: bytes) -> bytes: - m = email.message.Message() - m.add_header("MIME-Version", "1.0") - m.add_header("Content-Disposition", "attachment", filename="smime.p7m") - m.add_header( - "Content-Type", - "application/pkcs7-mime", - smime_type="enveloped-data", - name="smime.p7m", - ) - m.add_header("Content-Transfer-Encoding", "base64") - - m.set_payload(email.base64mime.body_encode(data, maxlinelen=65)) - - return m.as_bytes(policy=m.policy.clone(linesep="\n", max_line_length=0)) - - -class OpenSSLMimePart(email.message.MIMEPart): - # A MIMEPart subclass that replicates OpenSSL's behavior of not including - # a newline if there are no headers. - def _write_headers(self, generator) -> None: - if list(self.raw_items()): - generator._write_headers(self) diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/serialization/ssh.py b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/serialization/ssh.py deleted file mode 100644 index c01afb0..0000000 --- a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/serialization/ssh.py +++ /dev/null @@ -1,1569 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
- -from __future__ import annotations - -import binascii -import enum -import os -import re -import typing -import warnings -from base64 import encodebytes as _base64_encode -from dataclasses import dataclass - -from cryptography import utils -from cryptography.exceptions import UnsupportedAlgorithm -from cryptography.hazmat.primitives import hashes -from cryptography.hazmat.primitives.asymmetric import ( - dsa, - ec, - ed25519, - padding, - rsa, -) -from cryptography.hazmat.primitives.asymmetric import utils as asym_utils -from cryptography.hazmat.primitives.ciphers import ( - AEADDecryptionContext, - Cipher, - algorithms, - modes, -) -from cryptography.hazmat.primitives.serialization import ( - Encoding, - KeySerializationEncryption, - NoEncryption, - PrivateFormat, - PublicFormat, - _KeySerializationEncryption, -) - -try: - from bcrypt import kdf as _bcrypt_kdf - - _bcrypt_supported = True -except ImportError: - _bcrypt_supported = False - - def _bcrypt_kdf( - password: bytes, - salt: bytes, - desired_key_bytes: int, - rounds: int, - ignore_few_rounds: bool = False, - ) -> bytes: - raise UnsupportedAlgorithm("Need bcrypt module") - - -_SSH_ED25519 = b"ssh-ed25519" -_SSH_RSA = b"ssh-rsa" -_SSH_DSA = b"ssh-dss" -_ECDSA_NISTP256 = b"ecdsa-sha2-nistp256" -_ECDSA_NISTP384 = b"ecdsa-sha2-nistp384" -_ECDSA_NISTP521 = b"ecdsa-sha2-nistp521" -_CERT_SUFFIX = b"-cert-v01@openssh.com" - -# U2F application string suffixed pubkey -_SK_SSH_ED25519 = b"sk-ssh-ed25519@openssh.com" -_SK_SSH_ECDSA_NISTP256 = b"sk-ecdsa-sha2-nistp256@openssh.com" - -# These are not key types, only algorithms, so they cannot appear -# as a public key type -_SSH_RSA_SHA256 = b"rsa-sha2-256" -_SSH_RSA_SHA512 = b"rsa-sha2-512" - -_SSH_PUBKEY_RC = re.compile(rb"\A(\S+)[ \t]+(\S+)") -_SK_MAGIC = b"openssh-key-v1\0" -_SK_START = b"-----BEGIN OPENSSH PRIVATE KEY-----" -_SK_END = b"-----END OPENSSH PRIVATE KEY-----" -_BCRYPT = b"bcrypt" -_NONE = b"none" -_DEFAULT_CIPHER = b"aes256-ctr" -_DEFAULT_ROUNDS = 16 - -# re is only way to work on bytes-like data -_PEM_RC = re.compile(_SK_START + b"(.*?)" + _SK_END, re.DOTALL) - -# padding for max blocksize -_PADDING = memoryview(bytearray(range(1, 1 + 16))) - - -@dataclass -class _SSHCipher: - alg: type[algorithms.AES] - key_len: int - mode: type[modes.CTR] | type[modes.CBC] | type[modes.GCM] - block_len: int - iv_len: int - tag_len: int | None - is_aead: bool - - -# ciphers that are actually used in key wrapping -_SSH_CIPHERS: dict[bytes, _SSHCipher] = { - b"aes256-ctr": _SSHCipher( - alg=algorithms.AES, - key_len=32, - mode=modes.CTR, - block_len=16, - iv_len=16, - tag_len=None, - is_aead=False, - ), - b"aes256-cbc": _SSHCipher( - alg=algorithms.AES, - key_len=32, - mode=modes.CBC, - block_len=16, - iv_len=16, - tag_len=None, - is_aead=False, - ), - b"aes256-gcm@openssh.com": _SSHCipher( - alg=algorithms.AES, - key_len=32, - mode=modes.GCM, - block_len=16, - iv_len=12, - tag_len=16, - is_aead=True, - ), -} - -# map local curve name to key type -_ECDSA_KEY_TYPE = { - "secp256r1": _ECDSA_NISTP256, - "secp384r1": _ECDSA_NISTP384, - "secp521r1": _ECDSA_NISTP521, -} - - -def _get_ssh_key_type(key: SSHPrivateKeyTypes | SSHPublicKeyTypes) -> bytes: - if isinstance(key, ec.EllipticCurvePrivateKey): - key_type = _ecdsa_key_type(key.public_key()) - elif isinstance(key, ec.EllipticCurvePublicKey): - key_type = _ecdsa_key_type(key) - elif isinstance(key, (rsa.RSAPrivateKey, rsa.RSAPublicKey)): - key_type = _SSH_RSA - elif isinstance(key, (dsa.DSAPrivateKey, dsa.DSAPublicKey)): - key_type = _SSH_DSA - 
elif isinstance( - key, (ed25519.Ed25519PrivateKey, ed25519.Ed25519PublicKey) - ): - key_type = _SSH_ED25519 - else: - raise ValueError("Unsupported key type") - - return key_type - - -def _ecdsa_key_type(public_key: ec.EllipticCurvePublicKey) -> bytes: - """Return SSH key_type and curve_name for private key.""" - curve = public_key.curve - if curve.name not in _ECDSA_KEY_TYPE: - raise ValueError( - f"Unsupported curve for ssh private key: {curve.name!r}" - ) - return _ECDSA_KEY_TYPE[curve.name] - - -def _ssh_pem_encode( - data: bytes, - prefix: bytes = _SK_START + b"\n", - suffix: bytes = _SK_END + b"\n", -) -> bytes: - return b"".join([prefix, _base64_encode(data), suffix]) - - -def _check_block_size(data: bytes, block_len: int) -> None: - """Require data to be full blocks""" - if not data or len(data) % block_len != 0: - raise ValueError("Corrupt data: missing padding") - - -def _check_empty(data: bytes) -> None: - """All data should have been parsed.""" - if data: - raise ValueError("Corrupt data: unparsed data") - - -def _init_cipher( - ciphername: bytes, - password: bytes | None, - salt: bytes, - rounds: int, -) -> Cipher[modes.CBC | modes.CTR | modes.GCM]: - """Generate key + iv and return cipher.""" - if not password: - raise ValueError("Key is password-protected.") - - ciph = _SSH_CIPHERS[ciphername] - seed = _bcrypt_kdf( - password, salt, ciph.key_len + ciph.iv_len, rounds, True - ) - return Cipher( - ciph.alg(seed[: ciph.key_len]), - ciph.mode(seed[ciph.key_len :]), - ) - - -def _get_u32(data: memoryview) -> tuple[int, memoryview]: - """Uint32""" - if len(data) < 4: - raise ValueError("Invalid data") - return int.from_bytes(data[:4], byteorder="big"), data[4:] - - -def _get_u64(data: memoryview) -> tuple[int, memoryview]: - """Uint64""" - if len(data) < 8: - raise ValueError("Invalid data") - return int.from_bytes(data[:8], byteorder="big"), data[8:] - - -def _get_sshstr(data: memoryview) -> tuple[memoryview, memoryview]: - """Bytes with u32 length prefix""" - n, data = _get_u32(data) - if n > len(data): - raise ValueError("Invalid data") - return data[:n], data[n:] - - -def _get_mpint(data: memoryview) -> tuple[int, memoryview]: - """Big integer.""" - val, data = _get_sshstr(data) - if val and val[0] > 0x7F: - raise ValueError("Invalid data") - return int.from_bytes(val, "big"), data - - -def _to_mpint(val: int) -> bytes: - """Storage format for signed bigint.""" - if val < 0: - raise ValueError("negative mpint not allowed") - if not val: - return b"" - nbytes = (val.bit_length() + 8) // 8 - return utils.int_to_bytes(val, nbytes) - - -class _FragList: - """Build recursive structure without data copy.""" - - flist: list[bytes] - - def __init__(self, init: list[bytes] | None = None) -> None: - self.flist = [] - if init: - self.flist.extend(init) - - def put_raw(self, val: bytes) -> None: - """Add plain bytes""" - self.flist.append(val) - - def put_u32(self, val: int) -> None: - """Big-endian uint32""" - self.flist.append(val.to_bytes(length=4, byteorder="big")) - - def put_u64(self, val: int) -> None: - """Big-endian uint64""" - self.flist.append(val.to_bytes(length=8, byteorder="big")) - - def put_sshstr(self, val: bytes | _FragList) -> None: - """Bytes prefixed with u32 length""" - if isinstance(val, (bytes, memoryview, bytearray)): - self.put_u32(len(val)) - self.flist.append(val) - else: - self.put_u32(val.size()) - self.flist.extend(val.flist) - - def put_mpint(self, val: int) -> None: - """Big-endian bigint prefixed with u32 length""" - self.put_sshstr(_to_mpint(val)) - - 
def size(self) -> int: - """Current number of bytes""" - return sum(map(len, self.flist)) - - def render(self, dstbuf: memoryview, pos: int = 0) -> int: - """Write into bytearray""" - for frag in self.flist: - flen = len(frag) - start, pos = pos, pos + flen - dstbuf[start:pos] = frag - return pos - - def tobytes(self) -> bytes: - """Return as bytes""" - buf = memoryview(bytearray(self.size())) - self.render(buf) - return buf.tobytes() - - -class _SSHFormatRSA: - """Format for RSA keys. - - Public: - mpint e, n - Private: - mpint n, e, d, iqmp, p, q - """ - - def get_public( - self, data: memoryview - ) -> tuple[tuple[int, int], memoryview]: - """RSA public fields""" - e, data = _get_mpint(data) - n, data = _get_mpint(data) - return (e, n), data - - def load_public( - self, data: memoryview - ) -> tuple[rsa.RSAPublicKey, memoryview]: - """Make RSA public key from data.""" - (e, n), data = self.get_public(data) - public_numbers = rsa.RSAPublicNumbers(e, n) - public_key = public_numbers.public_key() - return public_key, data - - def load_private( - self, data: memoryview, pubfields - ) -> tuple[rsa.RSAPrivateKey, memoryview]: - """Make RSA private key from data.""" - n, data = _get_mpint(data) - e, data = _get_mpint(data) - d, data = _get_mpint(data) - iqmp, data = _get_mpint(data) - p, data = _get_mpint(data) - q, data = _get_mpint(data) - - if (e, n) != pubfields: - raise ValueError("Corrupt data: rsa field mismatch") - dmp1 = rsa.rsa_crt_dmp1(d, p) - dmq1 = rsa.rsa_crt_dmq1(d, q) - public_numbers = rsa.RSAPublicNumbers(e, n) - private_numbers = rsa.RSAPrivateNumbers( - p, q, d, dmp1, dmq1, iqmp, public_numbers - ) - private_key = private_numbers.private_key() - return private_key, data - - def encode_public( - self, public_key: rsa.RSAPublicKey, f_pub: _FragList - ) -> None: - """Write RSA public key""" - pubn = public_key.public_numbers() - f_pub.put_mpint(pubn.e) - f_pub.put_mpint(pubn.n) - - def encode_private( - self, private_key: rsa.RSAPrivateKey, f_priv: _FragList - ) -> None: - """Write RSA private key""" - private_numbers = private_key.private_numbers() - public_numbers = private_numbers.public_numbers - - f_priv.put_mpint(public_numbers.n) - f_priv.put_mpint(public_numbers.e) - - f_priv.put_mpint(private_numbers.d) - f_priv.put_mpint(private_numbers.iqmp) - f_priv.put_mpint(private_numbers.p) - f_priv.put_mpint(private_numbers.q) - - -class _SSHFormatDSA: - """Format for DSA keys. 
- - Public: - mpint p, q, g, y - Private: - mpint p, q, g, y, x - """ - - def get_public(self, data: memoryview) -> tuple[tuple, memoryview]: - """DSA public fields""" - p, data = _get_mpint(data) - q, data = _get_mpint(data) - g, data = _get_mpint(data) - y, data = _get_mpint(data) - return (p, q, g, y), data - - def load_public( - self, data: memoryview - ) -> tuple[dsa.DSAPublicKey, memoryview]: - """Make DSA public key from data.""" - (p, q, g, y), data = self.get_public(data) - parameter_numbers = dsa.DSAParameterNumbers(p, q, g) - public_numbers = dsa.DSAPublicNumbers(y, parameter_numbers) - self._validate(public_numbers) - public_key = public_numbers.public_key() - return public_key, data - - def load_private( - self, data: memoryview, pubfields - ) -> tuple[dsa.DSAPrivateKey, memoryview]: - """Make DSA private key from data.""" - (p, q, g, y), data = self.get_public(data) - x, data = _get_mpint(data) - - if (p, q, g, y) != pubfields: - raise ValueError("Corrupt data: dsa field mismatch") - parameter_numbers = dsa.DSAParameterNumbers(p, q, g) - public_numbers = dsa.DSAPublicNumbers(y, parameter_numbers) - self._validate(public_numbers) - private_numbers = dsa.DSAPrivateNumbers(x, public_numbers) - private_key = private_numbers.private_key() - return private_key, data - - def encode_public( - self, public_key: dsa.DSAPublicKey, f_pub: _FragList - ) -> None: - """Write DSA public key""" - public_numbers = public_key.public_numbers() - parameter_numbers = public_numbers.parameter_numbers - self._validate(public_numbers) - - f_pub.put_mpint(parameter_numbers.p) - f_pub.put_mpint(parameter_numbers.q) - f_pub.put_mpint(parameter_numbers.g) - f_pub.put_mpint(public_numbers.y) - - def encode_private( - self, private_key: dsa.DSAPrivateKey, f_priv: _FragList - ) -> None: - """Write DSA private key""" - self.encode_public(private_key.public_key(), f_priv) - f_priv.put_mpint(private_key.private_numbers().x) - - def _validate(self, public_numbers: dsa.DSAPublicNumbers) -> None: - parameter_numbers = public_numbers.parameter_numbers - if parameter_numbers.p.bit_length() != 1024: - raise ValueError("SSH supports only 1024 bit DSA keys") - - -class _SSHFormatECDSA: - """Format for ECDSA keys. 
- - Public: - str curve - bytes point - Private: - str curve - bytes point - mpint secret - """ - - def __init__(self, ssh_curve_name: bytes, curve: ec.EllipticCurve): - self.ssh_curve_name = ssh_curve_name - self.curve = curve - - def get_public( - self, data: memoryview - ) -> tuple[tuple[memoryview, memoryview], memoryview]: - """ECDSA public fields""" - curve, data = _get_sshstr(data) - point, data = _get_sshstr(data) - if curve != self.ssh_curve_name: - raise ValueError("Curve name mismatch") - if point[0] != 4: - raise NotImplementedError("Need uncompressed point") - return (curve, point), data - - def load_public( - self, data: memoryview - ) -> tuple[ec.EllipticCurvePublicKey, memoryview]: - """Make ECDSA public key from data.""" - (_, point), data = self.get_public(data) - public_key = ec.EllipticCurvePublicKey.from_encoded_point( - self.curve, point.tobytes() - ) - return public_key, data - - def load_private( - self, data: memoryview, pubfields - ) -> tuple[ec.EllipticCurvePrivateKey, memoryview]: - """Make ECDSA private key from data.""" - (curve_name, point), data = self.get_public(data) - secret, data = _get_mpint(data) - - if (curve_name, point) != pubfields: - raise ValueError("Corrupt data: ecdsa field mismatch") - private_key = ec.derive_private_key(secret, self.curve) - return private_key, data - - def encode_public( - self, public_key: ec.EllipticCurvePublicKey, f_pub: _FragList - ) -> None: - """Write ECDSA public key""" - point = public_key.public_bytes( - Encoding.X962, PublicFormat.UncompressedPoint - ) - f_pub.put_sshstr(self.ssh_curve_name) - f_pub.put_sshstr(point) - - def encode_private( - self, private_key: ec.EllipticCurvePrivateKey, f_priv: _FragList - ) -> None: - """Write ECDSA private key""" - public_key = private_key.public_key() - private_numbers = private_key.private_numbers() - - self.encode_public(public_key, f_priv) - f_priv.put_mpint(private_numbers.private_value) - - -class _SSHFormatEd25519: - """Format for Ed25519 keys. 
- - Public: - bytes point - Private: - bytes point - bytes secret_and_point - """ - - def get_public( - self, data: memoryview - ) -> tuple[tuple[memoryview], memoryview]: - """Ed25519 public fields""" - point, data = _get_sshstr(data) - return (point,), data - - def load_public( - self, data: memoryview - ) -> tuple[ed25519.Ed25519PublicKey, memoryview]: - """Make Ed25519 public key from data.""" - (point,), data = self.get_public(data) - public_key = ed25519.Ed25519PublicKey.from_public_bytes( - point.tobytes() - ) - return public_key, data - - def load_private( - self, data: memoryview, pubfields - ) -> tuple[ed25519.Ed25519PrivateKey, memoryview]: - """Make Ed25519 private key from data.""" - (point,), data = self.get_public(data) - keypair, data = _get_sshstr(data) - - secret = keypair[:32] - point2 = keypair[32:] - if point != point2 or (point,) != pubfields: - raise ValueError("Corrupt data: ed25519 field mismatch") - private_key = ed25519.Ed25519PrivateKey.from_private_bytes(secret) - return private_key, data - - def encode_public( - self, public_key: ed25519.Ed25519PublicKey, f_pub: _FragList - ) -> None: - """Write Ed25519 public key""" - raw_public_key = public_key.public_bytes( - Encoding.Raw, PublicFormat.Raw - ) - f_pub.put_sshstr(raw_public_key) - - def encode_private( - self, private_key: ed25519.Ed25519PrivateKey, f_priv: _FragList - ) -> None: - """Write Ed25519 private key""" - public_key = private_key.public_key() - raw_private_key = private_key.private_bytes( - Encoding.Raw, PrivateFormat.Raw, NoEncryption() - ) - raw_public_key = public_key.public_bytes( - Encoding.Raw, PublicFormat.Raw - ) - f_keypair = _FragList([raw_private_key, raw_public_key]) - - self.encode_public(public_key, f_priv) - f_priv.put_sshstr(f_keypair) - - -def load_application(data) -> tuple[memoryview, memoryview]: - """ - U2F application strings - """ - application, data = _get_sshstr(data) - if not application.tobytes().startswith(b"ssh:"): - raise ValueError( - "U2F application string does not start with b'ssh:' " - f"({application})" - ) - return application, data - - -class _SSHFormatSKEd25519: - """ - The format of a sk-ssh-ed25519@openssh.com public key is: - - string "sk-ssh-ed25519@openssh.com" - string public key - string application (user-specified, but typically "ssh:") - """ - - def load_public( - self, data: memoryview - ) -> tuple[ed25519.Ed25519PublicKey, memoryview]: - """Make Ed25519 public key from data.""" - public_key, data = _lookup_kformat(_SSH_ED25519).load_public(data) - _, data = load_application(data) - return public_key, data - - -class _SSHFormatSKECDSA: - """ - The format of a sk-ecdsa-sha2-nistp256@openssh.com public key is: - - string "sk-ecdsa-sha2-nistp256@openssh.com" - string curve name - ec_point Q - string application (user-specified, but typically "ssh:") - """ - - def load_public( - self, data: memoryview - ) -> tuple[ec.EllipticCurvePublicKey, memoryview]: - """Make ECDSA public key from data.""" - public_key, data = _lookup_kformat(_ECDSA_NISTP256).load_public(data) - _, data = load_application(data) - return public_key, data - - -_KEY_FORMATS = { - _SSH_RSA: _SSHFormatRSA(), - _SSH_DSA: _SSHFormatDSA(), - _SSH_ED25519: _SSHFormatEd25519(), - _ECDSA_NISTP256: _SSHFormatECDSA(b"nistp256", ec.SECP256R1()), - _ECDSA_NISTP384: _SSHFormatECDSA(b"nistp384", ec.SECP384R1()), - _ECDSA_NISTP521: _SSHFormatECDSA(b"nistp521", ec.SECP521R1()), - _SK_SSH_ED25519: _SSHFormatSKEd25519(), - _SK_SSH_ECDSA_NISTP256: _SSHFormatSKECDSA(), -} - - -def _lookup_kformat(key_type: 
bytes): - """Return valid format or throw error""" - if not isinstance(key_type, bytes): - key_type = memoryview(key_type).tobytes() - if key_type in _KEY_FORMATS: - return _KEY_FORMATS[key_type] - raise UnsupportedAlgorithm(f"Unsupported key type: {key_type!r}") - - -SSHPrivateKeyTypes = typing.Union[ - ec.EllipticCurvePrivateKey, - rsa.RSAPrivateKey, - dsa.DSAPrivateKey, - ed25519.Ed25519PrivateKey, -] - - -def load_ssh_private_key( - data: bytes, - password: bytes | None, - backend: typing.Any = None, -) -> SSHPrivateKeyTypes: - """Load private key from OpenSSH custom encoding.""" - utils._check_byteslike("data", data) - if password is not None: - utils._check_bytes("password", password) - - m = _PEM_RC.search(data) - if not m: - raise ValueError("Not OpenSSH private key format") - p1 = m.start(1) - p2 = m.end(1) - data = binascii.a2b_base64(memoryview(data)[p1:p2]) - if not data.startswith(_SK_MAGIC): - raise ValueError("Not OpenSSH private key format") - data = memoryview(data)[len(_SK_MAGIC) :] - - # parse header - ciphername, data = _get_sshstr(data) - kdfname, data = _get_sshstr(data) - kdfoptions, data = _get_sshstr(data) - nkeys, data = _get_u32(data) - if nkeys != 1: - raise ValueError("Only one key supported") - - # load public key data - pubdata, data = _get_sshstr(data) - pub_key_type, pubdata = _get_sshstr(pubdata) - kformat = _lookup_kformat(pub_key_type) - pubfields, pubdata = kformat.get_public(pubdata) - _check_empty(pubdata) - - if (ciphername, kdfname) != (_NONE, _NONE): - ciphername_bytes = ciphername.tobytes() - if ciphername_bytes not in _SSH_CIPHERS: - raise UnsupportedAlgorithm( - f"Unsupported cipher: {ciphername_bytes!r}" - ) - if kdfname != _BCRYPT: - raise UnsupportedAlgorithm(f"Unsupported KDF: {kdfname!r}") - blklen = _SSH_CIPHERS[ciphername_bytes].block_len - tag_len = _SSH_CIPHERS[ciphername_bytes].tag_len - # load secret data - edata, data = _get_sshstr(data) - # see https://bugzilla.mindrot.org/show_bug.cgi?id=3553 for - # information about how OpenSSH handles AEAD tags - if _SSH_CIPHERS[ciphername_bytes].is_aead: - tag = bytes(data) - if len(tag) != tag_len: - raise ValueError("Corrupt data: invalid tag length for cipher") - else: - _check_empty(data) - _check_block_size(edata, blklen) - salt, kbuf = _get_sshstr(kdfoptions) - rounds, kbuf = _get_u32(kbuf) - _check_empty(kbuf) - ciph = _init_cipher(ciphername_bytes, password, salt.tobytes(), rounds) - dec = ciph.decryptor() - edata = memoryview(dec.update(edata)) - if _SSH_CIPHERS[ciphername_bytes].is_aead: - assert isinstance(dec, AEADDecryptionContext) - _check_empty(dec.finalize_with_tag(tag)) - else: - # _check_block_size requires data to be a full block so there - # should be no output from finalize - _check_empty(dec.finalize()) - else: - # load secret data - edata, data = _get_sshstr(data) - _check_empty(data) - blklen = 8 - _check_block_size(edata, blklen) - ck1, edata = _get_u32(edata) - ck2, edata = _get_u32(edata) - if ck1 != ck2: - raise ValueError("Corrupt data: broken checksum") - - # load per-key struct - key_type, edata = _get_sshstr(edata) - if key_type != pub_key_type: - raise ValueError("Corrupt data: key type mismatch") - private_key, edata = kformat.load_private(edata, pubfields) - # We don't use the comment - _, edata = _get_sshstr(edata) - - # yes, SSH does padding check *after* all other parsing is done. - # need to follow as it writes zero-byte padding too. 
- if edata != _PADDING[: len(edata)]: - raise ValueError("Corrupt data: invalid padding") - - if isinstance(private_key, dsa.DSAPrivateKey): - warnings.warn( - "SSH DSA keys are deprecated and will be removed in a future " - "release.", - utils.DeprecatedIn40, - stacklevel=2, - ) - - return private_key - - -def _serialize_ssh_private_key( - private_key: SSHPrivateKeyTypes, - password: bytes, - encryption_algorithm: KeySerializationEncryption, -) -> bytes: - """Serialize private key with OpenSSH custom encoding.""" - utils._check_bytes("password", password) - if isinstance(private_key, dsa.DSAPrivateKey): - warnings.warn( - "SSH DSA key support is deprecated and will be " - "removed in a future release", - utils.DeprecatedIn40, - stacklevel=4, - ) - - key_type = _get_ssh_key_type(private_key) - kformat = _lookup_kformat(key_type) - - # setup parameters - f_kdfoptions = _FragList() - if password: - ciphername = _DEFAULT_CIPHER - blklen = _SSH_CIPHERS[ciphername].block_len - kdfname = _BCRYPT - rounds = _DEFAULT_ROUNDS - if ( - isinstance(encryption_algorithm, _KeySerializationEncryption) - and encryption_algorithm._kdf_rounds is not None - ): - rounds = encryption_algorithm._kdf_rounds - salt = os.urandom(16) - f_kdfoptions.put_sshstr(salt) - f_kdfoptions.put_u32(rounds) - ciph = _init_cipher(ciphername, password, salt, rounds) - else: - ciphername = kdfname = _NONE - blklen = 8 - ciph = None - nkeys = 1 - checkval = os.urandom(4) - comment = b"" - - # encode public and private parts together - f_public_key = _FragList() - f_public_key.put_sshstr(key_type) - kformat.encode_public(private_key.public_key(), f_public_key) - - f_secrets = _FragList([checkval, checkval]) - f_secrets.put_sshstr(key_type) - kformat.encode_private(private_key, f_secrets) - f_secrets.put_sshstr(comment) - f_secrets.put_raw(_PADDING[: blklen - (f_secrets.size() % blklen)]) - - # top-level structure - f_main = _FragList() - f_main.put_raw(_SK_MAGIC) - f_main.put_sshstr(ciphername) - f_main.put_sshstr(kdfname) - f_main.put_sshstr(f_kdfoptions) - f_main.put_u32(nkeys) - f_main.put_sshstr(f_public_key) - f_main.put_sshstr(f_secrets) - - # copy result info bytearray - slen = f_secrets.size() - mlen = f_main.size() - buf = memoryview(bytearray(mlen + blklen)) - f_main.render(buf) - ofs = mlen - slen - - # encrypt in-place - if ciph is not None: - ciph.encryptor().update_into(buf[ofs:mlen], buf[ofs:]) - - return _ssh_pem_encode(buf[:mlen]) - - -SSHPublicKeyTypes = typing.Union[ - ec.EllipticCurvePublicKey, - rsa.RSAPublicKey, - dsa.DSAPublicKey, - ed25519.Ed25519PublicKey, -] - -SSHCertPublicKeyTypes = typing.Union[ - ec.EllipticCurvePublicKey, - rsa.RSAPublicKey, - ed25519.Ed25519PublicKey, -] - - -class SSHCertificateType(enum.Enum): - USER = 1 - HOST = 2 - - -class SSHCertificate: - def __init__( - self, - _nonce: memoryview, - _public_key: SSHPublicKeyTypes, - _serial: int, - _cctype: int, - _key_id: memoryview, - _valid_principals: list[bytes], - _valid_after: int, - _valid_before: int, - _critical_options: dict[bytes, bytes], - _extensions: dict[bytes, bytes], - _sig_type: memoryview, - _sig_key: memoryview, - _inner_sig_type: memoryview, - _signature: memoryview, - _tbs_cert_body: memoryview, - _cert_key_type: bytes, - _cert_body: memoryview, - ): - self._nonce = _nonce - self._public_key = _public_key - self._serial = _serial - try: - self._type = SSHCertificateType(_cctype) - except ValueError: - raise ValueError("Invalid certificate type") - self._key_id = _key_id - self._valid_principals = _valid_principals - 
self._valid_after = _valid_after - self._valid_before = _valid_before - self._critical_options = _critical_options - self._extensions = _extensions - self._sig_type = _sig_type - self._sig_key = _sig_key - self._inner_sig_type = _inner_sig_type - self._signature = _signature - self._cert_key_type = _cert_key_type - self._cert_body = _cert_body - self._tbs_cert_body = _tbs_cert_body - - @property - def nonce(self) -> bytes: - return bytes(self._nonce) - - def public_key(self) -> SSHCertPublicKeyTypes: - # make mypy happy until we remove DSA support entirely and - # the underlying union won't have a disallowed type - return typing.cast(SSHCertPublicKeyTypes, self._public_key) - - @property - def serial(self) -> int: - return self._serial - - @property - def type(self) -> SSHCertificateType: - return self._type - - @property - def key_id(self) -> bytes: - return bytes(self._key_id) - - @property - def valid_principals(self) -> list[bytes]: - return self._valid_principals - - @property - def valid_before(self) -> int: - return self._valid_before - - @property - def valid_after(self) -> int: - return self._valid_after - - @property - def critical_options(self) -> dict[bytes, bytes]: - return self._critical_options - - @property - def extensions(self) -> dict[bytes, bytes]: - return self._extensions - - def signature_key(self) -> SSHCertPublicKeyTypes: - sigformat = _lookup_kformat(self._sig_type) - signature_key, sigkey_rest = sigformat.load_public(self._sig_key) - _check_empty(sigkey_rest) - return signature_key - - def public_bytes(self) -> bytes: - return ( - bytes(self._cert_key_type) - + b" " - + binascii.b2a_base64(bytes(self._cert_body), newline=False) - ) - - def verify_cert_signature(self) -> None: - signature_key = self.signature_key() - if isinstance(signature_key, ed25519.Ed25519PublicKey): - signature_key.verify( - bytes(self._signature), bytes(self._tbs_cert_body) - ) - elif isinstance(signature_key, ec.EllipticCurvePublicKey): - # The signature is encoded as a pair of big-endian integers - r, data = _get_mpint(self._signature) - s, data = _get_mpint(data) - _check_empty(data) - computed_sig = asym_utils.encode_dss_signature(r, s) - hash_alg = _get_ec_hash_alg(signature_key.curve) - signature_key.verify( - computed_sig, bytes(self._tbs_cert_body), ec.ECDSA(hash_alg) - ) - else: - assert isinstance(signature_key, rsa.RSAPublicKey) - if self._inner_sig_type == _SSH_RSA: - hash_alg = hashes.SHA1() - elif self._inner_sig_type == _SSH_RSA_SHA256: - hash_alg = hashes.SHA256() - else: - assert self._inner_sig_type == _SSH_RSA_SHA512 - hash_alg = hashes.SHA512() - signature_key.verify( - bytes(self._signature), - bytes(self._tbs_cert_body), - padding.PKCS1v15(), - hash_alg, - ) - - -def _get_ec_hash_alg(curve: ec.EllipticCurve) -> hashes.HashAlgorithm: - if isinstance(curve, ec.SECP256R1): - return hashes.SHA256() - elif isinstance(curve, ec.SECP384R1): - return hashes.SHA384() - else: - assert isinstance(curve, ec.SECP521R1) - return hashes.SHA512() - - -def _load_ssh_public_identity( - data: bytes, - _legacy_dsa_allowed=False, -) -> SSHCertificate | SSHPublicKeyTypes: - utils._check_byteslike("data", data) - - m = _SSH_PUBKEY_RC.match(data) - if not m: - raise ValueError("Invalid line format") - key_type = orig_key_type = m.group(1) - key_body = m.group(2) - with_cert = False - if key_type.endswith(_CERT_SUFFIX): - with_cert = True - key_type = key_type[: -len(_CERT_SUFFIX)] - if key_type == _SSH_DSA and not _legacy_dsa_allowed: - raise UnsupportedAlgorithm( - "DSA keys aren't supported 
in SSH certificates" - ) - kformat = _lookup_kformat(key_type) - - try: - rest = memoryview(binascii.a2b_base64(key_body)) - except (TypeError, binascii.Error): - raise ValueError("Invalid format") - - if with_cert: - cert_body = rest - inner_key_type, rest = _get_sshstr(rest) - if inner_key_type != orig_key_type: - raise ValueError("Invalid key format") - if with_cert: - nonce, rest = _get_sshstr(rest) - public_key, rest = kformat.load_public(rest) - if with_cert: - serial, rest = _get_u64(rest) - cctype, rest = _get_u32(rest) - key_id, rest = _get_sshstr(rest) - principals, rest = _get_sshstr(rest) - valid_principals = [] - while principals: - principal, principals = _get_sshstr(principals) - valid_principals.append(bytes(principal)) - valid_after, rest = _get_u64(rest) - valid_before, rest = _get_u64(rest) - crit_options, rest = _get_sshstr(rest) - critical_options = _parse_exts_opts(crit_options) - exts, rest = _get_sshstr(rest) - extensions = _parse_exts_opts(exts) - # Get the reserved field, which is unused. - _, rest = _get_sshstr(rest) - sig_key_raw, rest = _get_sshstr(rest) - sig_type, sig_key = _get_sshstr(sig_key_raw) - if sig_type == _SSH_DSA and not _legacy_dsa_allowed: - raise UnsupportedAlgorithm( - "DSA signatures aren't supported in SSH certificates" - ) - # Get the entire cert body and subtract the signature - tbs_cert_body = cert_body[: -len(rest)] - signature_raw, rest = _get_sshstr(rest) - _check_empty(rest) - inner_sig_type, sig_rest = _get_sshstr(signature_raw) - # RSA certs can have multiple algorithm types - if ( - sig_type == _SSH_RSA - and inner_sig_type - not in [_SSH_RSA_SHA256, _SSH_RSA_SHA512, _SSH_RSA] - ) or (sig_type != _SSH_RSA and inner_sig_type != sig_type): - raise ValueError("Signature key type does not match") - signature, sig_rest = _get_sshstr(sig_rest) - _check_empty(sig_rest) - return SSHCertificate( - nonce, - public_key, - serial, - cctype, - key_id, - valid_principals, - valid_after, - valid_before, - critical_options, - extensions, - sig_type, - sig_key, - inner_sig_type, - signature, - tbs_cert_body, - orig_key_type, - cert_body, - ) - else: - _check_empty(rest) - return public_key - - -def load_ssh_public_identity( - data: bytes, -) -> SSHCertificate | SSHPublicKeyTypes: - return _load_ssh_public_identity(data) - - -def _parse_exts_opts(exts_opts: memoryview) -> dict[bytes, bytes]: - result: dict[bytes, bytes] = {} - last_name = None - while exts_opts: - name, exts_opts = _get_sshstr(exts_opts) - bname: bytes = bytes(name) - if bname in result: - raise ValueError("Duplicate name") - if last_name is not None and bname < last_name: - raise ValueError("Fields not lexically sorted") - value, exts_opts = _get_sshstr(exts_opts) - if len(value) > 0: - value, extra = _get_sshstr(value) - if len(extra) > 0: - raise ValueError("Unexpected extra data after value") - result[bname] = bytes(value) - last_name = bname - return result - - -def load_ssh_public_key( - data: bytes, backend: typing.Any = None -) -> SSHPublicKeyTypes: - cert_or_key = _load_ssh_public_identity(data, _legacy_dsa_allowed=True) - public_key: SSHPublicKeyTypes - if isinstance(cert_or_key, SSHCertificate): - public_key = cert_or_key.public_key() - else: - public_key = cert_or_key - - if isinstance(public_key, dsa.DSAPublicKey): - warnings.warn( - "SSH DSA keys are deprecated and will be removed in a future " - "release.", - utils.DeprecatedIn40, - stacklevel=2, - ) - return public_key - - -def serialize_ssh_public_key(public_key: SSHPublicKeyTypes) -> bytes: - """One-line public key 
format for OpenSSH""" - if isinstance(public_key, dsa.DSAPublicKey): - warnings.warn( - "SSH DSA key support is deprecated and will be " - "removed in a future release", - utils.DeprecatedIn40, - stacklevel=4, - ) - key_type = _get_ssh_key_type(public_key) - kformat = _lookup_kformat(key_type) - - f_pub = _FragList() - f_pub.put_sshstr(key_type) - kformat.encode_public(public_key, f_pub) - - pub = binascii.b2a_base64(f_pub.tobytes()).strip() - return b"".join([key_type, b" ", pub]) - - -SSHCertPrivateKeyTypes = typing.Union[ - ec.EllipticCurvePrivateKey, - rsa.RSAPrivateKey, - ed25519.Ed25519PrivateKey, -] - - -# This is an undocumented limit enforced in the openssh codebase for sshd and -# ssh-keygen, but it is undefined in the ssh certificates spec. -_SSHKEY_CERT_MAX_PRINCIPALS = 256 - - -class SSHCertificateBuilder: - def __init__( - self, - _public_key: SSHCertPublicKeyTypes | None = None, - _serial: int | None = None, - _type: SSHCertificateType | None = None, - _key_id: bytes | None = None, - _valid_principals: list[bytes] = [], - _valid_for_all_principals: bool = False, - _valid_before: int | None = None, - _valid_after: int | None = None, - _critical_options: list[tuple[bytes, bytes]] = [], - _extensions: list[tuple[bytes, bytes]] = [], - ): - self._public_key = _public_key - self._serial = _serial - self._type = _type - self._key_id = _key_id - self._valid_principals = _valid_principals - self._valid_for_all_principals = _valid_for_all_principals - self._valid_before = _valid_before - self._valid_after = _valid_after - self._critical_options = _critical_options - self._extensions = _extensions - - def public_key( - self, public_key: SSHCertPublicKeyTypes - ) -> SSHCertificateBuilder: - if not isinstance( - public_key, - ( - ec.EllipticCurvePublicKey, - rsa.RSAPublicKey, - ed25519.Ed25519PublicKey, - ), - ): - raise TypeError("Unsupported key type") - if self._public_key is not None: - raise ValueError("public_key already set") - - return SSHCertificateBuilder( - _public_key=public_key, - _serial=self._serial, - _type=self._type, - _key_id=self._key_id, - _valid_principals=self._valid_principals, - _valid_for_all_principals=self._valid_for_all_principals, - _valid_before=self._valid_before, - _valid_after=self._valid_after, - _critical_options=self._critical_options, - _extensions=self._extensions, - ) - - def serial(self, serial: int) -> SSHCertificateBuilder: - if not isinstance(serial, int): - raise TypeError("serial must be an integer") - if not 0 <= serial < 2**64: - raise ValueError("serial must be between 0 and 2**64") - if self._serial is not None: - raise ValueError("serial already set") - - return SSHCertificateBuilder( - _public_key=self._public_key, - _serial=serial, - _type=self._type, - _key_id=self._key_id, - _valid_principals=self._valid_principals, - _valid_for_all_principals=self._valid_for_all_principals, - _valid_before=self._valid_before, - _valid_after=self._valid_after, - _critical_options=self._critical_options, - _extensions=self._extensions, - ) - - def type(self, type: SSHCertificateType) -> SSHCertificateBuilder: - if not isinstance(type, SSHCertificateType): - raise TypeError("type must be an SSHCertificateType") - if self._type is not None: - raise ValueError("type already set") - - return SSHCertificateBuilder( - _public_key=self._public_key, - _serial=self._serial, - _type=type, - _key_id=self._key_id, - _valid_principals=self._valid_principals, - _valid_for_all_principals=self._valid_for_all_principals, - _valid_before=self._valid_before, - 
_valid_after=self._valid_after, - _critical_options=self._critical_options, - _extensions=self._extensions, - ) - - def key_id(self, key_id: bytes) -> SSHCertificateBuilder: - if not isinstance(key_id, bytes): - raise TypeError("key_id must be bytes") - if self._key_id is not None: - raise ValueError("key_id already set") - - return SSHCertificateBuilder( - _public_key=self._public_key, - _serial=self._serial, - _type=self._type, - _key_id=key_id, - _valid_principals=self._valid_principals, - _valid_for_all_principals=self._valid_for_all_principals, - _valid_before=self._valid_before, - _valid_after=self._valid_after, - _critical_options=self._critical_options, - _extensions=self._extensions, - ) - - def valid_principals( - self, valid_principals: list[bytes] - ) -> SSHCertificateBuilder: - if self._valid_for_all_principals: - raise ValueError( - "Principals can't be set because the cert is valid " - "for all principals" - ) - if ( - not all(isinstance(x, bytes) for x in valid_principals) - or not valid_principals - ): - raise TypeError( - "principals must be a list of bytes and can't be empty" - ) - if self._valid_principals: - raise ValueError("valid_principals already set") - - if len(valid_principals) > _SSHKEY_CERT_MAX_PRINCIPALS: - raise ValueError( - "Reached or exceeded the maximum number of valid_principals" - ) - - return SSHCertificateBuilder( - _public_key=self._public_key, - _serial=self._serial, - _type=self._type, - _key_id=self._key_id, - _valid_principals=valid_principals, - _valid_for_all_principals=self._valid_for_all_principals, - _valid_before=self._valid_before, - _valid_after=self._valid_after, - _critical_options=self._critical_options, - _extensions=self._extensions, - ) - - def valid_for_all_principals(self): - if self._valid_principals: - raise ValueError( - "valid_principals already set, can't set " - "valid_for_all_principals" - ) - if self._valid_for_all_principals: - raise ValueError("valid_for_all_principals already set") - - return SSHCertificateBuilder( - _public_key=self._public_key, - _serial=self._serial, - _type=self._type, - _key_id=self._key_id, - _valid_principals=self._valid_principals, - _valid_for_all_principals=True, - _valid_before=self._valid_before, - _valid_after=self._valid_after, - _critical_options=self._critical_options, - _extensions=self._extensions, - ) - - def valid_before(self, valid_before: int | float) -> SSHCertificateBuilder: - if not isinstance(valid_before, (int, float)): - raise TypeError("valid_before must be an int or float") - valid_before = int(valid_before) - if valid_before < 0 or valid_before >= 2**64: - raise ValueError("valid_before must [0, 2**64)") - if self._valid_before is not None: - raise ValueError("valid_before already set") - - return SSHCertificateBuilder( - _public_key=self._public_key, - _serial=self._serial, - _type=self._type, - _key_id=self._key_id, - _valid_principals=self._valid_principals, - _valid_for_all_principals=self._valid_for_all_principals, - _valid_before=valid_before, - _valid_after=self._valid_after, - _critical_options=self._critical_options, - _extensions=self._extensions, - ) - - def valid_after(self, valid_after: int | float) -> SSHCertificateBuilder: - if not isinstance(valid_after, (int, float)): - raise TypeError("valid_after must be an int or float") - valid_after = int(valid_after) - if valid_after < 0 or valid_after >= 2**64: - raise ValueError("valid_after must [0, 2**64)") - if self._valid_after is not None: - raise ValueError("valid_after already set") - - return 
SSHCertificateBuilder( - _public_key=self._public_key, - _serial=self._serial, - _type=self._type, - _key_id=self._key_id, - _valid_principals=self._valid_principals, - _valid_for_all_principals=self._valid_for_all_principals, - _valid_before=self._valid_before, - _valid_after=valid_after, - _critical_options=self._critical_options, - _extensions=self._extensions, - ) - - def add_critical_option( - self, name: bytes, value: bytes - ) -> SSHCertificateBuilder: - if not isinstance(name, bytes) or not isinstance(value, bytes): - raise TypeError("name and value must be bytes") - # This is O(n**2) - if name in [name for name, _ in self._critical_options]: - raise ValueError("Duplicate critical option name") - - return SSHCertificateBuilder( - _public_key=self._public_key, - _serial=self._serial, - _type=self._type, - _key_id=self._key_id, - _valid_principals=self._valid_principals, - _valid_for_all_principals=self._valid_for_all_principals, - _valid_before=self._valid_before, - _valid_after=self._valid_after, - _critical_options=[*self._critical_options, (name, value)], - _extensions=self._extensions, - ) - - def add_extension( - self, name: bytes, value: bytes - ) -> SSHCertificateBuilder: - if not isinstance(name, bytes) or not isinstance(value, bytes): - raise TypeError("name and value must be bytes") - # This is O(n**2) - if name in [name for name, _ in self._extensions]: - raise ValueError("Duplicate extension name") - - return SSHCertificateBuilder( - _public_key=self._public_key, - _serial=self._serial, - _type=self._type, - _key_id=self._key_id, - _valid_principals=self._valid_principals, - _valid_for_all_principals=self._valid_for_all_principals, - _valid_before=self._valid_before, - _valid_after=self._valid_after, - _critical_options=self._critical_options, - _extensions=[*self._extensions, (name, value)], - ) - - def sign(self, private_key: SSHCertPrivateKeyTypes) -> SSHCertificate: - if not isinstance( - private_key, - ( - ec.EllipticCurvePrivateKey, - rsa.RSAPrivateKey, - ed25519.Ed25519PrivateKey, - ), - ): - raise TypeError("Unsupported private key type") - - if self._public_key is None: - raise ValueError("public_key must be set") - - # Not required - serial = 0 if self._serial is None else self._serial - - if self._type is None: - raise ValueError("type must be set") - - # Not required - key_id = b"" if self._key_id is None else self._key_id - - # A zero length list is valid, but means the certificate - # is valid for any principal of the specified type. We require - # the user to explicitly set valid_for_all_principals to get - # that behavior. 
- if not self._valid_principals and not self._valid_for_all_principals: - raise ValueError( - "valid_principals must be set if valid_for_all_principals " - "is False" - ) - - if self._valid_before is None: - raise ValueError("valid_before must be set") - - if self._valid_after is None: - raise ValueError("valid_after must be set") - - if self._valid_after > self._valid_before: - raise ValueError("valid_after must be earlier than valid_before") - - # lexically sort our byte strings - self._critical_options.sort(key=lambda x: x[0]) - self._extensions.sort(key=lambda x: x[0]) - - key_type = _get_ssh_key_type(self._public_key) - cert_prefix = key_type + _CERT_SUFFIX - - # Marshal the bytes to be signed - nonce = os.urandom(32) - kformat = _lookup_kformat(key_type) - f = _FragList() - f.put_sshstr(cert_prefix) - f.put_sshstr(nonce) - kformat.encode_public(self._public_key, f) - f.put_u64(serial) - f.put_u32(self._type.value) - f.put_sshstr(key_id) - fprincipals = _FragList() - for p in self._valid_principals: - fprincipals.put_sshstr(p) - f.put_sshstr(fprincipals.tobytes()) - f.put_u64(self._valid_after) - f.put_u64(self._valid_before) - fcrit = _FragList() - for name, value in self._critical_options: - fcrit.put_sshstr(name) - if len(value) > 0: - foptval = _FragList() - foptval.put_sshstr(value) - fcrit.put_sshstr(foptval.tobytes()) - else: - fcrit.put_sshstr(value) - f.put_sshstr(fcrit.tobytes()) - fext = _FragList() - for name, value in self._extensions: - fext.put_sshstr(name) - if len(value) > 0: - fextval = _FragList() - fextval.put_sshstr(value) - fext.put_sshstr(fextval.tobytes()) - else: - fext.put_sshstr(value) - f.put_sshstr(fext.tobytes()) - f.put_sshstr(b"") # RESERVED FIELD - # encode CA public key - ca_type = _get_ssh_key_type(private_key) - caformat = _lookup_kformat(ca_type) - caf = _FragList() - caf.put_sshstr(ca_type) - caformat.encode_public(private_key.public_key(), caf) - f.put_sshstr(caf.tobytes()) - # Sigs according to the rules defined for the CA's public key - # (RFC4253 section 6.6 for ssh-rsa, RFC5656 for ECDSA, - # and RFC8032 for Ed25519). - if isinstance(private_key, ed25519.Ed25519PrivateKey): - signature = private_key.sign(f.tobytes()) - fsig = _FragList() - fsig.put_sshstr(ca_type) - fsig.put_sshstr(signature) - f.put_sshstr(fsig.tobytes()) - elif isinstance(private_key, ec.EllipticCurvePrivateKey): - hash_alg = _get_ec_hash_alg(private_key.curve) - signature = private_key.sign(f.tobytes(), ec.ECDSA(hash_alg)) - r, s = asym_utils.decode_dss_signature(signature) - fsig = _FragList() - fsig.put_sshstr(ca_type) - fsigblob = _FragList() - fsigblob.put_mpint(r) - fsigblob.put_mpint(s) - fsig.put_sshstr(fsigblob.tobytes()) - f.put_sshstr(fsig.tobytes()) - - else: - assert isinstance(private_key, rsa.RSAPrivateKey) - # Just like Golang, we're going to use SHA512 for RSA - # https://cs.opensource.google/go/x/crypto/+/refs/tags/ - # v0.4.0:ssh/certs.go;l=445 - # RFC 8332 defines SHA256 and 512 as options - fsig = _FragList() - fsig.put_sshstr(_SSH_RSA_SHA512) - signature = private_key.sign( - f.tobytes(), padding.PKCS1v15(), hashes.SHA512() - ) - fsig.put_sshstr(signature) - f.put_sshstr(fsig.tobytes()) - - cert_data = binascii.b2a_base64(f.tobytes()).strip() - # load_ssh_public_identity returns a union, but this is - # guaranteed to be an SSHCertificate, so we cast to make - # mypy happy. 
- return typing.cast( - SSHCertificate, - load_ssh_public_identity(b"".join([cert_prefix, b" ", cert_data])), - ) diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/twofactor/__init__.py b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/twofactor/__init__.py deleted file mode 100644 index c1af423..0000000 --- a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/twofactor/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - - -class InvalidToken(Exception): - pass diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/__init__.cpython-310.pyc b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/__init__.cpython-310.pyc deleted file mode 100644 index 2f32b6d..0000000 Binary files a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/__init__.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/hotp.cpython-310.pyc b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/hotp.cpython-310.pyc deleted file mode 100644 index 127d716..0000000 Binary files a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/hotp.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/totp.cpython-310.pyc b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/totp.cpython-310.pyc deleted file mode 100644 index 5c00f5c..0000000 Binary files a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/twofactor/__pycache__/totp.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/twofactor/hotp.py b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/twofactor/hotp.py deleted file mode 100644 index af5ab6e..0000000 --- a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/twofactor/hotp.py +++ /dev/null @@ -1,92 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
- -from __future__ import annotations - -import base64 -import typing -from urllib.parse import quote, urlencode - -from cryptography.hazmat.primitives import constant_time, hmac -from cryptography.hazmat.primitives.hashes import SHA1, SHA256, SHA512 -from cryptography.hazmat.primitives.twofactor import InvalidToken - -HOTPHashTypes = typing.Union[SHA1, SHA256, SHA512] - - -def _generate_uri( - hotp: HOTP, - type_name: str, - account_name: str, - issuer: str | None, - extra_parameters: list[tuple[str, int]], -) -> str: - parameters = [ - ("digits", hotp._length), - ("secret", base64.b32encode(hotp._key)), - ("algorithm", hotp._algorithm.name.upper()), - ] - - if issuer is not None: - parameters.append(("issuer", issuer)) - - parameters.extend(extra_parameters) - - label = ( - f"{quote(issuer)}:{quote(account_name)}" - if issuer - else quote(account_name) - ) - return f"otpauth://{type_name}/{label}?{urlencode(parameters)}" - - -class HOTP: - def __init__( - self, - key: bytes, - length: int, - algorithm: HOTPHashTypes, - backend: typing.Any = None, - enforce_key_length: bool = True, - ) -> None: - if len(key) < 16 and enforce_key_length is True: - raise ValueError("Key length has to be at least 128 bits.") - - if not isinstance(length, int): - raise TypeError("Length parameter must be an integer type.") - - if length < 6 or length > 8: - raise ValueError("Length of HOTP has to be between 6 and 8.") - - if not isinstance(algorithm, (SHA1, SHA256, SHA512)): - raise TypeError("Algorithm must be SHA1, SHA256 or SHA512.") - - self._key = key - self._length = length - self._algorithm = algorithm - - def generate(self, counter: int) -> bytes: - truncated_value = self._dynamic_truncate(counter) - hotp = truncated_value % (10**self._length) - return "{0:0{1}}".format(hotp, self._length).encode() - - def verify(self, hotp: bytes, counter: int) -> None: - if not constant_time.bytes_eq(self.generate(counter), hotp): - raise InvalidToken("Supplied HOTP value does not match.") - - def _dynamic_truncate(self, counter: int) -> int: - ctx = hmac.HMAC(self._key, self._algorithm) - ctx.update(counter.to_bytes(length=8, byteorder="big")) - hmac_value = ctx.finalize() - - offset = hmac_value[len(hmac_value) - 1] & 0b1111 - p = hmac_value[offset : offset + 4] - return int.from_bytes(p, byteorder="big") & 0x7FFFFFFF - - def get_provisioning_uri( - self, account_name: str, counter: int, issuer: str | None - ) -> str: - return _generate_uri( - self, "hotp", account_name, issuer, [("counter", int(counter))] - ) diff --git a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/twofactor/totp.py b/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/twofactor/totp.py deleted file mode 100644 index 68a5077..0000000 --- a/env/lib/python3.10/site-packages/cryptography/hazmat/primitives/twofactor/totp.py +++ /dev/null @@ -1,50 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
- -from __future__ import annotations - -import typing - -from cryptography.hazmat.primitives import constant_time -from cryptography.hazmat.primitives.twofactor import InvalidToken -from cryptography.hazmat.primitives.twofactor.hotp import ( - HOTP, - HOTPHashTypes, - _generate_uri, -) - - -class TOTP: - def __init__( - self, - key: bytes, - length: int, - algorithm: HOTPHashTypes, - time_step: int, - backend: typing.Any = None, - enforce_key_length: bool = True, - ): - self._time_step = time_step - self._hotp = HOTP( - key, length, algorithm, enforce_key_length=enforce_key_length - ) - - def generate(self, time: int | float) -> bytes: - counter = int(time / self._time_step) - return self._hotp.generate(counter) - - def verify(self, totp: bytes, time: int) -> None: - if not constant_time.bytes_eq(self.generate(time), totp): - raise InvalidToken("Supplied TOTP value does not match.") - - def get_provisioning_uri( - self, account_name: str, issuer: str | None - ) -> str: - return _generate_uri( - self._hotp, - "totp", - account_name, - issuer, - [("period", int(self._time_step))], - ) diff --git a/env/lib/python3.10/site-packages/cryptography/py.typed b/env/lib/python3.10/site-packages/cryptography/py.typed deleted file mode 100644 index e69de29..0000000 diff --git a/env/lib/python3.10/site-packages/cryptography/utils.py b/env/lib/python3.10/site-packages/cryptography/utils.py deleted file mode 100644 index 706d0ae..0000000 --- a/env/lib/python3.10/site-packages/cryptography/utils.py +++ /dev/null @@ -1,127 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -import enum -import sys -import types -import typing -import warnings - - -# We use a UserWarning subclass, instead of DeprecationWarning, because CPython -# decided deprecation warnings should be invisible by default. -class CryptographyDeprecationWarning(UserWarning): - pass - - -# Several APIs were deprecated with no specific end-of-life date because of the -# ubiquity of their use. They should not be removed until we agree on when that -# cycle ends. 
-DeprecatedIn36 = CryptographyDeprecationWarning -DeprecatedIn37 = CryptographyDeprecationWarning -DeprecatedIn40 = CryptographyDeprecationWarning -DeprecatedIn41 = CryptographyDeprecationWarning -DeprecatedIn42 = CryptographyDeprecationWarning -DeprecatedIn43 = CryptographyDeprecationWarning - - -def _check_bytes(name: str, value: bytes) -> None: - if not isinstance(value, bytes): - raise TypeError(f"{name} must be bytes") - - -def _check_byteslike(name: str, value: bytes) -> None: - try: - memoryview(value) - except TypeError: - raise TypeError(f"{name} must be bytes-like") - - -def int_to_bytes(integer: int, length: int | None = None) -> bytes: - if length == 0: - raise ValueError("length argument can't be 0") - return integer.to_bytes( - length or (integer.bit_length() + 7) // 8 or 1, "big" - ) - - -class InterfaceNotImplemented(Exception): - pass - - -class _DeprecatedValue: - def __init__(self, value: object, message: str, warning_class): - self.value = value - self.message = message - self.warning_class = warning_class - - -class _ModuleWithDeprecations(types.ModuleType): - def __init__(self, module: types.ModuleType): - super().__init__(module.__name__) - self.__dict__["_module"] = module - - def __getattr__(self, attr: str) -> object: - obj = getattr(self._module, attr) - if isinstance(obj, _DeprecatedValue): - warnings.warn(obj.message, obj.warning_class, stacklevel=2) - obj = obj.value - return obj - - def __setattr__(self, attr: str, value: object) -> None: - setattr(self._module, attr, value) - - def __delattr__(self, attr: str) -> None: - obj = getattr(self._module, attr) - if isinstance(obj, _DeprecatedValue): - warnings.warn(obj.message, obj.warning_class, stacklevel=2) - - delattr(self._module, attr) - - def __dir__(self) -> typing.Sequence[str]: - return ["_module", *dir(self._module)] - - -def deprecated( - value: object, - module_name: str, - message: str, - warning_class: type[Warning], - name: str | None = None, -) -> _DeprecatedValue: - module = sys.modules[module_name] - if not isinstance(module, _ModuleWithDeprecations): - sys.modules[module_name] = module = _ModuleWithDeprecations(module) - dv = _DeprecatedValue(value, message, warning_class) - # Maintain backwards compatibility with `name is None` for pyOpenSSL. - if name is not None: - setattr(module, name, dv) - return dv - - -def cached_property(func: typing.Callable) -> property: - cached_name = f"_cached_{func}" - sentinel = object() - - def inner(instance: object): - cache = getattr(instance, cached_name, sentinel) - if cache is not sentinel: - return cache - result = func(instance) - setattr(instance, cached_name, result) - return result - - return property(inner) - - -# Python 3.10 changed representation of enums. We use well-defined object -# representation and string representation from Python 3.9. -class Enum(enum.Enum): - def __repr__(self) -> str: - return f"<{self.__class__.__name__}.{self._name_}: {self._value_!r}>" - - def __str__(self) -> str: - return f"{self.__class__.__name__}.{self._name_}" diff --git a/env/lib/python3.10/site-packages/cryptography/x509/__init__.py b/env/lib/python3.10/site-packages/cryptography/x509/__init__.py deleted file mode 100644 index 26c6444..0000000 --- a/env/lib/python3.10/site-packages/cryptography/x509/__init__.py +++ /dev/null @@ -1,259 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. 
- -from __future__ import annotations - -from cryptography.x509 import certificate_transparency, verification -from cryptography.x509.base import ( - Attribute, - AttributeNotFound, - Attributes, - Certificate, - CertificateBuilder, - CertificateRevocationList, - CertificateRevocationListBuilder, - CertificateSigningRequest, - CertificateSigningRequestBuilder, - InvalidVersion, - RevokedCertificate, - RevokedCertificateBuilder, - Version, - load_der_x509_certificate, - load_der_x509_crl, - load_der_x509_csr, - load_pem_x509_certificate, - load_pem_x509_certificates, - load_pem_x509_crl, - load_pem_x509_csr, - random_serial_number, -) -from cryptography.x509.extensions import ( - AccessDescription, - AuthorityInformationAccess, - AuthorityKeyIdentifier, - BasicConstraints, - CertificateIssuer, - CertificatePolicies, - CRLDistributionPoints, - CRLNumber, - CRLReason, - DeltaCRLIndicator, - DistributionPoint, - DuplicateExtension, - ExtendedKeyUsage, - Extension, - ExtensionNotFound, - Extensions, - ExtensionType, - FreshestCRL, - GeneralNames, - InhibitAnyPolicy, - InvalidityDate, - IssuerAlternativeName, - IssuingDistributionPoint, - KeyUsage, - MSCertificateTemplate, - NameConstraints, - NoticeReference, - OCSPAcceptableResponses, - OCSPNoCheck, - OCSPNonce, - PolicyConstraints, - PolicyInformation, - PrecertificateSignedCertificateTimestamps, - PrecertPoison, - ReasonFlags, - SignedCertificateTimestamps, - SubjectAlternativeName, - SubjectInformationAccess, - SubjectKeyIdentifier, - TLSFeature, - TLSFeatureType, - UnrecognizedExtension, - UserNotice, -) -from cryptography.x509.general_name import ( - DirectoryName, - DNSName, - GeneralName, - IPAddress, - OtherName, - RegisteredID, - RFC822Name, - UniformResourceIdentifier, - UnsupportedGeneralNameType, -) -from cryptography.x509.name import ( - Name, - NameAttribute, - RelativeDistinguishedName, -) -from cryptography.x509.oid import ( - AuthorityInformationAccessOID, - CertificatePoliciesOID, - CRLEntryExtensionOID, - ExtendedKeyUsageOID, - ExtensionOID, - NameOID, - ObjectIdentifier, - PublicKeyAlgorithmOID, - SignatureAlgorithmOID, -) - -OID_AUTHORITY_INFORMATION_ACCESS = ExtensionOID.AUTHORITY_INFORMATION_ACCESS -OID_AUTHORITY_KEY_IDENTIFIER = ExtensionOID.AUTHORITY_KEY_IDENTIFIER -OID_BASIC_CONSTRAINTS = ExtensionOID.BASIC_CONSTRAINTS -OID_CERTIFICATE_POLICIES = ExtensionOID.CERTIFICATE_POLICIES -OID_CRL_DISTRIBUTION_POINTS = ExtensionOID.CRL_DISTRIBUTION_POINTS -OID_EXTENDED_KEY_USAGE = ExtensionOID.EXTENDED_KEY_USAGE -OID_FRESHEST_CRL = ExtensionOID.FRESHEST_CRL -OID_INHIBIT_ANY_POLICY = ExtensionOID.INHIBIT_ANY_POLICY -OID_ISSUER_ALTERNATIVE_NAME = ExtensionOID.ISSUER_ALTERNATIVE_NAME -OID_KEY_USAGE = ExtensionOID.KEY_USAGE -OID_NAME_CONSTRAINTS = ExtensionOID.NAME_CONSTRAINTS -OID_OCSP_NO_CHECK = ExtensionOID.OCSP_NO_CHECK -OID_POLICY_CONSTRAINTS = ExtensionOID.POLICY_CONSTRAINTS -OID_POLICY_MAPPINGS = ExtensionOID.POLICY_MAPPINGS -OID_SUBJECT_ALTERNATIVE_NAME = ExtensionOID.SUBJECT_ALTERNATIVE_NAME -OID_SUBJECT_DIRECTORY_ATTRIBUTES = ExtensionOID.SUBJECT_DIRECTORY_ATTRIBUTES -OID_SUBJECT_INFORMATION_ACCESS = ExtensionOID.SUBJECT_INFORMATION_ACCESS -OID_SUBJECT_KEY_IDENTIFIER = ExtensionOID.SUBJECT_KEY_IDENTIFIER - -OID_DSA_WITH_SHA1 = SignatureAlgorithmOID.DSA_WITH_SHA1 -OID_DSA_WITH_SHA224 = SignatureAlgorithmOID.DSA_WITH_SHA224 -OID_DSA_WITH_SHA256 = SignatureAlgorithmOID.DSA_WITH_SHA256 -OID_ECDSA_WITH_SHA1 = SignatureAlgorithmOID.ECDSA_WITH_SHA1 -OID_ECDSA_WITH_SHA224 = SignatureAlgorithmOID.ECDSA_WITH_SHA224 
-OID_ECDSA_WITH_SHA256 = SignatureAlgorithmOID.ECDSA_WITH_SHA256 -OID_ECDSA_WITH_SHA384 = SignatureAlgorithmOID.ECDSA_WITH_SHA384 -OID_ECDSA_WITH_SHA512 = SignatureAlgorithmOID.ECDSA_WITH_SHA512 -OID_RSA_WITH_MD5 = SignatureAlgorithmOID.RSA_WITH_MD5 -OID_RSA_WITH_SHA1 = SignatureAlgorithmOID.RSA_WITH_SHA1 -OID_RSA_WITH_SHA224 = SignatureAlgorithmOID.RSA_WITH_SHA224 -OID_RSA_WITH_SHA256 = SignatureAlgorithmOID.RSA_WITH_SHA256 -OID_RSA_WITH_SHA384 = SignatureAlgorithmOID.RSA_WITH_SHA384 -OID_RSA_WITH_SHA512 = SignatureAlgorithmOID.RSA_WITH_SHA512 -OID_RSASSA_PSS = SignatureAlgorithmOID.RSASSA_PSS - -OID_COMMON_NAME = NameOID.COMMON_NAME -OID_COUNTRY_NAME = NameOID.COUNTRY_NAME -OID_DOMAIN_COMPONENT = NameOID.DOMAIN_COMPONENT -OID_DN_QUALIFIER = NameOID.DN_QUALIFIER -OID_EMAIL_ADDRESS = NameOID.EMAIL_ADDRESS -OID_GENERATION_QUALIFIER = NameOID.GENERATION_QUALIFIER -OID_GIVEN_NAME = NameOID.GIVEN_NAME -OID_LOCALITY_NAME = NameOID.LOCALITY_NAME -OID_ORGANIZATIONAL_UNIT_NAME = NameOID.ORGANIZATIONAL_UNIT_NAME -OID_ORGANIZATION_NAME = NameOID.ORGANIZATION_NAME -OID_PSEUDONYM = NameOID.PSEUDONYM -OID_SERIAL_NUMBER = NameOID.SERIAL_NUMBER -OID_STATE_OR_PROVINCE_NAME = NameOID.STATE_OR_PROVINCE_NAME -OID_SURNAME = NameOID.SURNAME -OID_TITLE = NameOID.TITLE - -OID_CLIENT_AUTH = ExtendedKeyUsageOID.CLIENT_AUTH -OID_CODE_SIGNING = ExtendedKeyUsageOID.CODE_SIGNING -OID_EMAIL_PROTECTION = ExtendedKeyUsageOID.EMAIL_PROTECTION -OID_OCSP_SIGNING = ExtendedKeyUsageOID.OCSP_SIGNING -OID_SERVER_AUTH = ExtendedKeyUsageOID.SERVER_AUTH -OID_TIME_STAMPING = ExtendedKeyUsageOID.TIME_STAMPING - -OID_ANY_POLICY = CertificatePoliciesOID.ANY_POLICY -OID_CPS_QUALIFIER = CertificatePoliciesOID.CPS_QUALIFIER -OID_CPS_USER_NOTICE = CertificatePoliciesOID.CPS_USER_NOTICE - -OID_CERTIFICATE_ISSUER = CRLEntryExtensionOID.CERTIFICATE_ISSUER -OID_CRL_REASON = CRLEntryExtensionOID.CRL_REASON -OID_INVALIDITY_DATE = CRLEntryExtensionOID.INVALIDITY_DATE - -OID_CA_ISSUERS = AuthorityInformationAccessOID.CA_ISSUERS -OID_OCSP = AuthorityInformationAccessOID.OCSP - -__all__ = [ - "OID_CA_ISSUERS", - "OID_OCSP", - "AccessDescription", - "Attribute", - "AttributeNotFound", - "Attributes", - "AuthorityInformationAccess", - "AuthorityKeyIdentifier", - "BasicConstraints", - "CRLDistributionPoints", - "CRLNumber", - "CRLReason", - "Certificate", - "CertificateBuilder", - "CertificateIssuer", - "CertificatePolicies", - "CertificateRevocationList", - "CertificateRevocationListBuilder", - "CertificateSigningRequest", - "CertificateSigningRequestBuilder", - "DNSName", - "DeltaCRLIndicator", - "DirectoryName", - "DistributionPoint", - "DuplicateExtension", - "ExtendedKeyUsage", - "Extension", - "ExtensionNotFound", - "ExtensionType", - "Extensions", - "FreshestCRL", - "GeneralName", - "GeneralNames", - "IPAddress", - "InhibitAnyPolicy", - "InvalidVersion", - "InvalidityDate", - "IssuerAlternativeName", - "IssuingDistributionPoint", - "KeyUsage", - "MSCertificateTemplate", - "Name", - "NameAttribute", - "NameConstraints", - "NameOID", - "NoticeReference", - "OCSPAcceptableResponses", - "OCSPNoCheck", - "OCSPNonce", - "ObjectIdentifier", - "OtherName", - "PolicyConstraints", - "PolicyInformation", - "PrecertPoison", - "PrecertificateSignedCertificateTimestamps", - "PublicKeyAlgorithmOID", - "RFC822Name", - "ReasonFlags", - "RegisteredID", - "RelativeDistinguishedName", - "RevokedCertificate", - "RevokedCertificateBuilder", - "SignatureAlgorithmOID", - "SignedCertificateTimestamps", - "SubjectAlternativeName", - "SubjectInformationAccess", - 
"SubjectKeyIdentifier", - "TLSFeature", - "TLSFeatureType", - "UniformResourceIdentifier", - "UnrecognizedExtension", - "UnsupportedGeneralNameType", - "UserNotice", - "Version", - "certificate_transparency", - "load_der_x509_certificate", - "load_der_x509_crl", - "load_der_x509_csr", - "load_pem_x509_certificate", - "load_pem_x509_certificates", - "load_pem_x509_crl", - "load_pem_x509_csr", - "random_serial_number", - "verification", - "verification", -] diff --git a/env/lib/python3.10/site-packages/cryptography/x509/__pycache__/__init__.cpython-310.pyc b/env/lib/python3.10/site-packages/cryptography/x509/__pycache__/__init__.cpython-310.pyc deleted file mode 100644 index 271a28c..0000000 Binary files a/env/lib/python3.10/site-packages/cryptography/x509/__pycache__/__init__.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cryptography/x509/__pycache__/base.cpython-310.pyc b/env/lib/python3.10/site-packages/cryptography/x509/__pycache__/base.cpython-310.pyc deleted file mode 100644 index cb87036..0000000 Binary files a/env/lib/python3.10/site-packages/cryptography/x509/__pycache__/base.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cryptography/x509/__pycache__/certificate_transparency.cpython-310.pyc b/env/lib/python3.10/site-packages/cryptography/x509/__pycache__/certificate_transparency.cpython-310.pyc deleted file mode 100644 index e5500e6..0000000 Binary files a/env/lib/python3.10/site-packages/cryptography/x509/__pycache__/certificate_transparency.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cryptography/x509/__pycache__/extensions.cpython-310.pyc b/env/lib/python3.10/site-packages/cryptography/x509/__pycache__/extensions.cpython-310.pyc deleted file mode 100644 index 94b8f4d..0000000 Binary files a/env/lib/python3.10/site-packages/cryptography/x509/__pycache__/extensions.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cryptography/x509/__pycache__/general_name.cpython-310.pyc b/env/lib/python3.10/site-packages/cryptography/x509/__pycache__/general_name.cpython-310.pyc deleted file mode 100644 index b6b45b9..0000000 Binary files a/env/lib/python3.10/site-packages/cryptography/x509/__pycache__/general_name.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cryptography/x509/__pycache__/name.cpython-310.pyc b/env/lib/python3.10/site-packages/cryptography/x509/__pycache__/name.cpython-310.pyc deleted file mode 100644 index 14108ac..0000000 Binary files a/env/lib/python3.10/site-packages/cryptography/x509/__pycache__/name.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cryptography/x509/__pycache__/ocsp.cpython-310.pyc b/env/lib/python3.10/site-packages/cryptography/x509/__pycache__/ocsp.cpython-310.pyc deleted file mode 100644 index bd97890..0000000 Binary files a/env/lib/python3.10/site-packages/cryptography/x509/__pycache__/ocsp.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cryptography/x509/__pycache__/oid.cpython-310.pyc b/env/lib/python3.10/site-packages/cryptography/x509/__pycache__/oid.cpython-310.pyc deleted file mode 100644 index e0e94a9..0000000 Binary files a/env/lib/python3.10/site-packages/cryptography/x509/__pycache__/oid.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cryptography/x509/__pycache__/verification.cpython-310.pyc 
b/env/lib/python3.10/site-packages/cryptography/x509/__pycache__/verification.cpython-310.pyc deleted file mode 100644 index 1fc9faf..0000000 Binary files a/env/lib/python3.10/site-packages/cryptography/x509/__pycache__/verification.cpython-310.pyc and /dev/null differ diff --git a/env/lib/python3.10/site-packages/cryptography/x509/base.py b/env/lib/python3.10/site-packages/cryptography/x509/base.py deleted file mode 100644 index 6ed41e6..0000000 --- a/env/lib/python3.10/site-packages/cryptography/x509/base.py +++ /dev/null @@ -1,1226 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -import abc -import datetime -import os -import typing -import warnings - -from cryptography import utils -from cryptography.hazmat.bindings._rust import x509 as rust_x509 -from cryptography.hazmat.primitives import hashes, serialization -from cryptography.hazmat.primitives.asymmetric import ( - dsa, - ec, - ed448, - ed25519, - padding, - rsa, - x448, - x25519, -) -from cryptography.hazmat.primitives.asymmetric.types import ( - CertificateIssuerPrivateKeyTypes, - CertificateIssuerPublicKeyTypes, - CertificatePublicKeyTypes, -) -from cryptography.x509.extensions import ( - Extension, - Extensions, - ExtensionType, - _make_sequence_methods, -) -from cryptography.x509.name import Name, _ASN1Type -from cryptography.x509.oid import ObjectIdentifier - -_EARLIEST_UTC_TIME = datetime.datetime(1950, 1, 1) - -# This must be kept in sync with sign.rs's list of allowable types in -# identify_hash_type -_AllowedHashTypes = typing.Union[ - hashes.SHA224, - hashes.SHA256, - hashes.SHA384, - hashes.SHA512, - hashes.SHA3_224, - hashes.SHA3_256, - hashes.SHA3_384, - hashes.SHA3_512, -] - - -class AttributeNotFound(Exception): - def __init__(self, msg: str, oid: ObjectIdentifier) -> None: - super().__init__(msg) - self.oid = oid - - -def _reject_duplicate_extension( - extension: Extension[ExtensionType], - extensions: list[Extension[ExtensionType]], -) -> None: - # This is quadratic in the number of extensions - for e in extensions: - if e.oid == extension.oid: - raise ValueError("This extension has already been set.") - - -def _reject_duplicate_attribute( - oid: ObjectIdentifier, - attributes: list[tuple[ObjectIdentifier, bytes, int | None]], -) -> None: - # This is quadratic in the number of attributes - for attr_oid, _, _ in attributes: - if attr_oid == oid: - raise ValueError("This attribute has already been set.") - - -def _convert_to_naive_utc_time(time: datetime.datetime) -> datetime.datetime: - """Normalizes a datetime to a naive datetime in UTC. - - time -- datetime to normalize. Assumed to be in UTC if not timezone - aware. 
- """ - if time.tzinfo is not None: - offset = time.utcoffset() - offset = offset if offset else datetime.timedelta() - return time.replace(tzinfo=None) - offset - else: - return time - - -class Attribute: - def __init__( - self, - oid: ObjectIdentifier, - value: bytes, - _type: int = _ASN1Type.UTF8String.value, - ) -> None: - self._oid = oid - self._value = value - self._type = _type - - @property - def oid(self) -> ObjectIdentifier: - return self._oid - - @property - def value(self) -> bytes: - return self._value - - def __repr__(self) -> str: - return f"" - - def __eq__(self, other: object) -> bool: - if not isinstance(other, Attribute): - return NotImplemented - - return ( - self.oid == other.oid - and self.value == other.value - and self._type == other._type - ) - - def __hash__(self) -> int: - return hash((self.oid, self.value, self._type)) - - -class Attributes: - def __init__( - self, - attributes: typing.Iterable[Attribute], - ) -> None: - self._attributes = list(attributes) - - __len__, __iter__, __getitem__ = _make_sequence_methods("_attributes") - - def __repr__(self) -> str: - return f"" - - def get_attribute_for_oid(self, oid: ObjectIdentifier) -> Attribute: - for attr in self: - if attr.oid == oid: - return attr - - raise AttributeNotFound(f"No {oid} attribute was found", oid) - - -class Version(utils.Enum): - v1 = 0 - v3 = 2 - - -class InvalidVersion(Exception): - def __init__(self, msg: str, parsed_version: int) -> None: - super().__init__(msg) - self.parsed_version = parsed_version - - -class Certificate(metaclass=abc.ABCMeta): - @abc.abstractmethod - def fingerprint(self, algorithm: hashes.HashAlgorithm) -> bytes: - """ - Returns bytes using digest passed. - """ - - @property - @abc.abstractmethod - def serial_number(self) -> int: - """ - Returns certificate serial number - """ - - @property - @abc.abstractmethod - def version(self) -> Version: - """ - Returns the certificate version - """ - - @abc.abstractmethod - def public_key(self) -> CertificatePublicKeyTypes: - """ - Returns the public key - """ - - @property - @abc.abstractmethod - def public_key_algorithm_oid(self) -> ObjectIdentifier: - """ - Returns the ObjectIdentifier of the public key. - """ - - @property - @abc.abstractmethod - def not_valid_before(self) -> datetime.datetime: - """ - Not before time (represented as UTC datetime) - """ - - @property - @abc.abstractmethod - def not_valid_before_utc(self) -> datetime.datetime: - """ - Not before time (represented as a non-naive UTC datetime) - """ - - @property - @abc.abstractmethod - def not_valid_after(self) -> datetime.datetime: - """ - Not after time (represented as UTC datetime) - """ - - @property - @abc.abstractmethod - def not_valid_after_utc(self) -> datetime.datetime: - """ - Not after time (represented as a non-naive UTC datetime) - """ - - @property - @abc.abstractmethod - def issuer(self) -> Name: - """ - Returns the issuer name object. - """ - - @property - @abc.abstractmethod - def subject(self) -> Name: - """ - Returns the subject name object. - """ - - @property - @abc.abstractmethod - def signature_hash_algorithm( - self, - ) -> hashes.HashAlgorithm | None: - """ - Returns a HashAlgorithm corresponding to the type of the digest signed - in the certificate. - """ - - @property - @abc.abstractmethod - def signature_algorithm_oid(self) -> ObjectIdentifier: - """ - Returns the ObjectIdentifier of the signature algorithm. 
- """ - - @property - @abc.abstractmethod - def signature_algorithm_parameters( - self, - ) -> None | padding.PSS | padding.PKCS1v15 | ec.ECDSA: - """ - Returns the signature algorithm parameters. - """ - - @property - @abc.abstractmethod - def extensions(self) -> Extensions: - """ - Returns an Extensions object. - """ - - @property - @abc.abstractmethod - def signature(self) -> bytes: - """ - Returns the signature bytes. - """ - - @property - @abc.abstractmethod - def tbs_certificate_bytes(self) -> bytes: - """ - Returns the tbsCertificate payload bytes as defined in RFC 5280. - """ - - @property - @abc.abstractmethod - def tbs_precertificate_bytes(self) -> bytes: - """ - Returns the tbsCertificate payload bytes with the SCT list extension - stripped. - """ - - @abc.abstractmethod - def __eq__(self, other: object) -> bool: - """ - Checks equality. - """ - - @abc.abstractmethod - def __hash__(self) -> int: - """ - Computes a hash. - """ - - @abc.abstractmethod - def public_bytes(self, encoding: serialization.Encoding) -> bytes: - """ - Serializes the certificate to PEM or DER format. - """ - - @abc.abstractmethod - def verify_directly_issued_by(self, issuer: Certificate) -> None: - """ - This method verifies that certificate issuer name matches the - issuer subject name and that the certificate is signed by the - issuer's private key. No other validation is performed. - """ - - -# Runtime isinstance checks need this since the rust class is not a subclass. -Certificate.register(rust_x509.Certificate) - - -class RevokedCertificate(metaclass=abc.ABCMeta): - @property - @abc.abstractmethod - def serial_number(self) -> int: - """ - Returns the serial number of the revoked certificate. - """ - - @property - @abc.abstractmethod - def revocation_date(self) -> datetime.datetime: - """ - Returns the date of when this certificate was revoked. - """ - - @property - @abc.abstractmethod - def revocation_date_utc(self) -> datetime.datetime: - """ - Returns the date of when this certificate was revoked as a non-naive - UTC datetime. - """ - - @property - @abc.abstractmethod - def extensions(self) -> Extensions: - """ - Returns an Extensions object containing a list of Revoked extensions. - """ - - -# Runtime isinstance checks need this since the rust class is not a subclass. -RevokedCertificate.register(rust_x509.RevokedCertificate) - - -class _RawRevokedCertificate(RevokedCertificate): - def __init__( - self, - serial_number: int, - revocation_date: datetime.datetime, - extensions: Extensions, - ): - self._serial_number = serial_number - self._revocation_date = revocation_date - self._extensions = extensions - - @property - def serial_number(self) -> int: - return self._serial_number - - @property - def revocation_date(self) -> datetime.datetime: - warnings.warn( - "Properties that return a naïve datetime object have been " - "deprecated. Please switch to revocation_date_utc.", - utils.DeprecatedIn42, - stacklevel=2, - ) - return self._revocation_date - - @property - def revocation_date_utc(self) -> datetime.datetime: - return self._revocation_date.replace(tzinfo=datetime.timezone.utc) - - @property - def extensions(self) -> Extensions: - return self._extensions - - -class CertificateRevocationList(metaclass=abc.ABCMeta): - @abc.abstractmethod - def public_bytes(self, encoding: serialization.Encoding) -> bytes: - """ - Serializes the CRL to PEM or DER format. - """ - - @abc.abstractmethod - def fingerprint(self, algorithm: hashes.HashAlgorithm) -> bytes: - """ - Returns bytes using digest passed. 
- """ - - @abc.abstractmethod - def get_revoked_certificate_by_serial_number( - self, serial_number: int - ) -> RevokedCertificate | None: - """ - Returns an instance of RevokedCertificate or None if the serial_number - is not in the CRL. - """ - - @property - @abc.abstractmethod - def signature_hash_algorithm( - self, - ) -> hashes.HashAlgorithm | None: - """ - Returns a HashAlgorithm corresponding to the type of the digest signed - in the certificate. - """ - - @property - @abc.abstractmethod - def signature_algorithm_oid(self) -> ObjectIdentifier: - """ - Returns the ObjectIdentifier of the signature algorithm. - """ - - @property - @abc.abstractmethod - def signature_algorithm_parameters( - self, - ) -> None | padding.PSS | padding.PKCS1v15 | ec.ECDSA: - """ - Returns the signature algorithm parameters. - """ - - @property - @abc.abstractmethod - def issuer(self) -> Name: - """ - Returns the X509Name with the issuer of this CRL. - """ - - @property - @abc.abstractmethod - def next_update(self) -> datetime.datetime | None: - """ - Returns the date of next update for this CRL. - """ - - @property - @abc.abstractmethod - def next_update_utc(self) -> datetime.datetime | None: - """ - Returns the date of next update for this CRL as a non-naive UTC - datetime. - """ - - @property - @abc.abstractmethod - def last_update(self) -> datetime.datetime: - """ - Returns the date of last update for this CRL. - """ - - @property - @abc.abstractmethod - def last_update_utc(self) -> datetime.datetime: - """ - Returns the date of last update for this CRL as a non-naive UTC - datetime. - """ - - @property - @abc.abstractmethod - def extensions(self) -> Extensions: - """ - Returns an Extensions object containing a list of CRL extensions. - """ - - @property - @abc.abstractmethod - def signature(self) -> bytes: - """ - Returns the signature bytes. - """ - - @property - @abc.abstractmethod - def tbs_certlist_bytes(self) -> bytes: - """ - Returns the tbsCertList payload bytes as defined in RFC 5280. - """ - - @abc.abstractmethod - def __eq__(self, other: object) -> bool: - """ - Checks equality. - """ - - @abc.abstractmethod - def __len__(self) -> int: - """ - Number of revoked certificates in the CRL. - """ - - @typing.overload - def __getitem__(self, idx: int) -> RevokedCertificate: ... - - @typing.overload - def __getitem__(self, idx: slice) -> list[RevokedCertificate]: ... - - @abc.abstractmethod - def __getitem__( - self, idx: int | slice - ) -> RevokedCertificate | list[RevokedCertificate]: - """ - Returns a revoked certificate (or slice of revoked certificates). - """ - - @abc.abstractmethod - def __iter__(self) -> typing.Iterator[RevokedCertificate]: - """ - Iterator over the revoked certificates - """ - - @abc.abstractmethod - def is_signature_valid( - self, public_key: CertificateIssuerPublicKeyTypes - ) -> bool: - """ - Verifies signature of revocation list against given public key. - """ - - -CertificateRevocationList.register(rust_x509.CertificateRevocationList) - - -class CertificateSigningRequest(metaclass=abc.ABCMeta): - @abc.abstractmethod - def __eq__(self, other: object) -> bool: - """ - Checks equality. - """ - - @abc.abstractmethod - def __hash__(self) -> int: - """ - Computes a hash. - """ - - @abc.abstractmethod - def public_key(self) -> CertificatePublicKeyTypes: - """ - Returns the public key - """ - - @property - @abc.abstractmethod - def subject(self) -> Name: - """ - Returns the subject name object. 
- """ - - @property - @abc.abstractmethod - def signature_hash_algorithm( - self, - ) -> hashes.HashAlgorithm | None: - """ - Returns a HashAlgorithm corresponding to the type of the digest signed - in the certificate. - """ - - @property - @abc.abstractmethod - def signature_algorithm_oid(self) -> ObjectIdentifier: - """ - Returns the ObjectIdentifier of the signature algorithm. - """ - - @property - @abc.abstractmethod - def signature_algorithm_parameters( - self, - ) -> None | padding.PSS | padding.PKCS1v15 | ec.ECDSA: - """ - Returns the signature algorithm parameters. - """ - - @property - @abc.abstractmethod - def extensions(self) -> Extensions: - """ - Returns the extensions in the signing request. - """ - - @property - @abc.abstractmethod - def attributes(self) -> Attributes: - """ - Returns an Attributes object. - """ - - @abc.abstractmethod - def public_bytes(self, encoding: serialization.Encoding) -> bytes: - """ - Encodes the request to PEM or DER format. - """ - - @property - @abc.abstractmethod - def signature(self) -> bytes: - """ - Returns the signature bytes. - """ - - @property - @abc.abstractmethod - def tbs_certrequest_bytes(self) -> bytes: - """ - Returns the PKCS#10 CertificationRequestInfo bytes as defined in RFC - 2986. - """ - - @property - @abc.abstractmethod - def is_signature_valid(self) -> bool: - """ - Verifies signature of signing request. - """ - - @abc.abstractmethod - def get_attribute_for_oid(self, oid: ObjectIdentifier) -> bytes: - """ - Get the attribute value for a given OID. - """ - - -# Runtime isinstance checks need this since the rust class is not a subclass. -CertificateSigningRequest.register(rust_x509.CertificateSigningRequest) - - -load_pem_x509_certificate = rust_x509.load_pem_x509_certificate -load_der_x509_certificate = rust_x509.load_der_x509_certificate - -load_pem_x509_certificates = rust_x509.load_pem_x509_certificates - -load_pem_x509_csr = rust_x509.load_pem_x509_csr -load_der_x509_csr = rust_x509.load_der_x509_csr - -load_pem_x509_crl = rust_x509.load_pem_x509_crl -load_der_x509_crl = rust_x509.load_der_x509_crl - - -class CertificateSigningRequestBuilder: - def __init__( - self, - subject_name: Name | None = None, - extensions: list[Extension[ExtensionType]] = [], - attributes: list[tuple[ObjectIdentifier, bytes, int | None]] = [], - ): - """ - Creates an empty X.509 certificate request (v1). - """ - self._subject_name = subject_name - self._extensions = extensions - self._attributes = attributes - - def subject_name(self, name: Name) -> CertificateSigningRequestBuilder: - """ - Sets the certificate requestor's distinguished name. - """ - if not isinstance(name, Name): - raise TypeError("Expecting x509.Name object.") - if self._subject_name is not None: - raise ValueError("The subject name may only be set once.") - return CertificateSigningRequestBuilder( - name, self._extensions, self._attributes - ) - - def add_extension( - self, extval: ExtensionType, critical: bool - ) -> CertificateSigningRequestBuilder: - """ - Adds an X.509 extension to the certificate request. 
- """ - if not isinstance(extval, ExtensionType): - raise TypeError("extension must be an ExtensionType") - - extension = Extension(extval.oid, critical, extval) - _reject_duplicate_extension(extension, self._extensions) - - return CertificateSigningRequestBuilder( - self._subject_name, - [*self._extensions, extension], - self._attributes, - ) - - def add_attribute( - self, - oid: ObjectIdentifier, - value: bytes, - *, - _tag: _ASN1Type | None = None, - ) -> CertificateSigningRequestBuilder: - """ - Adds an X.509 attribute with an OID and associated value. - """ - if not isinstance(oid, ObjectIdentifier): - raise TypeError("oid must be an ObjectIdentifier") - - if not isinstance(value, bytes): - raise TypeError("value must be bytes") - - if _tag is not None and not isinstance(_tag, _ASN1Type): - raise TypeError("tag must be _ASN1Type") - - _reject_duplicate_attribute(oid, self._attributes) - - if _tag is not None: - tag = _tag.value - else: - tag = None - - return CertificateSigningRequestBuilder( - self._subject_name, - self._extensions, - [*self._attributes, (oid, value, tag)], - ) - - def sign( - self, - private_key: CertificateIssuerPrivateKeyTypes, - algorithm: _AllowedHashTypes | None, - backend: typing.Any = None, - *, - rsa_padding: padding.PSS | padding.PKCS1v15 | None = None, - ) -> CertificateSigningRequest: - """ - Signs the request using the requestor's private key. - """ - if self._subject_name is None: - raise ValueError("A CertificateSigningRequest must have a subject") - - if rsa_padding is not None: - if not isinstance(rsa_padding, (padding.PSS, padding.PKCS1v15)): - raise TypeError("Padding must be PSS or PKCS1v15") - if not isinstance(private_key, rsa.RSAPrivateKey): - raise TypeError("Padding is only supported for RSA keys") - - return rust_x509.create_x509_csr( - self, private_key, algorithm, rsa_padding - ) - - -class CertificateBuilder: - _extensions: list[Extension[ExtensionType]] - - def __init__( - self, - issuer_name: Name | None = None, - subject_name: Name | None = None, - public_key: CertificatePublicKeyTypes | None = None, - serial_number: int | None = None, - not_valid_before: datetime.datetime | None = None, - not_valid_after: datetime.datetime | None = None, - extensions: list[Extension[ExtensionType]] = [], - ) -> None: - self._version = Version.v3 - self._issuer_name = issuer_name - self._subject_name = subject_name - self._public_key = public_key - self._serial_number = serial_number - self._not_valid_before = not_valid_before - self._not_valid_after = not_valid_after - self._extensions = extensions - - def issuer_name(self, name: Name) -> CertificateBuilder: - """ - Sets the CA's distinguished name. - """ - if not isinstance(name, Name): - raise TypeError("Expecting x509.Name object.") - if self._issuer_name is not None: - raise ValueError("The issuer name may only be set once.") - return CertificateBuilder( - name, - self._subject_name, - self._public_key, - self._serial_number, - self._not_valid_before, - self._not_valid_after, - self._extensions, - ) - - def subject_name(self, name: Name) -> CertificateBuilder: - """ - Sets the requestor's distinguished name. 
- """ - if not isinstance(name, Name): - raise TypeError("Expecting x509.Name object.") - if self._subject_name is not None: - raise ValueError("The subject name may only be set once.") - return CertificateBuilder( - self._issuer_name, - name, - self._public_key, - self._serial_number, - self._not_valid_before, - self._not_valid_after, - self._extensions, - ) - - def public_key( - self, - key: CertificatePublicKeyTypes, - ) -> CertificateBuilder: - """ - Sets the requestor's public key (as found in the signing request). - """ - if not isinstance( - key, - ( - dsa.DSAPublicKey, - rsa.RSAPublicKey, - ec.EllipticCurvePublicKey, - ed25519.Ed25519PublicKey, - ed448.Ed448PublicKey, - x25519.X25519PublicKey, - x448.X448PublicKey, - ), - ): - raise TypeError( - "Expecting one of DSAPublicKey, RSAPublicKey," - " EllipticCurvePublicKey, Ed25519PublicKey," - " Ed448PublicKey, X25519PublicKey, or " - "X448PublicKey." - ) - if self._public_key is not None: - raise ValueError("The public key may only be set once.") - return CertificateBuilder( - self._issuer_name, - self._subject_name, - key, - self._serial_number, - self._not_valid_before, - self._not_valid_after, - self._extensions, - ) - - def serial_number(self, number: int) -> CertificateBuilder: - """ - Sets the certificate serial number. - """ - if not isinstance(number, int): - raise TypeError("Serial number must be of integral type.") - if self._serial_number is not None: - raise ValueError("The serial number may only be set once.") - if number <= 0: - raise ValueError("The serial number should be positive.") - - # ASN.1 integers are always signed, so most significant bit must be - # zero. - if number.bit_length() >= 160: # As defined in RFC 5280 - raise ValueError( - "The serial number should not be more than 159 bits." - ) - return CertificateBuilder( - self._issuer_name, - self._subject_name, - self._public_key, - number, - self._not_valid_before, - self._not_valid_after, - self._extensions, - ) - - def not_valid_before(self, time: datetime.datetime) -> CertificateBuilder: - """ - Sets the certificate activation time. - """ - if not isinstance(time, datetime.datetime): - raise TypeError("Expecting datetime object.") - if self._not_valid_before is not None: - raise ValueError("The not valid before may only be set once.") - time = _convert_to_naive_utc_time(time) - if time < _EARLIEST_UTC_TIME: - raise ValueError( - "The not valid before date must be on or after" - " 1950 January 1)." - ) - if self._not_valid_after is not None and time > self._not_valid_after: - raise ValueError( - "The not valid before date must be before the not valid after " - "date." - ) - return CertificateBuilder( - self._issuer_name, - self._subject_name, - self._public_key, - self._serial_number, - time, - self._not_valid_after, - self._extensions, - ) - - def not_valid_after(self, time: datetime.datetime) -> CertificateBuilder: - """ - Sets the certificate expiration time. - """ - if not isinstance(time, datetime.datetime): - raise TypeError("Expecting datetime object.") - if self._not_valid_after is not None: - raise ValueError("The not valid after may only be set once.") - time = _convert_to_naive_utc_time(time) - if time < _EARLIEST_UTC_TIME: - raise ValueError( - "The not valid after date must be on or after" - " 1950 January 1." - ) - if ( - self._not_valid_before is not None - and time < self._not_valid_before - ): - raise ValueError( - "The not valid after date must be after the not valid before " - "date." 
- ) - return CertificateBuilder( - self._issuer_name, - self._subject_name, - self._public_key, - self._serial_number, - self._not_valid_before, - time, - self._extensions, - ) - - def add_extension( - self, extval: ExtensionType, critical: bool - ) -> CertificateBuilder: - """ - Adds an X.509 extension to the certificate. - """ - if not isinstance(extval, ExtensionType): - raise TypeError("extension must be an ExtensionType") - - extension = Extension(extval.oid, critical, extval) - _reject_duplicate_extension(extension, self._extensions) - - return CertificateBuilder( - self._issuer_name, - self._subject_name, - self._public_key, - self._serial_number, - self._not_valid_before, - self._not_valid_after, - [*self._extensions, extension], - ) - - def sign( - self, - private_key: CertificateIssuerPrivateKeyTypes, - algorithm: _AllowedHashTypes | None, - backend: typing.Any = None, - *, - rsa_padding: padding.PSS | padding.PKCS1v15 | None = None, - ) -> Certificate: - """ - Signs the certificate using the CA's private key. - """ - if self._subject_name is None: - raise ValueError("A certificate must have a subject name") - - if self._issuer_name is None: - raise ValueError("A certificate must have an issuer name") - - if self._serial_number is None: - raise ValueError("A certificate must have a serial number") - - if self._not_valid_before is None: - raise ValueError("A certificate must have a not valid before time") - - if self._not_valid_after is None: - raise ValueError("A certificate must have a not valid after time") - - if self._public_key is None: - raise ValueError("A certificate must have a public key") - - if rsa_padding is not None: - if not isinstance(rsa_padding, (padding.PSS, padding.PKCS1v15)): - raise TypeError("Padding must be PSS or PKCS1v15") - if not isinstance(private_key, rsa.RSAPrivateKey): - raise TypeError("Padding is only supported for RSA keys") - - return rust_x509.create_x509_certificate( - self, private_key, algorithm, rsa_padding - ) - - -class CertificateRevocationListBuilder: - _extensions: list[Extension[ExtensionType]] - _revoked_certificates: list[RevokedCertificate] - - def __init__( - self, - issuer_name: Name | None = None, - last_update: datetime.datetime | None = None, - next_update: datetime.datetime | None = None, - extensions: list[Extension[ExtensionType]] = [], - revoked_certificates: list[RevokedCertificate] = [], - ): - self._issuer_name = issuer_name - self._last_update = last_update - self._next_update = next_update - self._extensions = extensions - self._revoked_certificates = revoked_certificates - - def issuer_name( - self, issuer_name: Name - ) -> CertificateRevocationListBuilder: - if not isinstance(issuer_name, Name): - raise TypeError("Expecting x509.Name object.") - if self._issuer_name is not None: - raise ValueError("The issuer name may only be set once.") - return CertificateRevocationListBuilder( - issuer_name, - self._last_update, - self._next_update, - self._extensions, - self._revoked_certificates, - ) - - def last_update( - self, last_update: datetime.datetime - ) -> CertificateRevocationListBuilder: - if not isinstance(last_update, datetime.datetime): - raise TypeError("Expecting datetime object.") - if self._last_update is not None: - raise ValueError("Last update may only be set once.") - last_update = _convert_to_naive_utc_time(last_update) - if last_update < _EARLIEST_UTC_TIME: - raise ValueError( - "The last update date must be on or after 1950 January 1." 
diff --git a/env/lib/python3.10/site-packages/cryptography/x509/certificate_transparency.py b/env/lib/python3.10/site-packages/cryptography/x509/certificate_transparency.py
deleted file mode 100644
index 73647ee..0000000
diff --git a/env/lib/python3.10/site-packages/cryptography/x509/extensions.py b/env/lib/python3.10/site-packages/cryptography/x509/extensions.py
deleted file mode 100644
index 5e7486a..0000000
diff --git a/env/lib/python3.10/site-packages/cryptography/x509/general_name.py b/env/lib/python3.10/site-packages/cryptography/x509/general_name.py
deleted file mode 100644
index 672f287..0000000
not isinstance(type_id, ObjectIdentifier): - raise TypeError("type_id must be an ObjectIdentifier") - if not isinstance(value, bytes): - raise TypeError("value must be a binary string") - - self._type_id = type_id - self._value = value - - @property - def type_id(self) -> ObjectIdentifier: - return self._type_id - - @property - def value(self) -> bytes: - return self._value - - def __repr__(self) -> str: - return f"" - - def __eq__(self, other: object) -> bool: - if not isinstance(other, OtherName): - return NotImplemented - - return self.type_id == other.type_id and self.value == other.value - - def __hash__(self) -> int: - return hash((self.type_id, self.value)) diff --git a/env/lib/python3.10/site-packages/cryptography/x509/name.py b/env/lib/python3.10/site-packages/cryptography/x509/name.py deleted file mode 100644 index 1b6b89d..0000000 --- a/env/lib/python3.10/site-packages/cryptography/x509/name.py +++ /dev/null @@ -1,465 +0,0 @@ -# This file is dual licensed under the terms of the Apache License, Version -# 2.0, and the BSD License. See the LICENSE file in the root of this repository -# for complete details. - -from __future__ import annotations - -import binascii -import re -import sys -import typing -import warnings - -from cryptography import utils -from cryptography.hazmat.bindings._rust import x509 as rust_x509 -from cryptography.x509.oid import NameOID, ObjectIdentifier - - -class _ASN1Type(utils.Enum): - BitString = 3 - OctetString = 4 - UTF8String = 12 - NumericString = 18 - PrintableString = 19 - T61String = 20 - IA5String = 22 - UTCTime = 23 - GeneralizedTime = 24 - VisibleString = 26 - UniversalString = 28 - BMPString = 30 - - -_ASN1_TYPE_TO_ENUM = {i.value: i for i in _ASN1Type} -_NAMEOID_DEFAULT_TYPE: dict[ObjectIdentifier, _ASN1Type] = { - NameOID.COUNTRY_NAME: _ASN1Type.PrintableString, - NameOID.JURISDICTION_COUNTRY_NAME: _ASN1Type.PrintableString, - NameOID.SERIAL_NUMBER: _ASN1Type.PrintableString, - NameOID.DN_QUALIFIER: _ASN1Type.PrintableString, - NameOID.EMAIL_ADDRESS: _ASN1Type.IA5String, - NameOID.DOMAIN_COMPONENT: _ASN1Type.IA5String, -} - -# Type alias -_OidNameMap = typing.Mapping[ObjectIdentifier, str] -_NameOidMap = typing.Mapping[str, ObjectIdentifier] - -#: Short attribute names from RFC 4514: -#: https://tools.ietf.org/html/rfc4514#page-7 -_NAMEOID_TO_NAME: _OidNameMap = { - NameOID.COMMON_NAME: "CN", - NameOID.LOCALITY_NAME: "L", - NameOID.STATE_OR_PROVINCE_NAME: "ST", - NameOID.ORGANIZATION_NAME: "O", - NameOID.ORGANIZATIONAL_UNIT_NAME: "OU", - NameOID.COUNTRY_NAME: "C", - NameOID.STREET_ADDRESS: "STREET", - NameOID.DOMAIN_COMPONENT: "DC", - NameOID.USER_ID: "UID", -} -_NAME_TO_NAMEOID = {v: k for k, v in _NAMEOID_TO_NAME.items()} - -_NAMEOID_LENGTH_LIMIT = { - NameOID.COUNTRY_NAME: (2, 2), - NameOID.JURISDICTION_COUNTRY_NAME: (2, 2), - NameOID.COMMON_NAME: (1, 64), -} - - -def _escape_dn_value(val: str | bytes) -> str: - """Escape special characters in RFC4514 Distinguished Name value.""" - - if not val: - return "" - - # RFC 4514 Section 2.4 defines the value as being the # (U+0023) character - # followed by the hexadecimal encoding of the octets. 
diff --git a/env/lib/python3.10/site-packages/cryptography/x509/ocsp.py b/env/lib/python3.10/site-packages/cryptography/x509/ocsp.py
deleted file mode 100644
index dbb475d..0000000
--- a/env/lib/python3.10/site-packages/cryptography/x509/ocsp.py
+++ /dev/null
@@ -1,678 +0,0 @@
 [... 678 lines of vendored cryptography source removed ...]
diff --git a/env/lib/python3.10/site-packages/cryptography/x509/oid.py b/env/lib/python3.10/site-packages/cryptography/x509/oid.py
deleted file mode 100644
index d4e409e..0000000
--- a/env/lib/python3.10/site-packages/cryptography/x509/oid.py
+++ /dev/null
@@ -1,35 +0,0 @@
 [... 35 lines of vendored cryptography source removed ...]
diff --git a/env/lib/python3.10/site-packages/cryptography/x509/verification.py b/env/lib/python3.10/site-packages/cryptography/x509/verification.py
deleted file mode 100644
index b836506..0000000
--- a/env/lib/python3.10/site-packages/cryptography/x509/verification.py
+++ /dev/null
@@ -1,28 +0,0 @@
 [... 28 lines of vendored cryptography source removed ...]
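The hunks above and below are all deletions of files under env/, a virtual environment that was committed by mistake. As a hedged aside, not part of this diff: a cleanup like this is usually done with `git rm --cached`, which untracks the directory while leaving the local environment on disk. A minimal sketch of the assumed workflow:

```bash
# Untrack the accidentally committed virtual environment without
# deleting it from the working tree.
git rm -r --cached env/

# Commit the removal; an ignore rule for the directory keeps it
# from being staged again in future commits.
git commit -m "Remove accidentally committed virtual environment"
```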
diff --git a/env/lib/python3.10/site-packages/distutils-precedence.pth b/env/lib/python3.10/site-packages/distutils-precedence.pth
deleted file mode 100644
index 6de4198..0000000
--- a/env/lib/python3.10/site-packages/distutils-precedence.pth
+++ /dev/null
@@ -1 +0,0 @@
-import os; var = 'SETUPTOOLS_USE_DISTUTILS'; enabled = os.environ.get(var, 'stdlib') == 'local'; enabled and __import__('_distutils_hack').add_shim();
diff --git a/env/lib/python3.10/site-packages/docutils-0.21.2.dist-info/COPYING.txt b/env/lib/python3.10/site-packages/docutils-0.21.2.dist-info/COPYING.txt
deleted file mode 100644
index 333583c..0000000
--- a/env/lib/python3.10/site-packages/docutils-0.21.2.dist-info/COPYING.txt
+++ /dev/null
@@ -1,159 +0,0 @@
 [... 159 lines of the vendored docutils COPYING.txt (public-domain dedication and license exceptions) removed; the closing link definitions follow ...]
- -Plaintext versions of all the linked-to licenses are provided in the -licenses_ directory. - -.. _sandbox: https://docutils.sourceforge.io/sandbox/README.html -.. _licenses: licenses/ -.. _GNU General Public License: https://www.gnu.org/copyleft/gpl.html -.. _BSD 2-Clause License: http://opensource.org/licenses/BSD-2-Clause -.. _BSD 3-Clause License: https://opensource.org/licenses/BSD-3-Clause -.. _Zope Public License Version 2.1: https://opensource.org/license/zpl-2-1/ -.. _OSI-approved: http://opensource.org/licenses/ -.. _license-list: -.. _GPL-compatible: https://www.gnu.org/licenses/license-list.html diff --git a/env/lib/python3.10/site-packages/docutils-0.21.2.dist-info/INSTALLER b/env/lib/python3.10/site-packages/docutils-0.21.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/env/lib/python3.10/site-packages/docutils-0.21.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/env/lib/python3.10/site-packages/docutils-0.21.2.dist-info/METADATA b/env/lib/python3.10/site-packages/docutils-0.21.2.dist-info/METADATA deleted file mode 100644 index bec0da0..0000000 --- a/env/lib/python3.10/site-packages/docutils-0.21.2.dist-info/METADATA +++ /dev/null @@ -1,63 +0,0 @@ -Metadata-Version: 2.1 -Name: docutils -Version: 0.21.2 -Summary: Docutils -- Python Documentation Utilities -Author-email: David Goodger -Maintainer-email: docutils-develop list -Requires-Python: >=3.9 -Description-Content-Type: text/plain -Classifier: Development Status :: 4 - Beta -Classifier: Environment :: Console -Classifier: Intended Audience :: End Users/Desktop -Classifier: Intended Audience :: Other Audience -Classifier: Intended Audience :: Developers -Classifier: Intended Audience :: System Administrators -Classifier: License :: Public Domain -Classifier: License :: OSI Approved :: Python Software Foundation License -Classifier: License :: OSI Approved :: BSD License -Classifier: License :: OSI Approved :: GNU General Public License (GPL) -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3 :: Only -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Classifier: Topic :: Documentation -Classifier: Topic :: Software Development :: Documentation -Classifier: Topic :: Text Processing -Classifier: Natural Language :: English -Classifier: Natural Language :: Afrikaans -Classifier: Natural Language :: Arabic -Classifier: Natural Language :: Catalan -Classifier: Natural Language :: Catalan (Valencian) -Classifier: Natural Language :: Chinese (Simplified) -Classifier: Natural Language :: Chinese (Traditional) -Classifier: Natural Language :: Czech -Classifier: Natural Language :: Danish -Classifier: Natural Language :: Dutch -Classifier: Natural Language :: Esperanto -Classifier: Natural Language :: Finnish -Classifier: Natural Language :: French -Classifier: Natural Language :: Galician -Classifier: Natural Language :: Georgian -Classifier: Natural Language :: German -Classifier: Natural Language :: Hebrew -Classifier: Natural Language :: Italian -Classifier: Natural Language :: Japanese -Classifier: Natural Language :: Korean -Classifier: Natural Language :: Latvian -Classifier: Natural Language :: Lithuanian -Classifier: Natural Language :: Persian -Classifier: Natural Language :: Polish -Classifier: Natural Language :: Portuguese (Brazilian) -Classifier: Natural Language :: Russian 
-Classifier: Natural Language :: Slovak -Classifier: Natural Language :: Spanish -Classifier: Natural Language :: Swedish -Classifier: Natural Language :: Ukrainian -Project-URL: Homepage, https://docutils.sourceforge.io - -Docutils is a modular system for processing documentation -into useful formats, such as HTML, XML, and LaTeX. For -input Docutils supports reStructuredText, an easy-to-read, -what-you-see-is-what-you-get plaintext markup syntax. diff --git a/env/lib/python3.10/site-packages/docutils-0.21.2.dist-info/RECORD b/env/lib/python3.10/site-packages/docutils-0.21.2.dist-info/RECORD deleted file mode 100644 index 6dfb557..0000000 --- a/env/lib/python3.10/site-packages/docutils-0.21.2.dist-info/RECORD +++ /dev/null @@ -1,347 +0,0 @@ -../../../bin/docutils,sha256=Ji96-Qai1Dnbh20XEBmRtlV2c_FeZIKKwuf3qys6vIo,262 -../../../bin/rst2html,sha256=6pT4HC173pP_FXhXL5sbH-OhJAtbiyuk9zxANfZ4P1k,266 -../../../bin/rst2html4,sha256=gw9-R_dggKTO3W3zLP_WQR9zBjgG8Yv8wM2BHhgLDsI,268 -../../../bin/rst2html5,sha256=2u9nE9mDfSqclp41cBQ8aH7G8CeRq2WkzxNGfp0giD0,268 -../../../bin/rst2latex,sha256=kTg_AiHr1qNf94sUYiduXX6HcJChGZHPzyEw4wYIKC0,268 -../../../bin/rst2man,sha256=Yfxftmt-GB_7AlCE9a2NnE-7rl6DyKel2A4woX_5aTM,264 -../../../bin/rst2odt,sha256=hEsiM06PSSQ0Nhm3BlX4dx1FoVmo_YnALUkzgTlKhZg,264 -../../../bin/rst2pseudoxml,sha256=3lPQYYp__tO1aRXylZoBWK0BoyNNKOYQ3ZAAoNtwxiA,276 -../../../bin/rst2s5,sha256=4EEF6uOSG1TXpHNs5ohWWMXCGSt4uTS63eLu0TnL2Tc,262 -../../../bin/rst2xetex,sha256=rBVUKW3EEWFTUjHFcmUXRATIeMwej-JZ4MR78OePWnU,268 -../../../bin/rst2xml,sha256=ImF2xXUbfh_4H8rXge1nU9ZSh5mrlVMNLmQXHToDcTc,264 -docutils-0.21.2.dist-info/COPYING.txt,sha256=U07fdkGr3mn8UA8ijAl1GBLAWJgV7vDAgrxroxfWRqw,6310 -docutils-0.21.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -docutils-0.21.2.dist-info/METADATA,sha256=3oOygKJQ4otFhgmtONEmLPiLjUgZUQ5Fu1ZFHxTP4C8,2776 -docutils-0.21.2.dist-info/RECORD,, -docutils-0.21.2.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81 -docutils-0.21.2.dist-info/entry_points.txt,sha256=9xC35dFk4tSRrSMOKqq6kYlAZDEERdBM7fmwDLBMifY,379 -docutils/__init__.py,sha256=gmP9fnbribb7oZWoe4PTw8Obh_B_bt3e_CXKi45Xzus,10293 -docutils/__main__.py,sha256=HviLRJb_t90M1fOQ8s8J2A4XZXSvw4cYHkrAtZjR2XA,3625 -docutils/__pycache__/__init__.cpython-310.pyc,, -docutils/__pycache__/__main__.cpython-310.pyc,, -docutils/__pycache__/core.cpython-310.pyc,, -docutils/__pycache__/examples.cpython-310.pyc,, -docutils/__pycache__/frontend.cpython-310.pyc,, -docutils/__pycache__/io.cpython-310.pyc,, -docutils/__pycache__/nodes.cpython-310.pyc,, -docutils/__pycache__/statemachine.cpython-310.pyc,, -docutils/core.py,sha256=Lm2qwSppidgDBjoZQpOLiodDKtEbkT_CsLH93Iq4M6U,33045 -docutils/docutils.conf,sha256=F2yH40kC3N5nnjGIRdT7DHHGXbDKbd5YsbhmQxF2CNY,151 -docutils/examples.py,sha256=nP7S8EnZk4w3PYi2Ymeu9_UUcDIi39lP049Pe6iBECU,3961 -docutils/frontend.py,sha256=2oRkks_lIJHhlk537ejUki5dyHodR0SMuoZLXYRcUtI,44396 -docutils/io.py,sha256=VrAIuTlsf93O3wAYMK_PC0DqINMvRxuZZC5W_-6NEOg,22583 -docutils/languages/__init__.py,sha256=I8j0eJD3Jj3Q1C5ALqhYa-f8kCCQDPxmgn2VdPeTusI,2921 -docutils/languages/__pycache__/__init__.cpython-310.pyc,, -docutils/languages/__pycache__/af.cpython-310.pyc,, -docutils/languages/__pycache__/ar.cpython-310.pyc,, -docutils/languages/__pycache__/ca.cpython-310.pyc,, -docutils/languages/__pycache__/cs.cpython-310.pyc,, -docutils/languages/__pycache__/da.cpython-310.pyc,, -docutils/languages/__pycache__/de.cpython-310.pyc,, -docutils/languages/__pycache__/en.cpython-310.pyc,, 
-docutils/languages/__pycache__/eo.cpython-310.pyc,, -docutils/languages/__pycache__/es.cpython-310.pyc,, -docutils/languages/__pycache__/fa.cpython-310.pyc,, -docutils/languages/__pycache__/fi.cpython-310.pyc,, -docutils/languages/__pycache__/fr.cpython-310.pyc,, -docutils/languages/__pycache__/gl.cpython-310.pyc,, -docutils/languages/__pycache__/he.cpython-310.pyc,, -docutils/languages/__pycache__/it.cpython-310.pyc,, -docutils/languages/__pycache__/ja.cpython-310.pyc,, -docutils/languages/__pycache__/ka.cpython-310.pyc,, -docutils/languages/__pycache__/ko.cpython-310.pyc,, -docutils/languages/__pycache__/lt.cpython-310.pyc,, -docutils/languages/__pycache__/lv.cpython-310.pyc,, -docutils/languages/__pycache__/nl.cpython-310.pyc,, -docutils/languages/__pycache__/pl.cpython-310.pyc,, -docutils/languages/__pycache__/pt_br.cpython-310.pyc,, -docutils/languages/__pycache__/ru.cpython-310.pyc,, -docutils/languages/__pycache__/sk.cpython-310.pyc,, -docutils/languages/__pycache__/sv.cpython-310.pyc,, -docutils/languages/__pycache__/uk.cpython-310.pyc,, -docutils/languages/__pycache__/zh_cn.cpython-310.pyc,, -docutils/languages/__pycache__/zh_tw.cpython-310.pyc,, -docutils/languages/af.py,sha256=bjIWD_cNZAyZ9XIqqEoQaSljWlvP0vrQi8p8Fu9Nj2o,1831 -docutils/languages/ar.py,sha256=3FRbb0CSpXPHSQru96bqPDGGEdcnWhQIUG_ndpmacfg,1943 -docutils/languages/ca.py,sha256=b98Y79DEl88Jt2IZsKaH1OaJlC9W5khQcOV71Tvwpj0,2085 -docutils/languages/cs.py,sha256=qkEwPMKaPgw3D4qKCJoGXfDczIA3Wju8joBobn5hMvg,1832 -docutils/languages/da.py,sha256=1db0GiWS2YYOIqWru9BGeun7E3c-lwAgCWA6epn6KH4,1856 -docutils/languages/de.py,sha256=oaGzlhKsE4yFKTnCKrZt8nMnwZGqQ-PDT92UgUwRYzs,1728 -docutils/languages/en.py,sha256=MOTXOluYEnUwSHRlkINnBXMI8sbIfGahXF8RLws4NSk,1854 -docutils/languages/eo.py,sha256=p-wQOrAdL9tQ-89OT8A1VS8sO0OjogmPXRBAoihaAc0,1895 -docutils/languages/es.py,sha256=Jf81tKn_FJ7Kieozten6hn8WjVnYpZ9vsu4N6ER4O1s,1854 -docutils/languages/fa.py,sha256=dXhN7qZnzv1ksnR0Yb5LrSgNLwXy387ILj90v5iHHHM,1958 -docutils/languages/fi.py,sha256=34cPWxDFZXBnOw5LWENSIKaFqFLxZOc6vIioLLnS2do,1892 -docutils/languages/fr.py,sha256=zIPQD7UdOx3FPIBgIAT_mWNk4dped04jK8No2kI1Zic,1799 -docutils/languages/gl.py,sha256=Hz30Wfc-CMuGxKoHzPtY2Wdn-A6tFyvirnnFGZdZ_04,1958 -docutils/languages/he.py,sha256=eHyDSLwp6Y55GyHlgr8yMOkR-gxSgk7Ge8gfsm5n_Io,1878 -docutils/languages/it.py,sha256=aqmXdit3DYMhnke5MmyQIAVfCcrFuKzWmv0L5H06AI8,1814 -docutils/languages/ja.py,sha256=8C-hjqB7fa_Asa-uKKKkHRqT6NTjO3IJiiks6JADJNU,1890 -docutils/languages/ka.py,sha256=aqqIgkJyhLg3FOYWuhBhYDgzAj_nos9WwEQqI-eUcwg,2429 -docutils/languages/ko.py,sha256=eqKib7kW6h8DKg6inoXAtJa3f8FygdT37eGqiMifzZU,1832 -docutils/languages/lt.py,sha256=ixPG61Q2xB6J0uk0TXTXDwfKcEQgqIYC9Tc4VrBMQEY,1919 -docutils/languages/lv.py,sha256=pSZ7y94j6YWKLz4U16uBVwZsgKpd4jWkvcAgwmkDjv0,1851 -docutils/languages/nl.py,sha256=LKV2Hkuh0kH17w_ukZoAtTIlvWSo5iC3g7nPFDHoDDY,1871 -docutils/languages/pl.py,sha256=zyW9iwwriTX8YIGbT54DKgvkdyfiPxm49R0e2eIBxKE,1830 -docutils/languages/pt_br.py,sha256=navcDOoMajIE3yqibn7NPEIf-_oF15ECBWk4beMY3Zw,1865 -docutils/languages/ru.py,sha256=BAQA-1TZEeHpK44L_Os9V-WKUBt9lPApRl1nFM1zOhw,2070 -docutils/languages/sk.py,sha256=r-vrXCNGbDqFXAxM9WpRaH-ws1KumaIb1yramTTlmlI,1788 -docutils/languages/sv.py,sha256=bmJkUSdVn_j8s_P_Nu4KGBM2F42Og7sFzMNH238eKtQ,1908 -docutils/languages/uk.py,sha256=abLYoA0w2pJWlGLBglvUvEnGrhAVgbRyDEmlwV5ntxQ,2062 -docutils/languages/zh_cn.py,sha256=7nOmSfLgrb3Zq-xImQEN4-My6NqHZVat7GoMbpijVn4,1852 
-docutils/languages/zh_tw.py,sha256=KJS8-gHwJVUdW7NOjgJQPb6iWkXtBWTT0JrRBV7BYTU,2112 -docutils/nodes.py,sha256=psy0zIro734Gsk7PGIqWqx0Ax_5AB0mBHC0J4KYduHo,80628 -docutils/parsers/__init__.py,sha256=NenSsWynQ-HU858BTDg0dMIX7ErS_qrAJot8KdtHyiU,3724 -docutils/parsers/__pycache__/__init__.cpython-310.pyc,, -docutils/parsers/__pycache__/commonmark_wrapper.cpython-310.pyc,, -docutils/parsers/__pycache__/null.cpython-310.pyc,, -docutils/parsers/__pycache__/recommonmark_wrapper.cpython-310.pyc,, -docutils/parsers/commonmark_wrapper.py,sha256=UcBtp5AcOSQRq-0-UsSZcnFMWa14TULD8kayhp4A0eY,1762 -docutils/parsers/null.py,sha256=LtO7n-E6lNOs4mLXZ2SiShB3C630SoJ80ugd9fh1vXI,445 -docutils/parsers/recommonmark_wrapper.py,sha256=SbFSCvBeQRH-fDQczHgK2qF_HwZ3gQsJx_CY2txX43s,5426 -docutils/parsers/rst/__init__.py,sha256=EydGQTcJeAmJQbCwPEFk77kA7Nio-Vqs7R0NMNkcb_s,15954 -docutils/parsers/rst/__pycache__/__init__.cpython-310.pyc,, -docutils/parsers/rst/__pycache__/roles.cpython-310.pyc,, -docutils/parsers/rst/__pycache__/states.cpython-310.pyc,, -docutils/parsers/rst/__pycache__/tableparser.cpython-310.pyc,, -docutils/parsers/rst/directives/__init__.py,sha256=82wwJQW7spmBVtko7E-a62LUXYfATJg3hSl6CZNrUus,14812 -docutils/parsers/rst/directives/__pycache__/__init__.cpython-310.pyc,, -docutils/parsers/rst/directives/__pycache__/admonitions.cpython-310.pyc,, -docutils/parsers/rst/directives/__pycache__/body.cpython-310.pyc,, -docutils/parsers/rst/directives/__pycache__/html.cpython-310.pyc,, -docutils/parsers/rst/directives/__pycache__/images.cpython-310.pyc,, -docutils/parsers/rst/directives/__pycache__/misc.cpython-310.pyc,, -docutils/parsers/rst/directives/__pycache__/parts.cpython-310.pyc,, -docutils/parsers/rst/directives/__pycache__/references.cpython-310.pyc,, -docutils/parsers/rst/directives/__pycache__/tables.cpython-310.pyc,, -docutils/parsers/rst/directives/admonitions.py,sha256=44OwQdPrDKD5VnCd-pPFtEzJs3U1uF-MeS3iWMmCWwU,2526 -docutils/parsers/rst/directives/body.py,sha256=lRClR4ljNiXntJfojN0HcaNpqH1m2GpwsOMS3JVNJK8,9939 -docutils/parsers/rst/directives/html.py,sha256=adxIFdnOHpqH0QSeXT7utBSy7FKWV0x18zbnXtmmuhs,695 -docutils/parsers/rst/directives/images.py,sha256=amQeXFb31hHlyjf15vgF2Sfe3OGCGzLaZAHxfsWSmOw,7265 -docutils/parsers/rst/directives/misc.py,sha256=9mc7erT0KvKHWR4_i5Uz2FhBk2pLd36fMCabyF2ClLA,26700 -docutils/parsers/rst/directives/parts.py,sha256=m5YOwZoPawR6I-Y3Q7CN9wGUzWmXa9v-Hvglh2E5ffQ,4247 -docutils/parsers/rst/directives/references.py,sha256=1Y1yhe_O2PqLtQUSly-ny291nrQKJgQiO4Hu7Xew9Zo,831 -docutils/parsers/rst/directives/tables.py,sha256=PRwO-lA1I93JAXR7qGDQ6n2cT5UYA8JQHdymHOdLFdg,23470 -docutils/parsers/rst/include/README.txt,sha256=R3Y-9wDzYQ0jOhj9FAlwG6hRRhHcEWOZcn2hMF1DeVg,670 -docutils/parsers/rst/include/isoamsa.txt,sha256=ZqGuK-R-yIxa2YDSREt48DFxc8fpF-HX51eiCKXCPp4,10925 -docutils/parsers/rst/include/isoamsb.txt,sha256=3CK8um9WjhPMVgEAbeI16rk91IzWqWXFbRJC44InP3A,7242 -docutils/parsers/rst/include/isoamsc.txt,sha256=XCI2ubAKaO-eOQj87hbBMeYpkqHvc2b2daUCS9ekUzU,1723 -docutils/parsers/rst/include/isoamsn.txt,sha256=Wx54SjZGeYVEB3oNnRi7eGYHEjROZiBUFhQAQDxcVMQ,6721 -docutils/parsers/rst/include/isoamso.txt,sha256=RFxHs5s8DtMgvDaeArmwnSZP_QN20KssvW5f6KMohYA,3825 -docutils/parsers/rst/include/isoamsr.txt,sha256=TLH3gNugqSX3-tH6gDNcgIgbGiKe_GukwZ8U1MIeJCQ,11763 -docutils/parsers/rst/include/isobox.txt,sha256=NORZqqDIewr0-CPoVWqVfTbCVrGZOqY87Crn8O4OUoo,3101 -docutils/parsers/rst/include/isocyr1.txt,sha256=B2DWWIEZ8aJ-scOBP9pbrsKYEmnNF8VZ9e9Mut2MZzU,4241 
-docutils/parsers/rst/include/isocyr2.txt,sha256=t52cY0R-9bnkWiQPXW1NYDzO4ueE6ogUF9Ho4ARHg7Q,1882 -docutils/parsers/rst/include/isodia.txt,sha256=VMg8jI2IQogISrpiTS3L88TntxiMfS0cElsrpxZ1FAI,869 -docutils/parsers/rst/include/isogrk1.txt,sha256=DkJc-K_nTh-WDhfOQIRMdQ4aUnsYKb_etyEEJfU8SG4,3010 -docutils/parsers/rst/include/isogrk2.txt,sha256=0x8w_DgroVISgsTLUOuyLZNzDLThcnti27T7T7DxL7g,1705 -docutils/parsers/rst/include/isogrk3.txt,sha256=8b7gQSKtw4yhLEVMZ6vH8VaToiZMM2_kD2snlqfeIQE,2880 -docutils/parsers/rst/include/isogrk4-wide.txt,sha256=RAdw43c5ZAUBWu8MO_lJDmXVV0cGHhVrky0H_bq1eEs,3035 -docutils/parsers/rst/include/isogrk4.txt,sha256=FQxEZAJu2d_RX3G3PfHm6JlC_1osHQoheFA886MLeQk,372 -docutils/parsers/rst/include/isolat1.txt,sha256=d4dBGSPosghudIhZfMPNzsBJKvH1nyVyi6m5p0oW_HY,4397 -docutils/parsers/rst/include/isolat2.txt,sha256=2RMWwHB9djHvsdnKSv2dSNHlBc50P0JYF1DRcK3HW8s,8466 -docutils/parsers/rst/include/isomfrk-wide.txt,sha256=RsYrcq3mX-CMuV6oijCIfUTnUe8Z6w0PG0ephU9isBM,3334 -docutils/parsers/rst/include/isomfrk.txt,sha256=Y40ZXO1GLLzHezKEJJ8w8OBFtHGwtdjErI_062abwC4,519 -docutils/parsers/rst/include/isomopf-wide.txt,sha256=nrhNkzw15HdEA-Gf8T7yatCbs5b7z7q7T6SCTNLByJw,1931 -docutils/parsers/rst/include/isomopf.txt,sha256=l9rTXrdZWf2RchhPr4Oi2M-4yZYLtVH-7kdOmCPzY_M,639 -docutils/parsers/rst/include/isomscr-wide.txt,sha256=VNfKzET1n08k2PUeK9UeDamgP0j8iKpaorregQbaP3w,3231 -docutils/parsers/rst/include/isomscr.txt,sha256=EBWiVvLZYhm9e2c5i7T4Ur6i1WODl_BLiHjcWA_C45g,776 -docutils/parsers/rst/include/isonum.txt,sha256=yg4P9UxBM-72JRGkB4KVdjmRPyBWLvkOlkelh8quDzc,4066 -docutils/parsers/rst/include/isopub.txt,sha256=BFkr5rRRFuYM7a19WPP7lfBpUKftkYKZjnM2SHLtzwY,4613 -docutils/parsers/rst/include/isotech.txt,sha256=2WGt7TSBeRMr2m2DBlY-xiVjxeiNXdTZJb1DJdtgYKg,9726 -docutils/parsers/rst/include/mmlalias.txt,sha256=jQ4IbZwZAJ9rXmb7De77DYgdIlMsWA-s8uZf42EYtFU,45428 -docutils/parsers/rst/include/mmlextra-wide.txt,sha256=Myj4APWltVYohIK7f1v8urJuDVT2_I_U7rasnBCTsYY,9010 -docutils/parsers/rst/include/mmlextra.txt,sha256=DfWtgBA6Bn4TzlZokxTu5vp7zIf1hqUeaeGpYbfHSAg,6800 -docutils/parsers/rst/include/s5defs.txt,sha256=_5JOMpDtaufiZbdxh6QKpICqLvGpB9cypHM-SEt3sKA,1036 -docutils/parsers/rst/include/xhtml1-lat1.txt,sha256=ht_IZrejaCfgG95sfLNfCu1WAzU4LwpkWgzRbZ_6OA4,6112 -docutils/parsers/rst/include/xhtml1-special.txt,sha256=u4YARKjTrICRTtqlMDDOmpYR8xe-DKDRiNjzmXQs7gc,1945 -docutils/parsers/rst/include/xhtml1-symbol.txt,sha256=e6GP5rkmSNcXusRBJkKf2LSbSEyd1oFXJG_WBCYBKE8,7028 -docutils/parsers/rst/languages/__init__.py,sha256=bAE-YQUQ95QwYJVPnWcjz5bw6LOfW5Pgbhwmwp01-OY,1222 -docutils/parsers/rst/languages/__pycache__/__init__.cpython-310.pyc,, -docutils/parsers/rst/languages/__pycache__/af.cpython-310.pyc,, -docutils/parsers/rst/languages/__pycache__/ar.cpython-310.pyc,, -docutils/parsers/rst/languages/__pycache__/ca.cpython-310.pyc,, -docutils/parsers/rst/languages/__pycache__/cs.cpython-310.pyc,, -docutils/parsers/rst/languages/__pycache__/da.cpython-310.pyc,, -docutils/parsers/rst/languages/__pycache__/de.cpython-310.pyc,, -docutils/parsers/rst/languages/__pycache__/en.cpython-310.pyc,, -docutils/parsers/rst/languages/__pycache__/eo.cpython-310.pyc,, -docutils/parsers/rst/languages/__pycache__/es.cpython-310.pyc,, -docutils/parsers/rst/languages/__pycache__/fa.cpython-310.pyc,, -docutils/parsers/rst/languages/__pycache__/fi.cpython-310.pyc,, -docutils/parsers/rst/languages/__pycache__/fr.cpython-310.pyc,, -docutils/parsers/rst/languages/__pycache__/gl.cpython-310.pyc,, 
-docutils/parsers/rst/languages/__pycache__/he.cpython-310.pyc,, -docutils/parsers/rst/languages/__pycache__/it.cpython-310.pyc,, -docutils/parsers/rst/languages/__pycache__/ja.cpython-310.pyc,, -docutils/parsers/rst/languages/__pycache__/ka.cpython-310.pyc,, -docutils/parsers/rst/languages/__pycache__/ko.cpython-310.pyc,, -docutils/parsers/rst/languages/__pycache__/lt.cpython-310.pyc,, -docutils/parsers/rst/languages/__pycache__/lv.cpython-310.pyc,, -docutils/parsers/rst/languages/__pycache__/nl.cpython-310.pyc,, -docutils/parsers/rst/languages/__pycache__/pl.cpython-310.pyc,, -docutils/parsers/rst/languages/__pycache__/pt_br.cpython-310.pyc,, -docutils/parsers/rst/languages/__pycache__/ru.cpython-310.pyc,, -docutils/parsers/rst/languages/__pycache__/sk.cpython-310.pyc,, -docutils/parsers/rst/languages/__pycache__/sv.cpython-310.pyc,, -docutils/parsers/rst/languages/__pycache__/uk.cpython-310.pyc,, -docutils/parsers/rst/languages/__pycache__/zh_cn.cpython-310.pyc,, -docutils/parsers/rst/languages/__pycache__/zh_tw.cpython-310.pyc,, -docutils/parsers/rst/languages/af.py,sha256=MYGuM5yX_bx83imYLMJreUelpx3c1fcMmLhEu5tyQQI,3760 -docutils/parsers/rst/languages/ar.py,sha256=XpIY3SM10ccvFrO_j__REi_ITvLxSmGuf_y0Dc-Z_-g,3051 -docutils/parsers/rst/languages/ca.py,sha256=atmy4uqx3eVNvnKJwD0HuY0JSRG7uJfo7-AXrRgWg9g,4318 -docutils/parsers/rst/languages/cs.py,sha256=UXKr7zpl6UY31mBtCaNF-FBSsGVvWC59dx7KkwnIbTk,4760 -docutils/parsers/rst/languages/da.py,sha256=tQfxUwhlJohtnfKscjY0PUnwDYF2M-6Tu08EcXy7c6Q,3752 -docutils/parsers/rst/languages/de.py,sha256=nTIYDpKgOf28OJkcJbnuRvTOeLG6nDkfXDwW-MJV3Ls,3564 -docutils/parsers/rst/languages/en.py,sha256=9dmxPb_sYYsY4vEg8B_IBGQhEytcRvXtp5iOrzwmaHI,3516 -docutils/parsers/rst/languages/eo.py,sha256=ozUCNH5zKhnmYXxd5nZRu3TmF_aVeCAVzSLleRJWDf8,3751 -docutils/parsers/rst/languages/es.py,sha256=gFFm2EwAtAZOJUQLk03QUQdV0E7DKnct0amy6gpaNs4,4034 -docutils/parsers/rst/languages/fa.py,sha256=fap5ifAVKvqHGo3S2_G4mMrA8fvOiQ3BPi4z8Zlf94M,3171 -docutils/parsers/rst/languages/fi.py,sha256=xsxxUTOqVj1P1XkCQsPPkI-p1-ELE0QwMA9Oy-QPDHg,3536 -docutils/parsers/rst/languages/fr.py,sha256=V9_k_lbLkBSWuAbNeW2l4_Bv4EzaK4Dbwih0fJYIi0U,3816 -docutils/parsers/rst/languages/gl.py,sha256=v-zXIwcRYxgu8mZJmlexv73pMtwWlSB4yGVcYeENQP4,3389 -docutils/parsers/rst/languages/he.py,sha256=_UHQHJQx7EAGvMlURHJ8kDk_tcG1uv-7eTl7a4XEGgU,3293 -docutils/parsers/rst/languages/it.py,sha256=ZR4TsyM3611ASRQelyA7rx0zjUgMVM6gzrym6Bqrj40,3353 -docutils/parsers/rst/languages/ja.py,sha256=_PlI9H5u300_VlYvW52UvSy_MJ5OLlMmoNo1Pc6Pib8,3776 -docutils/parsers/rst/languages/ka.py,sha256=Xxv37fZnLqIbo1vmT1--Kj7etRxlKPgHcgfgiBvn9Xk,4141 -docutils/parsers/rst/languages/ko.py,sha256=IpSyXjTkoij0C7859i1p0iSgz9BQYpdF3fNTS6qcQ1A,3377 -docutils/parsers/rst/languages/lt.py,sha256=PHzrQAP-ZAcboCyc1jvHI5l0CMQnrbBEZ285MWJVy9w,3519 -docutils/parsers/rst/languages/lv.py,sha256=pfTCSDtQkOWYVTnTFjWhUZpnJJW7yuNzVW9xvBgP_u4,3376 -docutils/parsers/rst/languages/nl.py,sha256=dQ2Hex8oQS8f075B6nWZjeSQ0Rzjnl87oS8LauqBe1Y,3794 -docutils/parsers/rst/languages/pl.py,sha256=TRNum6wVR2XQY_Y_uhKqXNHdTBeaYgXwWaqokBaO-N0,3240 -docutils/parsers/rst/languages/pt_br.py,sha256=-cFMXIp8_6cryE7Q7smlElf4QegPqjmDRya8VxM1CQQ,3870 -docutils/parsers/rst/languages/ru.py,sha256=t5K4rlNKIcoOchVeC68sjJ8mr0Cox_BeyCvb1MDm9tQ,3398 -docutils/parsers/rst/languages/sk.py,sha256=HSdFwU6j9rZShkXyGPBc749TrUUB7tBSz-aw0f4UAzs,3943 -docutils/parsers/rst/languages/sv.py,sha256=HxyE3k93qgJw-aD6Ea3Tmfqz3QX1Q7AJIaU65MrkJxc,3261 
-docutils/parsers/rst/languages/uk.py,sha256=LBUHWeKwFdEjfif4CG7LtMhBidl4ln6u4oStJ2omVsQ,3441 -docutils/parsers/rst/languages/zh_cn.py,sha256=YEC3UmY43NaI7s_6DatL_VQugiHvSUbBrHubgnMfYbI,3925 -docutils/parsers/rst/languages/zh_tw.py,sha256=icmm58W9wO69L6p5dwTQwjj45uxer2ZrJCqvBbERnkQ,5160 -docutils/parsers/rst/roles.py,sha256=GLJVffa-S_Fn3u6RTxdXJm1MXd1AQ7IxbCq99MmxIzQ,16119 -docutils/parsers/rst/states.py,sha256=ziKtByqhX2TAjRg4cOgGyQ6729W6gmVuaUUMKwKtf1k,133123 -docutils/parsers/rst/tableparser.py,sha256=D2jtx00mTdHsn20TKk9GJmCilwISeKHe9VoSy8PYELs,20912 -docutils/readers/__init__.py,sha256=rgBQZvedeYA8UZb2pYKoKqYWAjvUi8ZrzG1kANjnwC4,3520 -docutils/readers/__pycache__/__init__.cpython-310.pyc,, -docutils/readers/__pycache__/doctree.cpython-310.pyc,, -docutils/readers/__pycache__/pep.cpython-310.pyc,, -docutils/readers/__pycache__/standalone.cpython-310.pyc,, -docutils/readers/doctree.py,sha256=9QNTk_8x46sDkcSjzQiyFZxN-m9CBO3XA5bLar7OA0Q,1607 -docutils/readers/pep.py,sha256=m5RnOjKfMnmwHXU8vsr0HfZlqcqH2Yn4MXspjLO0GYE,1523 -docutils/readers/standalone.py,sha256=qwVRPiBto5CvE3m-2sZ2a2btlVl8aSbT4J_z7uIr3yM,2334 -docutils/statemachine.py,sha256=fKzVodfw6BEZQf4HquXfpBfNR8N5E8uzVeDoJA5eFRw,56956 -docutils/transforms/__init__.py,sha256=LTftDJ01XJReLdKMyuJrB_ImFr0fHOaZGqzE1VnXg9c,6968 -docutils/transforms/__pycache__/__init__.cpython-310.pyc,, -docutils/transforms/__pycache__/components.cpython-310.pyc,, -docutils/transforms/__pycache__/frontmatter.cpython-310.pyc,, -docutils/transforms/__pycache__/misc.cpython-310.pyc,, -docutils/transforms/__pycache__/parts.cpython-310.pyc,, -docutils/transforms/__pycache__/peps.cpython-310.pyc,, -docutils/transforms/__pycache__/references.cpython-310.pyc,, -docutils/transforms/__pycache__/universal.cpython-310.pyc,, -docutils/transforms/__pycache__/writer_aux.cpython-310.pyc,, -docutils/transforms/components.py,sha256=4qO1txFE98PJa4tqKCpnlnQi_UIEQDU4mMVuzwgqizY,2151 -docutils/transforms/frontmatter.py,sha256=zgWHDRBnv1Flv1Ddq5xEK6KmmGSgRyBwOzOHT8TfOAk,20809 -docutils/transforms/misc.py,sha256=BhyjLyE8j5QRELg7CcT0hEwdvnUJugUqQYyckzXDlh0,4873 -docutils/transforms/parts.py,sha256=Z_72Wf2oqchCcTIPTOgBVS1auAtA7POwJww1EaoeQPk,6912 -docutils/transforms/peps.py,sha256=7TMhgwqyopkuUetITpsByN-t0ZT8bXJOqaClUwliF0U,11111 -docutils/transforms/references.py,sha256=fhgJ6KDifnAFtPZHw6srdH76A5cepjfejzSKNF_2McA,36821 -docutils/transforms/universal.py,sha256=mhJkmCIhYQFqBAzitGyskSjGTYCwAyvSQzld6p5aLEE,12381 -docutils/transforms/writer_aux.py,sha256=G_XXqiAcSqWnFJKWRhQ8oE0jWcZPjLjyW2QBThE1Xes,3057 -docutils/utils/__init__.py,sha256=8G-da95eq3Ka72X1U0vAgNR4UZJYzS2t4o_MpfnN5c0,30381 -docutils/utils/__pycache__/__init__.cpython-310.pyc,, -docutils/utils/__pycache__/code_analyzer.cpython-310.pyc,, -docutils/utils/__pycache__/error_reporting.cpython-310.pyc,, -docutils/utils/__pycache__/punctuation_chars.cpython-310.pyc,, -docutils/utils/__pycache__/roman.cpython-310.pyc,, -docutils/utils/__pycache__/smartquotes.cpython-310.pyc,, -docutils/utils/__pycache__/urischemes.cpython-310.pyc,, -docutils/utils/code_analyzer.py,sha256=O6eUnWPYyZFXtTtk-NuNVv4k7L9sb5BJGIjacSZT4Tw,4920 -docutils/utils/error_reporting.py,sha256=vjHvpHu0BSoE2Ltm4FyYfLUsV5VWAjBbRoniWrJ7CR8,8105 -docutils/utils/math/__init__.py,sha256=DgTt07qT_uN2-9Tz8HNKkRqw5P3KIj81VurrJ2J_Omw,2553 -docutils/utils/math/__pycache__/__init__.cpython-310.pyc,, -docutils/utils/math/__pycache__/latex2mathml.cpython-310.pyc,, -docutils/utils/math/__pycache__/math2html.cpython-310.pyc,, 
-docutils/utils/math/__pycache__/mathalphabet2unichar.cpython-310.pyc,, -docutils/utils/math/__pycache__/mathml_elements.cpython-310.pyc,, -docutils/utils/math/__pycache__/tex2mathml_extern.cpython-310.pyc,, -docutils/utils/math/__pycache__/tex2unichar.cpython-310.pyc,, -docutils/utils/math/__pycache__/unichar2tex.cpython-310.pyc,, -docutils/utils/math/latex2mathml.py,sha256=ELwAGaYg4kx2E7_AE_rSSSZFID4QMIV8sPAIQ2PsdVE,46961 -docutils/utils/math/math2html.py,sha256=VfZ1ceFmNKu4yvF72j4xsgH_cjtOkeCevPC3UYw4enQ,107808 -docutils/utils/math/mathalphabet2unichar.py,sha256=bsgWbE09bIyXfME-TyCms-0fesWVoxXMErQVVeYOoSo,56217 -docutils/utils/math/mathml_elements.py,sha256=zMoCC2KnjAxmSGu4ZUtCZAEuyWme80pCPTn66E44gcY,14564 -docutils/utils/math/tex2mathml_extern.py,sha256=lo4n8GFXWcpe-yw8TPPahzHfhXU9_jv2oo-Sm_TasE4,9436 -docutils/utils/math/tex2unichar.py,sha256=_A2K7ap6o6cb1LY9qCYtsfP-zk5eeu_Hu6LIl6j7LnA,37497 -docutils/utils/math/unichar2tex.py,sha256=Q1gvqUGWprAjMfpGfujAu4ldjJevoFG2moM46SWy6WU,18393 -docutils/utils/punctuation_chars.py,sha256=OMY7gEwy_TIo1MnjZ9cnT3wA9LmSApldJ3lSJZbEtuE,5747 -docutils/utils/roman.py,sha256=JCF_zBcbauEuVvDPuNTR1ZBxFgyNEUr6jZ2JwZwnQ9A,4280 -docutils/utils/smartquotes.py,sha256=KW27gHCkAhgDCdKisPbAu85yJjc0j2FcssSwzvPlCCI,39136 -docutils/utils/urischemes.py,sha256=5dcLKn-Xo5ldDCcdTtO7l8UM_Y3KZrcjyR3A8RWp91U,6260 -docutils/writers/__init__.py,sha256=sJuZewwKkiJHl0VkAtx-geKluXhd_34H9o3sg4Yp4AU,4945 -docutils/writers/__pycache__/__init__.cpython-310.pyc,, -docutils/writers/__pycache__/_html_base.cpython-310.pyc,, -docutils/writers/__pycache__/docutils_xml.cpython-310.pyc,, -docutils/writers/__pycache__/manpage.cpython-310.pyc,, -docutils/writers/__pycache__/null.cpython-310.pyc,, -docutils/writers/__pycache__/pseudoxml.cpython-310.pyc,, -docutils/writers/_html_base.py,sha256=pQqROc1bBMw_ozqT7T8jNtkHh7F7EWcxZUAbLDIvMrY,75401 -docutils/writers/docutils_xml.py,sha256=wi786IvTZqbojdiIxx-vZWS76BE0cUYJQ5RSqfJnkXc,6851 -docutils/writers/html4css1/__init__.py,sha256=KkCwD339fBkAsvUpO448_i2YcSyZ04bloCTr932wTJ8,38125 -docutils/writers/html4css1/__pycache__/__init__.cpython-310.pyc,, -docutils/writers/html4css1/html4css1.css,sha256=-Uk0s5gu8-k8f4f34R-bSIpcSzqkVGa5JN6FGSh7QbY,7300 -docutils/writers/html4css1/template.txt,sha256=HDzUUyAv7gT4ewGQTqfOE2_9HOVyGu9-wCRgsmoCmjQ,114 -docutils/writers/html5_polyglot/__init__.py,sha256=9wqpd5R89Ewug52mD3g_9tGhyIr5dSEv0vLC0oA8ZOw,15635 -docutils/writers/html5_polyglot/__pycache__/__init__.cpython-310.pyc,, -docutils/writers/html5_polyglot/italic-field-names.css,sha256=R9vxBFOn5NiQXWOLpghskCrn0RzMQEXy6w4EDDSNPXs,1145 -docutils/writers/html5_polyglot/math.css,sha256=eKRtxtJvYDzuQybRk6Ln_k9tr6TCoFhNqek40C9ErdM,6219 -docutils/writers/html5_polyglot/minimal.css,sha256=w78WllgDmLFNc-PwZRiD5_fhYB0zxAq7mGSh3c94TNM,8213 -docutils/writers/html5_polyglot/plain.css,sha256=-UeuH9qUxiAlETSiNjEm4Q1vAtpxD0QWwtN81pVo31Y,7552 -docutils/writers/html5_polyglot/responsive.css,sha256=PYhXgdnYR9hfGobvUMxi8OSPGr-KQUd2ucubuBRm5qg,11739 -docutils/writers/html5_polyglot/template.txt,sha256=HDzUUyAv7gT4ewGQTqfOE2_9HOVyGu9-wCRgsmoCmjQ,114 -docutils/writers/html5_polyglot/tuftig.css,sha256=ACotB-KbCWzGsanW1GncNwgMxa21vapRaAtv-NKdh8M,12023 -docutils/writers/latex2e/__init__.py,sha256=xs566hs7lq1MEteKh3PhCVj7jGdW3ivB_WNlrQ5pHVY,138165 -docutils/writers/latex2e/__pycache__/__init__.cpython-310.pyc,, -docutils/writers/latex2e/default.tex,sha256=JcaJnrdmKE9vqwATl9dSHOsnGnjF2dLX4vsiu7kvWEI,422 
-docutils/writers/latex2e/docutils.sty,sha256=dgnu97-E5w_rOrdaKBYAw_PwnMKASOFLXV08k17DABQ,5472 -docutils/writers/latex2e/titlepage.tex,sha256=ampRiXY22vtJ22UPMv61mmPubJPItdrSkl9MSAOftpQ,480 -docutils/writers/latex2e/titlingpage.tex,sha256=Pa9ixIf9Yy6RLljbTPUEgmrQwRzYsyzrxl4KSrHA37E,424 -docutils/writers/latex2e/xelatex.tex,sha256=NbrtTphygnEaTmyJEz5HwkNuWCbV1ijlh_1M7_TXLu0,672 -docutils/writers/manpage.py,sha256=REpVzDREXjLJ8ChcBpzbk3Wv8MJ45QWJuPRLB7s5nt8,38128 -docutils/writers/null.py,sha256=Ue7kizk6_1GGrCmYiltg9MrYNetMyHM-yvQvNRrclN8,568 -docutils/writers/odf_odt/__init__.py,sha256=uW_W6F_zMdbcqEmo4swfou2elk4XG-ShZusngP1acS4,132081 -docutils/writers/odf_odt/__pycache__/__init__.cpython-310.pyc,, -docutils/writers/odf_odt/__pycache__/prepstyles.cpython-310.pyc,, -docutils/writers/odf_odt/__pycache__/pygmentsformatter.cpython-310.pyc,, -docutils/writers/odf_odt/prepstyles.py,sha256=XwC29yEYGKpNATr5yuGJH_WYkPBBUAkVr8fLaMEo6Yo,2142 -docutils/writers/odf_odt/pygmentsformatter.py,sha256=j1fMQPdv5fdczPkSKbyYjoh66G8Z_MZhTN52_XfRhHc,4681 -docutils/writers/odf_odt/styles.odt,sha256=xKv9z2sd1qNxAH28X-5st5JuDZeTw6jyDOxXohsFrKY,16500 -docutils/writers/pep_html/__init__.py,sha256=mrApLw1JROL32XM-eXrDjYnQq_DTS3_KqTNOEoiD55k,3503 -docutils/writers/pep_html/__pycache__/__init__.cpython-310.pyc,, -docutils/writers/pep_html/pep.css,sha256=AyHZfudmKKTu-ZmyoLaihM_e5bD3_gCO51hG_NPEDA8,6367 -docutils/writers/pep_html/template.txt,sha256=SPc44ICSNgps08fDtVaRlu6glPPwcCcx9mqybx5u6W4,1001 -docutils/writers/pseudoxml.py,sha256=gjnBxBIXYNdnQYt_XTXljV3fcp9dW1Ek0LE5VkGIvJk,1032 -docutils/writers/s5_html/__init__.py,sha256=UahNZB0U6WSRb3jR6x1MXy4A4ud2bd8lduqG9Nn5Vao,14712 -docutils/writers/s5_html/__pycache__/__init__.cpython-310.pyc,, -docutils/writers/s5_html/themes/README.txt,sha256=wYnu3iomgGD6odpZOtWTzOynI1dfIGE6AVF1MDR0FVY,278 -docutils/writers/s5_html/themes/big-black/__base__,sha256=WeKnChXCPkrXDs7Xr-Qnf1i-bgFjkeaKJ-ilXV0R5lM,38 -docutils/writers/s5_html/themes/big-black/framing.css,sha256=DtEo7Fti9JARMLmcCx0NIfir7QRR24_WN3UbG-EyH64,910 -docutils/writers/s5_html/themes/big-black/pretty.css,sha256=UP9r7eGX0qEFCIDyKcT5bcazMxCw43O2KSrs2ebBPwI,3605 -docutils/writers/s5_html/themes/big-white/framing.css,sha256=meBByeaKIduudfFCDxVw4uzSOj8q_ZJArnwp8oZ1S8g,905 -docutils/writers/s5_html/themes/big-white/pretty.css,sha256=RlQ7CZuN-WMrR8CmCeQ-U8WVmZj769z2zx2FfLwTS48,3565 -docutils/writers/s5_html/themes/default/framing.css,sha256=Sbh5wryeioxDMZ-kJFwzKNziO-3CRvLBMG7rcJjTLmU,1002 -docutils/writers/s5_html/themes/default/opera.css,sha256=guPZOg_BINv-LjV9_IAM7ILFQ-fKALNjlP1i06e5dmA,261 -docutils/writers/s5_html/themes/default/outline.css,sha256=z3ACJiW3_gnG8XFvX602PMTYvKhbRybqCeoWl3O_pA0,648 -docutils/writers/s5_html/themes/default/pretty.css,sha256=iT_51bIPLTk1hFFs3hCarnyJqtbB4I86BNrxlT1r3eo,4383 -docutils/writers/s5_html/themes/default/print.css,sha256=INhYRMsY7y2wd9p7tqjcDWBREXHUMO-2ApAWvITyetI,818 -docutils/writers/s5_html/themes/default/s5-core.css,sha256=D4WDPb581O-_G5jhzpAIwI88B1Zi8y3nWBB8rCxgzlg,450 -docutils/writers/s5_html/themes/default/slides.css,sha256=VKYQ1Oe8lZ8LHxzPqJiU79J0z295nkmIbzsXL-N_dfQ,283 -docutils/writers/s5_html/themes/default/slides.js,sha256=5BXUM5jSWu9hUQSVhGZhMTEvkdCYgqrOJO3ljwDgxWI,15801 -docutils/writers/s5_html/themes/medium-black/__base__,sha256=822LJG-LrdBZY6CA7wsLFCFzsYfxbyz2mr1j6rpb1UA,41 -docutils/writers/s5_html/themes/medium-black/pretty.css,sha256=OdL1xJ9f_FE1pmS7X0s0yxyIl1n2vUBQaGOcJrT2svg,4029 
-docutils/writers/s5_html/themes/medium-white/framing.css,sha256=BF5YnRLGRhobO06xDet-0KZYpR10IgRjRbULPVm3PMM,943 -docutils/writers/s5_html/themes/medium-white/pretty.css,sha256=Zm-Pgk3SLAGmGTRF27nrqvpBb_LH2yQ5FIpDPM3p0Y0,3989 -docutils/writers/s5_html/themes/small-black/__base__,sha256=WmiB80z49RfMsy_7tFI042AfUgyztL5OXI3tap9EfQM,40 -docutils/writers/s5_html/themes/small-black/pretty.css,sha256=fmc73kx-zOp0jbiy4GAmpw2Xdz9Q_-WzebsgDJWUJos,4028 -docutils/writers/s5_html/themes/small-white/framing.css,sha256=qwNUgzqnrXgoX47SddbVIKEZwQDjGnTGA468jHHIXqc,940 -docutils/writers/s5_html/themes/small-white/pretty.css,sha256=qU8WOhY8TT6ZY6cXKXABb7T7JgpJQORzTZJhuAm0gGg,3999 -docutils/writers/xetex/__init__.py,sha256=o25hpaSPL9erEUe18GIzaVbP2QuYtjZ0SwSkDeXfZ6k,5736 -docutils/writers/xetex/__pycache__/__init__.cpython-310.pyc,, diff --git a/env/lib/python3.10/site-packages/docutils-0.21.2.dist-info/WHEEL b/env/lib/python3.10/site-packages/docutils-0.21.2.dist-info/WHEEL deleted file mode 100644 index 3b5e64b..0000000 --- a/env/lib/python3.10/site-packages/docutils-0.21.2.dist-info/WHEEL +++ /dev/null @@ -1,4 +0,0 @@ -Wheel-Version: 1.0 -Generator: flit 3.9.0 -Root-Is-Purelib: true -Tag: py3-none-any diff --git a/env/lib/python3.10/site-packages/docutils-0.21.2.dist-info/entry_points.txt b/env/lib/python3.10/site-packages/docutils-0.21.2.dist-info/entry_points.txt deleted file mode 100644 index 3e32d4e..0000000 --- a/env/lib/python3.10/site-packages/docutils-0.21.2.dist-info/entry_points.txt +++ /dev/null @@ -1,13 +0,0 @@ -[console_scripts] -docutils=docutils.__main__:main -rst2html=docutils.core:rst2html -rst2html4=docutils.core:rst2html4 -rst2html5=docutils.core:rst2html5 -rst2latex=docutils.core:rst2latex -rst2man=docutils.core:rst2man -rst2odt=docutils.core:rst2odt -rst2pseudoxml=docutils.core:rst2pseudoxml -rst2s5=docutils.core:rst2s5 -rst2xetex=docutils.core:rst2xetex -rst2xml=docutils.core:rst2xml - diff --git a/env/lib/python3.10/site-packages/docutils/__init__.py b/env/lib/python3.10/site-packages/docutils/__init__.py deleted file mode 100644 index 16af410..0000000 --- a/env/lib/python3.10/site-packages/docutils/__init__.py +++ /dev/null @@ -1,291 +0,0 @@ -# $Id: __init__.py 9649 2024-04-23 18:54:26Z grubert $ -# Author: David Goodger -# Copyright: This module has been placed in the public domain. - -""" -This is the Docutils (Python Documentation Utilities) package. - -Package Structure -================= - -Modules: - -- __init__.py: Contains component base classes, exception classes, and - Docutils version information. - -- core.py: Contains the ``Publisher`` class and ``publish_*()`` convenience - functions. - -- frontend.py: Runtime settings (command-line interface, configuration files) - processing, for Docutils front-ends. - -- io.py: Provides a uniform API for low-level input and output. - -- nodes.py: Docutils document tree (doctree) node class library. - -- statemachine.py: A finite state machine specialized for - regular-expression-based text filters. - -Subpackages: - -- languages: Language-specific mappings of terms. - -- parsers: Syntax-specific input parser modules or packages. - -- readers: Context-specific input handlers which understand the data - source and manage a parser. - -- transforms: Modules used by readers and writers to modify - the Docutils document tree. - -- utils: Contains the ``Reporter`` system warning class and miscellaneous - utilities used by readers, writers, and transforms. 
- - utils/urischemes.py: Contains a complete mapping of known URI addressing - scheme names to descriptions. - -- utils/math: Contains functions for conversion of mathematical notation - between different formats (LaTeX, MathML, text, ...). - -- writers: Format-specific output translators. -""" - -from collections import namedtuple - -__docformat__ = 'reStructuredText' - -__version__ = '0.21.2' -"""Docutils version identifier (complies with PEP 440):: - - major.minor[.micro][releaselevel[serial]][.dev] - -For version comparison operations, use `__version_info__` (see, below) -rather than parsing the text of `__version__`. - -https://docutils.sourceforge.io/docs/dev/policies.html#version-identification -""" - -__version_details__ = '' -"""Optional extra version details (e.g. 'snapshot 2005-05-29, r3410'). - -For development and release status, use `__version__ and `__version_info__`. -""" - - -class VersionInfo(namedtuple('VersionInfo', - 'major minor micro releaselevel serial release')): - - def __new__(cls, major=0, minor=0, micro=0, - releaselevel='final', serial=0, release=True): - releaselevels = ('alpha', 'beta', 'candidate', 'final') - if releaselevel not in releaselevels: - raise ValueError('releaselevel must be one of %r.' - % (releaselevels, )) - if releaselevel == 'final': - if not release: - raise ValueError('releaselevel "final" must not be used ' - 'with development versions (leads to wrong ' - 'version ordering of the related __version__') - # cf. https://peps.python.org/pep-0440/#summary-of-permitted-suffixes-and-relative-ordering # noqa - if serial != 0: - raise ValueError('"serial" must be 0 for final releases') - - return super().__new__(cls, major, minor, micro, - releaselevel, serial, release) - - def __lt__(self, other): - if isinstance(other, tuple): - other = VersionInfo(*other) - return tuple.__lt__(self, other) - - def __gt__(self, other): - if isinstance(other, tuple): - other = VersionInfo(*other) - return tuple.__gt__(self, other) - - def __le__(self, other): - if isinstance(other, tuple): - other = VersionInfo(*other) - return tuple.__le__(self, other) - - def __ge__(self, other): - if isinstance(other, tuple): - other = VersionInfo(*other) - return tuple.__ge__(self, other) - - -__version_info__ = VersionInfo( - major=0, - minor=21, - micro=2, - releaselevel='final', # one of 'alpha', 'beta', 'candidate', 'final' - serial=0, # pre-release number (0 for final releases and snapshots) - release=True # True for official releases and pre-releases - ) -"""Comprehensive version information tuple. - -https://docutils.sourceforge.io/docs/dev/policies.html#version-identification -""" - - -class ApplicationError(Exception): pass -class DataError(ApplicationError): pass - - -class SettingsSpec: - - """ - Runtime setting specification base class. - - SettingsSpec subclass objects used by `docutils.frontend.OptionParser`. - """ - - # TODO: replace settings_specs with a new data structure - # Backwards compatiblity: - # Drop-in components: - # Sphinx supplies settings_spec in the current format in some places - # Myst parser provides a settings_spec tuple - # - # Sphinx reads a settings_spec in order to set a default value - # in writers/html.py:59 - # https://github.com/sphinx-doc/sphinx/blob/4.x/sphinx/writers/html.py - # This should be changed (before retiring the old format) - # to use `settings_default_overrides` instead. - settings_spec = () - """Runtime settings specification. Override in subclasses. 
- - Defines runtime settings and associated command-line options, as used by - `docutils.frontend.OptionParser`. This is a tuple of: - - - Option group title (string or `None` which implies no group, just a list - of single options). - - - Description (string or `None`). - - - A sequence of option tuples. Each consists of: - - - Help text (string) - - - List of option strings (e.g. ``['-Q', '--quux']``). - - - Dictionary of keyword arguments sent to the OptionParser/OptionGroup - ``add_option`` method. - - Runtime setting names are derived implicitly from long option names - ('--a-setting' becomes ``settings.a_setting``) or explicitly from the - 'dest' keyword argument. - - Most settings will also have a 'validator' keyword & function. The - validator function validates setting values (from configuration files - and command-line option arguments) and converts them to appropriate - types. For example, the ``docutils.frontend.validate_boolean`` - function, **required by all boolean settings**, converts true values - ('1', 'on', 'yes', and 'true') to 1 and false values ('0', 'off', - 'no', 'false', and '') to 0. Validators need only be set once per - setting. See the `docutils.frontend.validate_*` functions. - - See the optparse docs for more details. - - - More triples of group title, description, options, as many times as - needed. Thus, `settings_spec` tuples can be simply concatenated. - """ - - settings_defaults = None - """A dictionary of defaults for settings not in `settings_spec` (internal - settings, intended to be inaccessible by command-line and config file). - Override in subclasses.""" - - settings_default_overrides = None - """A dictionary of auxiliary defaults, to override defaults for settings - defined in other components' `setting_specs`. Override in subclasses.""" - - relative_path_settings = () - """Settings containing filesystem paths. Override in subclasses. - Settings listed here are to be interpreted relative to the current working - directory.""" - - config_section = None - """The name of the config file section specific to this component - (lowercase, no brackets). Override in subclasses.""" - - config_section_dependencies = None - """A list of names of config file sections that are to be applied before - `config_section`, in order (from general to specific). In other words, - the settings in `config_section` are to be overlaid on top of the settings - from these sections. The "general" section is assumed implicitly. - Override in subclasses.""" - - -class TransformSpec: - """ - Runtime transform specification base class. - - Provides the interface to register "transforms" and helper functions - to resolve references with a `docutils.transforms.Transformer`. - - https://docutils.sourceforge.io/docs/ref/transforms.html - """ - - def get_transforms(self): - """Transforms required by this class. Override in subclasses.""" - if self.default_transforms != (): - import warnings - warnings.warn('TransformSpec: the "default_transforms" attribute ' - 'will be removed in Docutils 2.0.\n' - 'Use get_transforms() method instead.', - DeprecationWarning) - return list(self.default_transforms) - return [] - - # Deprecated; for compatibility. - default_transforms = () - - unknown_reference_resolvers = () - """List of functions to try to resolve unknown references. - - Unknown references have a 'refname' attribute which doesn't correspond - to any target in the document. Called when the transforms in - `docutils.transforms.references` are unable to find a correct target. 
- - The list should contain functions which will try to resolve unknown - references, with the following signature:: - - def reference_resolver(node): - '''Returns boolean: true if resolved, false if not.''' - - If the function is able to resolve the reference, it should also remove - the 'refname' attribute and mark the node as resolved:: - - del node['refname'] - node.resolved = 1 - - Each function must have a "priority" attribute which will affect the order - the unknown_reference_resolvers are run:: - - reference_resolver.priority = 100 - - This hook is provided for 3rd party extensions. - Example use case: the `MoinMoin - ReStructured Text Parser` - in ``sandbox/mmgilbe/rst.py``. - """ - - -class Component(SettingsSpec, TransformSpec): - - """Base class for Docutils components.""" - - component_type = None - """Name of the component type ('reader', 'parser', 'writer'). Override in - subclasses.""" - - supported = () - """Name and aliases for this component. Override in subclasses.""" - - def supports(self, format): - """ - Is `format` supported by this component? - - To be used by transforms to ask the dependent component if it supports - a certain input context or output format. - """ - return format in self.supported diff --git a/env/lib/python3.10/site-packages/docutils/__main__.py b/env/lib/python3.10/site-packages/docutils/__main__.py deleted file mode 100755 index ce61489..0000000 --- a/env/lib/python3.10/site-packages/docutils/__main__.py +++ /dev/null @@ -1,96 +0,0 @@ -#!/usr/bin/env python3 -# :Copyright: © 2020, 2022 Günter Milde. -# :License: Released under the terms of the `2-Clause BSD license`_, in short: -# -# Copying and distribution of this file, with or without modification, -# are permitted in any medium without royalty provided the copyright -# notice and this notice are preserved. -# This file is offered as-is, without any warranty. -# -# .. _2-Clause BSD license: https://opensource.org/licenses/BSD-2-Clause -# -# Revision: $Revision: 9107 $ -# Date: $Date: 2022-07-06 15:59:57 +0200 (Mi, 06. Jul 2022) $ - -"""Generic command line interface for the `docutils` package. - -See also -https://docs.python.org/3/library/__main__.html#main-py-in-python-packages -""" - -import argparse -import locale -import sys - -import docutils -from docutils.core import Publisher, publish_cmdline, default_description - - -class CliSettingsSpec(docutils.SettingsSpec): - """Runtime settings & command-line options for the generic CLI. - - Configurable reader, parser, and writer components. - - The "--writer" default will change to 'html' in Docutils 2.0 - when 'html' becomes an alias for the current value 'html5'. - """ - - settings_spec = ( - 'Docutils Application Options', - 'Reader, writer, and parser settings influence the available options. ' - ' Example: use `--help --writer=latex` to see LaTeX writer options. ', - # options: ('help text', [