diff --git a/Splunk_TA_paloalto/README/inputs.conf.spec b/Splunk_TA_paloalto/README/inputs.conf.spec
index 57a6490e..2e6c4f5e 100644
--- a/Splunk_TA_paloalto/README/inputs.conf.spec
+++ b/Splunk_TA_paloalto/README/inputs.conf.spec
@@ -9,8 +9,8 @@ region = 
 python.version = python3
 
 [minemeld_feed://]
-feed_url = 
-indicator_timeout = 
+feed_url = https://
+indicator_timeout = How long to retain indicators (in hours)
 credentials = 
 python.version = python3
 
diff --git a/Splunk_TA_paloalto/Splunk_TA_paloalto.aob_meta b/Splunk_TA_paloalto/Splunk_TA_paloalto.aob_meta
index 6961bd83..d19a62c1 100644
--- a/Splunk_TA_paloalto/Splunk_TA_paloalto.aob_meta
+++ b/Splunk_TA_paloalto/Splunk_TA_paloalto.aob_meta
@@ -1 +1 @@
-{"basic_builder": {"appname": "Splunk_TA_paloalto", "friendly_name": "Palo Alto Networks Add-on for Splunk", "version": "7.0.0", "author": "Palo Alto Networks", "description": "", "theme": "#FFFFFF", "large_icon": "iVBORw0KGgoAAAANSUhEUgAAAEgAAABICAYAAABV7bNHAAAFiklEQVR4nO2bXWgcVRTHtxZKGhMVjY2CjbYo1eI5s5tpGkpe1hdFH3wL3DP74bbFLaRFaEVBkXTrJ7RWH1QQH8S2ULS+CULfBEGlWrDSb8GPtvjVFVJbAvXeweNDsmuy2d05s3t3Z4X5w31L/nPPL+ecOXNnkkjEihUrVqxYsWLFihWrR2U8LPmET9jyYy+ZNJ7zhi2/SKUJp7QC1gqYvdTdljy/1wpYE75nwy8yaXK2VeBoBWwIZ/wMPNqqH+fdEUPwzWJP2G9zz12TVrB9YSCLV2pDWD/OjN9kCMv1/fBgJ2LomGozp3YZwhn2kkmpH0+lBwzhsWaevue83cmYrKl55tQGlXwsyG+urPCSyJPwQDdibFlh4FQbdyF5TyM/PjK5vHFZNVo9Wm5a4VNh4TRr3Jx3RzTht614aoL3o2DQUK1kzhJQnvNQxY9pfDh85vRoJhlyptuFoxWwUXDdZJNpplHUCs/b8NQKP+TNqdujhPO0nUCq64wm+Mimp6/w8UjgaM/ZYTMQo+Bvzo7ey4V0nyY4a8WX8DCXSjd0HY4hfNkuHJxlGsWKP2+ZGAyaewI9yZnuOphEwk5DXgInN/pA7XWYeZkmON4anIgeP4zC3VbhEFytB6ciLrr9RuHnRsE/Uk+f4N1uMqnKOhyFs0zu/UHX5S0Tg0bBdRlwfE4US8Z52GpvMuTssps5WG6WORVxaXKFUfCF0PNNSSy+l8zMZ++nzLysbTia8APrmSM4F+Ki268VnrSaOYSv1pT4l23BsT3nGAXXRJlTSPdpBWdswvEVPFnXg+BQi3DwdZtwtIILnMM1gXCKbr8m+E4I/EVhLM8EQD7KOzetDAEH9lsvKykcy5lTW1ZNYH8mhGO3Ic9ljqCstkwMiuF4zkuSWHwPt4baa1C5acIDVjOHsMw5vDEQTtHt14TnosicOutE3RHA/pwD10RzzlR6wH5ZtXdzMYSf1NK23ZDPiHrO1k23aoIfZGWFz8vgNG/IISAdZRofThjCj+1mDs4yjQ8Hwpm7W4me2rtQVvUX4bmE72HeEP5hyfCUqCHncI34SMNzdojgKHjB8h/6iu9hvrLhVUbBb+2Z4k98ZHJ5IJyi228IfpVljrNLBMdy5sy9lqqZ9jmTWh/mqXmxIfzJHqwNhJPfeJuOaM4Jtxq82OQsrtMKLoakfUxy7ss5XGMUXhb5krNNBEfhK5Yzpxz4QpPJHTIEf4kNBccG8w1ZdreKKHOMguucd0ck105wFkcDexLhKc7iukCvTGp9iDPm7SI49jNnxmSSEyI41cByuKpxueFJUUMmd8goNFbLyn5DLrf8WQ5vHltdp9wuiOacvDsin3PgWSGcvXbhwNW2v1niDN5V+XjAKPiai25/4O/k8EGjcFa0Sc95RLIPn/Ad+5kTfOcViT1YqwkPcyHdF/izU+kBQ/C7ZJO+h0py/Y7MOdKGbFNapTZohT+KN6pwd7AnHrSfOcn7usFjkebmnPCDpiF4rZGn9YM7wnIkmTPfkEVzToON710Kx3ZDjgiO9pyN0obcdC04zdMEh6zD2Ty2uutwOJty7AYC+ztwHn45ksxJJCoPuHjFHiBn2tb3RvOZ8wtn3TsjgVOFRO6QDvmAWzcYD0sVT+Nh6X9bVvU0D+lEG6W1p9bTEOxpGRDB2cjKqpF456aVrWRS9dSujhq+DQ3KnKJ7czdjF4vJHar+H4UkGIX7gjx9BW+FgHOp5zKnVlyaXFH7/xR1lwdFqaf2oCgoq9M8lR7oZGzWxIXkLc3KTXrWvFDN3vgawjIXxu7oRCwd0/xZ0FeShixV3cZNcLzny6qZNOHP/zVkJ9eun+85uQWALtrYY6Saa9xw2uaHlUbhPq3wfM/MObFixYoVK1asWLFixYoVTv8CfUr/kdHM+XgAAAAASUVORK5CYII=", "small_icon": 
"iVBORw0KGgoAAAANSUhEUgAAACQAAAAkCAYAAADhAJiYAAADEUlEQVRYhe2WS2sTURTHLxVBdCWIWnwlLZSU3HOzEERaH1CtrUo3tck9d9JWV34CUagLuxBEpAguxI2iCxcuXNiVglJ8fAWL2mofCLrUapvMPYPXRTJt5pFkGJKFmD+c3X/m/O553BnGWmrpfxchn9BKnK/rU6KPFNxsNswljWC0BKoFRYr3a8lXNIIh5LeaC+OGBNJSjAd94gQh/+nxKphqKIydg8ueBBVhK55bh1GZIxpBh/lIwu3GwCCfqAazHpYYp1xmoBpMRVXvmIsHN8eGIYQrdWEQDCG81Io/iuD7vqK6dsSDkeJqNBjxzoyJbSbLNmmExzW8y8bqPhCzMvxaRJi3ZujgVvc5M8naCOFZiPdzUaa6KnMYC7ZHgrEVvx6xTTOVMIwxpvPiMCH88MMYyTs9MFmR1AizjoIHDamMRnhVBea3z7dQyKb3V/rWkO/TEj5sePhdM8naYsMQwoy5kNjieTaX7vXD2MgX/TAmm96tkS+GbN89L4yCG5FgFLz2n4Zy6V4tYdXfpgBMXuwNhdmIhy7MVCQYKZ4HKlO6CNcCMxOojEhqhPkIeZ4wB7nUCHadbXrqnxmy4GRIZebWspk9ARgJX+rCSCBHQp4xxpijxDlCXghvE58OVMbKDARnBj7621TIimSdNrmhHSnQM0eOEkP+ShHCC+YTKdHnb5ON/FNsGAnkoBjx5ykn4/0agcozMx0KI+GP76XzZrS73dMmCzqiwBACORacCYWpSDroINwPbJMSgxrB8Z6Ov/ffuCYrkoTwNVJl6sFUk4NihILbZGyEBZMVSddXlLyTEL5FmhnkZ+PByMxwyGpXxpJRkDIKUhphKRJMHk7HgjFjYqeWpZmqE2/KUc9XJCUGY8G4Km/VrxpJFgqYShQwldAICzVmZjV2ZYJQcJwwcAkaG2HZKL7L9Znh7nZbhgy0hCJJfqohMK5KX3PP5TnnX3fGGDOj3e0aYa5yZsiCYw2FWU9mZQ7pEtRsAVOJar5y+2YJeYGQ9zQFxhUh7yn6frjCZCzoIBRHmwrTUkv/gv4CNzePiwDo44MAAAAASUVORK5CYII=", "visible": true, "tab_version": "4.0.0", "tab_build_no": "0", "build_no": 4}, "data_input_builder": {"datainputs": [{"index": "default", "sourcetype": "minemeld_feed", "interval": "30", "use_external_validation": true, "streaming_mode_xml": true, "name": "minemeld_feed", "title": "MineMeld Feed", "description": "", "type": "customized", "parameters": [{"required": true, "name": "feed_url", "label": " Output Node Feed URL", "default_value": "", "placeholder": "", "help_string": "", "type": "text", "format_type": "text", "value": ""}, {"required": true, "name": "indicator_timeout", "label": "Indicator Timeout", "default_value": "", "placeholder": "", "help_string": " How long to retain indicators (in hours)", "type": "text", "format_type": "text", "value": ""}, {"required": false, "name": "credentials", "label": "Feed Credentials", "default_value": "", "placeholder": "", "help_string": "", "possible_values": [], "type": "global_account", "format_type": "global_account", "value": ""}], "data_inputs_options": [{"type": "customized_var", "name": "feed_url", "title": " Output Node Feed URL", "description": "", "required_on_edit": false, "required_on_create": true, "format_type": "text", "default_value": "", "placeholder": ""}, {"type": "customized_var", "name": "indicator_timeout", "title": "Indicator Timeout", "description": " How long to retain indicators (in hours)", "required_on_edit": false, "required_on_create": true, "format_type": "text", "default_value": "", "placeholder": ""}, {"type": "customized_var", "name": "credentials", "title": "Feed Credentials", "description": "", "required_on_edit": false, "required_on_create": false, "possible_values": [], "format_type": "global_account", "default_value": "", "placeholder": ""}], "code": "\n# encoding = utf-8\n\nimport base64\nimport functools\nimport json\nimport os\nimport requests.exceptions\nimport sys\nimport time\n\nVERIFY_CERTIFICATE = True\n\n'''\n IMPORTANT\n Edit only the validate_input and collect_events functions.\n Do not edit any other part in this file.\n This file is generated only once when creating the modular input.\n'''\n'''\n# For advanced users, if you want to create single instance mod input, uncomment this method.\ndef use_single_instance_mode():\n return True\n'''\n\ndef timer(desc):\n def outer(func):\n @functools.wraps(func)\n def inner(*args):\n \"\"\"Decorator to time function execution.\n\n If an exception 
is raised during the function, then a time of \"-1\"\n will be saved for the given description.\n\n Note: Any function decorated with this should have the \"stats\" dict\n as the final argument in its arg list.\n\n \"\"\"\n # Setup.\n stats = args[-1]\n stats[desc] = -1\n start = time.time()\n\n # Execute the function.\n ret_val = func(*args)\n\n # No exception, so save the runtime and return ret_val.\n stats[desc] = time.time() - start\n return ret_val\n return inner\n return outer\n\n\ndef validate_input(helper, definition):\n \"\"\"Implement your own validation logic to validate the input stanza configurations\"\"\"\n # This example accesses the modular input variable\n # feed_url = definition.parameters.get('feed_url', None)\n # credentials = definition.parameters.get('credentials', None)\n pass\n\n\ndef collect_events(helper, ew):\n \"\"\"Collect the kvstore events from the feed.\"\"\"\n # Get the short name for this feed.\n name = helper.get_input_stanza_names()\n start = time.time()\n try:\n indicator_timeout = int(helper.get_arg('indicator_timeout')) * 3600\n except ValueError:\n # If this isn't set, timeout indicators immediately.\n indicator_timeout = 0\n stats = {'input_name': name}\n\n helper.log_info('START Splunk_TA_paloalto indicator retrieval for \"{0}\"'.format(\n name))\n\n # Get the current indicators.\n kvs_entries = pull_from_kvstore(helper, name, start, stats)\n stats['previous_indicators'] = len(kvs_entries)\n\n # Retrieve current entries from the MineMeld feed.\n mmf_entries = []\n try:\n mmf_entries = get_feed_entries(helper, name, start, stats)\n except requests.exceptions.HTTPError as e:\n helper.log_error('Failed to get entries for \"{0}\": {1}'.format(\n name, e))\n stats['error'] = str(e)\n stats['feed_indicators'] = len(mmf_entries)\n\n # Merge the two together, and determine which indicators should be expired.\n rm_entries, retained_indicators = merge_entries(\n mmf_entries, kvs_entries, start, indicator_timeout, stats)\n stats['expired_indicators'] = len(rm_entries)\n stats['indicators'] = len(mmf_entries) + retained_indicators\n\n # Save new/updated indicators to the kvstore.\n save_to_kvstore(helper, name, mmf_entries, stats)\n\n # Delete the expired indicators.\n remove_from_kvstore(helper, name, rm_entries, stats)\n\n # Write an event to the index giving some basic stats.\n stats['total_time'] = time.time() - start\n save_stats_as_event(helper, ew, stats)\n\n # Done\n helper.log_info('END Splunk_TA_paloalto indicator retrieval for \"{0}\"'.format(\n name))\n\n\n@timer('read_kvstore')\ndef pull_from_kvstore(helper, name, start, stats):\n \"\"\"Retrieves all current indicators.\"\"\"\n resp = helper.send_http_request(\n url=_uri(helper),\n headers=_headers(helper),\n method='GET',\n verify=False,\n parameters={'query': json.dumps({'splunk_source': name})})\n resp.raise_for_status()\n\n ans = {}\n for v in resp.json():\n ans[v['indicator']] = {\n '_key': v['_key'],\n 'is_present': False,\n 'splunk_last_seen': v.get('splunk_last_seen', 0.0)}\n\n return ans\n\n\n@timer('retrieve_indicators')\ndef get_feed_entries(helper, name, start, stats):\n \"\"\"Pulls the indicators from the minemeld feed.\"\"\"\n feed_url = helper.get_arg('feed_url')\n feed_creds = helper.get_arg('credentials')\n feed_headers = {}\n # If auth is specified, add it as a header.\n if feed_creds is not None:\n auth = '{0}:{1}'.format(feed_creds['username'], feed_creds['password']).encode('ascii')\n auth = base64.b64encode(auth)\n auth = auth.decode('utf-8')\n feed_headers['Authorization'] = 
'Basic {0}'.format(auth)\n\n # Pull events as json.\n resp = helper.send_http_request(\n url=feed_url,\n method='GET',\n parameters={'v': 'json', 'tr': 1},\n headers=feed_headers,\n verify=VERIFY_CERTIFICATE,\n )\n\n # Raise exceptions on problems.\n resp.raise_for_status()\n feed_entries = resp.json()\n\n # Return the normalized events to be saved to the kv store.\n return normalized(name, feed_entries, start)\n\n\n@timer('merge_indicators')\ndef merge_entries(mmf_entries, kvs_entries, start, indicator_timeout, stats):\n \"\"\"\n Merges the current indicators with previous, determining which should\n be expired.\n \"\"\"\n rm_entries = []\n retained_indicators = 0\n\n for mmfe in mmf_entries:\n kvse = kvs_entries.get(mmfe['indicator'])\n if kvse is not None:\n kvse['is_present'] = True\n mmfe['_key'] = kvse['_key']\n\n for info in iter(kvs_entries.values()):\n if info['is_present']:\n pass\n elif info['splunk_last_seen'] + indicator_timeout < start:\n rm_entries.append(info['_key'])\n else:\n retained_indicators += 1\n\n return rm_entries, retained_indicators\n\n\n@timer('save_to_kvstore')\ndef save_to_kvstore(helper, name, entries, stats):\n \"\"\"Saves all normalized entries as `name` events.\"\"\"\n helper.log_info('Saving {0} entries for MineMeld feed \"{1}\"'.format(\n len(entries), name))\n url = '{0}/batch_save'.format(_uri(helper))\n\n # We need to batch in groups of 500, the default.\n for i in range(0, len(entries), 500):\n resp = helper.send_http_request(\n url=url,\n headers=_headers(helper),\n method='POST',\n verify=False,\n payload=entries[i:i+500])\n resp.raise_for_status()\n\n\n@timer('remove_from_kvstore')\ndef remove_from_kvstore(helper, name, rm_entries, stats):\n \"\"\"Removes the specified entries from the kvstore.\"\"\"\n if not rm_entries:\n return\n\n helper.log_info('Removing {0} kvstore entries for MineMeld feed \"{1}\"'.format(\n len(rm_entries), name))\n url = _uri(helper)\n headers = _headers(helper)\n\n # Batch a few at a time, as splunk 414s if the URI is too long, or times\n # out if it's within the length limits but still hits too many entries to\n # finish on time. 
From some tests, it seems like 500 is a good number,\n # which is nice since it matches the batch_save number.\n #\n # The _key field has been 24 characters in length on my system.\n for i in range(0, len(rm_entries), 500):\n rms = rm_entries[i:i+500]\n query = {'$or': list({'_key': x} for x in rms)}\n resp = helper.send_http_request(\n url=url,\n headers=headers,\n method='DELETE',\n verify=False,\n parameters={'query': json.dumps(query)})\n resp.raise_for_status()\n\n\ndef save_stats_as_event(helper, ew, stats):\n \"\"\"Saves the stats of getting feed events to the index.\"\"\"\n event = helper.new_event(\n source=helper.get_input_type(),\n index=helper.get_output_index(),\n sourcetype=helper.get_sourcetype(),\n data=json.dumps(stats),\n )\n ew.write_event(event)\n\n\ndef _uri(helper):\n \"\"\"Returns the URL of the kvstore.\"\"\"\n return '/'.join((\n helper.context_meta['server_uri'],\n 'servicesNS',\n 'nobody',\n 'Splunk_TA_paloalto',\n 'storage',\n 'collections',\n 'data',\n 'minemeldfeeds'))\n\n\ndef _headers(helper):\n \"\"\"Returns the auth header for Splunk.\"\"\"\n return {\n 'Authorization': 'Splunk {0}'.format(\n helper.context_meta['session_key'])}\n\n\ndef normalized(name, feed_entries, start):\n \"\"\"Returns a list of normalized kvstore entries.\"\"\"\n data = []\n for feed_entry in feed_entries:\n if 'indicator' not in feed_entry or 'value' not in feed_entry:\n continue\n\n # Make the entry dict.\n entry = feed_entry.copy()\n entry['splunk_source'] = name\n entry['splunk_last_seen'] = start\n\n data.append(entry)\n\n return data\n", "customized_options": [{"name": "feed_url", "value": ""}, {"name": "indicator_timeout", "value": ""}, {"name": "credentials", "value": ""}], "uuid": "81937edbc4ef44a89fb8041f1c1f1624"}, {"index": "default", "sourcetype": "aperture", "interval": "30", "use_external_validation": true, "streaming_mode_xml": true, "name": "aperture", "title": "Aperture", "description": "", "type": "customized", "parameters": [{"required": true, "name": "region", "label": "Region", "default_value": "us", "placeholder": "", "help_string": "", "possible_values": [{"value": "us", "label": "US"}, {"value": "eu", "label": "Europe"}, {"value": "apac", "label": "Asia Pacific"}], "type": "dropdownlist", "format_type": "dropdownlist", "value": "us"}, {"required": true, "name": "global_account", "label": "Global Account", "default_value": "", "placeholder": "", "help_string": "", "possible_values": [], "type": "global_account", "format_type": "global_account", "value": ""}], "data_inputs_options": [{"type": "customized_var", "name": "region", "title": "Region", "description": "", "required_on_edit": false, "required_on_create": true, "possible_values": [{"value": "us", "label": "US"}, {"value": "eu", "label": "Europe"}, {"value": "apac", "label": "Asia Pacific"}], "format_type": "dropdownlist", "default_value": "us", "placeholder": ""}, {"type": "customized_var", "name": "global_account", "title": "Global Account", "description": "", "required_on_edit": false, "required_on_create": true, "possible_values": [], "format_type": "global_account", "default_value": "", "placeholder": ""}], "code": "\n# encoding = utf-8\n\nimport os\nimport sys\nimport time\nimport datetime\nimport json\nimport base64\n\n'''\n IMPORTANT\n Edit only the validate_input and collect_events functions.\n Do not edit any other part in this file.\n This file is generated only once when creating the modular input.\n'''\n'''\n# For advanced users, if you want to create single\ninstance mod input, uncomment 
this method.\ndef use_single_instance_mode():\n return True\n'''\n\nREGION_DOMAIN = {\n 'us': 'api.aperture.paloaltonetworks.com',\n 'eu': 'api.aperture-eu.paloaltonetworks.com',\n 'apac': 'api.aperture-apac.paloaltonetworks.com',\n}\n\n\ndef validate_input(helper, definition):\n \"\"\"Implement your own validation logic to\n validate the input stanza configurations\"\"\"\n # This example accesses the modular input variable\n opt_global_account = definition.parameters.get('global_account', None)\n pass\n\n\ndef get_auth_token(helper, opt_global_account, proxy_enabled):\n helper.log_debug(\"Start get_auth_token.\")\n client_id = opt_global_account['username']\n secret = opt_global_account['password']\n region = helper.get_arg('region')\n url_domain = REGION_DOMAIN[region]\n url = \"https://{0}/oauth/token\".format(url_domain)\n method = \"POST\"\n parameters = {'scope': 'api_access',\n 'grant_type': 'client_credentials'}\n auth = base64.b64encode('{0}:{1}'.format(client_id, secret).encode('ascii'))\n auth = auth.decode('utf-8')\n header = {'Authorization': 'Basic ' + auth,\n 'Content-Type': 'application/x-www- \\\n form-urlencoded; charset=ISO-8859-1',\n 'Accept': 'application/json'}\n response = helper.send_http_request(url, method, parameters=parameters,\n payload=None, headers=header,\n cookies=None, verify=True, cert=None,\n timeout=30, use_proxy=proxy_enabled)\n r_status = response.status_code\n if r_status == 200:\n helper.log_debug('Token recieved')\n token = response.json()['access_token']\n return token\n elif r_status == 401:\n helper.log_error('ERROR: Invalid credentials.')\n raise ValueError(r_status)\n else:\n helper.log_error('ERROR: Unable to retrieve token.')\n helper.log_debug(r_status)\n raise ValueError(r_status)\n\n\ndef collect_events(helper, ew):\n log_level = helper.get_log_level()\n helper.set_log_level(log_level)\n opt_global_account = helper.get_arg('global_account')\n region = helper.get_arg('region')\n url_domain = REGION_DOMAIN[region]\n proxy_settings = helper.get_proxy()\n proxy_enabled = bool(proxy_settings)\n helper.log_debug(\"Checking if Proxy is enabled\")\n helper.log_debug(proxy_enabled)\n helper.log_debug(\"Current input type is set to:\")\n helper.log_debug(helper.get_input_stanza_names())\n token = get_auth_token(helper, opt_global_account, proxy_enabled)\n headers = {'Authorization': 'Bearer ' + token}\n method = 'GET'\n url = \"https://{0}/api/v1/log_events_bulk\".format(url_domain)\n r_status = 200\n while r_status != 204:\n response = helper.send_http_request(\n url, method, parameters=None, payload=None,\n headers=headers, cookies=None, verify=True, cert=None,\n timeout=30, use_proxy=proxy_enabled)\n r_status = response.status_code\n helper.log_debug('API status code is:')\n helper.log_debug(r_status)\n if r_status == 200:\n helper.log_debug(\"Adding data to index.\")\n events = response.json()['events']\n for data in events:\n helper.log_debug(data)\n timestamp = datetime.datetime.strptime(data['timestamp'], '%Y-%m-%dT%H:%M:%SZ')\n final_time = (timestamp - datetime.datetime.fromtimestamp(0)).total_seconds()\n helper.log_debug(final_time)\n try:\n event = helper.new_event(\n host=url_domain,\n source=helper.get_input_stanza_names(),\n index=helper.get_output_index(),\n sourcetype=helper.get_sourcetype(),\n time=final_time,\n data=json.dumps(data))\n ew.write_event(event)\n except Exception as e:\n ew.log_error('Error on parse event. 
' + str(e))\n elif r_status == 204:\n helper.log_debug(\"STATUS 204: No new events were found.\")\n break\n elif r_status >= 400:\n helper.log_debug(\"ERROR Status is:\")\n helper.log_debug(r_status)\n raise ValueError(r_status)\n else:\n helper.log_error('There was a problem when trying to collect events using the aperture API call.')\n", "customized_options": [{"name": "region", "value": "us"}, {"name": "global_account", "value": ""}], "uuid": "0e312910c3d249f78b8e2386a4ddeaef"}, {"index": "default", "sourcetype": "AutoFocus", "interval": "60", "use_external_validation": true, "streaming_mode_xml": true, "name": "autofocus_export", "title": "AutoFocus Export", "description": "", "type": "customized", "parameters": [{"name": "label", "label": "Label", "help_string": "", "required": false, "format_type": "text", "default_value": "", "placeholder": "", "type": "text", "value": ""}], "data_inputs_options": [{"type": "customized_var", "name": "label", "title": "Label", "description": "", "required_on_edit": false, "required_on_create": false, "format_type": "text", "default_value": "", "placeholder": ""}], "code": "\n# encoding = utf-8\n\nimport os\nimport sys\nimport time\nimport datetime\n\nlibpath = os.path.dirname(os.path.abspath(__file__))\nsys.path[:0] = [os.path.join(libpath, 'lib')]\nimport common\nimport pan.afapi\nimport json\n\nfrom kvstore import KvStoreHandler\n\n'''\n IMPORTANT\n Edit only the validate_input and collect_events functions.\n Do not edit any other part in this file.\n This file is generated only once when creating the modular input.\n'''\n# For advanced users, if you want to create single instance mod input, uncomment this method.\ndef use_single_instance_mode():\n return True\n\n\ndef validate_input(helper, definition):\n \"\"\"Implement your own validation logic to validate the input stanza configurations\"\"\"\n # This example accesses the modular input variable\n opt_label = definition.parameters.get('label', None)\n pass\n\n\ndef collect_events(helper, ew):\n # Implement your data collection logic here\n\n # The following examples get the arguments of this input.\n # Note, for single instance mod input, args will be returned as a dict.\n # For multi instance mod input, args will be returned as a single value.\n opt_label = helper.get_arg('label')\n\n # In single instance mode, to get arguments of a particular input, use\n # opt_label = helper.get_arg('label', stanza_name)\n\n # get input type\n # helper.get_input_type()\n\n # The following examples get input stanzas.\n # get all detailed input stanzas\n # helper.get_input_stanza()\n # get specific input stanza with stanza name\n # helper.get_input_stanza(stanza_name)\n # get all stanza names\n # helper.get_input_stanza_names()\n\n # The following examples get options from setup page configuration.\n # get the loglevel from the setup page\n loglevel = helper.get_log_level()\n # get proxy setting configuration\n # proxy_settings = helper.get_proxy()\n # get global variable configuration\n global_autofocus_api_key = helper.get_global_setting(\"autofocus_api_key\")\n sessionKey = helper.context_meta['session_key']\n\n # The following examples show usage of logging related helper functions.\n # write to the log for this modular input using configured global log level or INFO as default\n # helper.log(\"log message\")\n # write to the log using specified log level\n # helper.log_debug(\"log message\")\n # helper.log_info(\"log message\")\n # helper.log_warning(\"log message\")\n # helper.log_error(\"log message\")\n 
# helper.log_critical(\"log message\")\n # set the log level for this modular input\n # (log_level can be \"debug\", \"info\", \"warning\", \"error\" or \"critical\", case insensitive)\n helper.set_log_level(loglevel)\n\n\n # sessionKey = inputs.metadata.get('session_key')\n for label in opt_label:\n helper.log_debug(\"Current Label: \" + label)\n # Check if Label already exsist and get last submit date\n helper.log_debug(\"Getting AutoFocus Export for results\")\n # Use API to get entries in Export List from AutoFocus\n values = {\n \"apiKey\": global_autofocus_api_key,\n # \"panosFormatted\": \"true\",\n \"exportMetadata\": \"true\",\n \"label\": label\n }\n try:\n afapi = pan.afapi.PanAFapi(api_key=global_autofocus_api_key)\n jsAfapi = afapi.export(json.dumps(values)).json\n af_export = jsAfapi['export_list']\n # helper.log_debug(jsAfapi)\n except pan.afapi.PanAFapiError as e:\n helper.log_debug(e)\n sys.exit(1)\n\n sync_kvstore = sync_to_kvstore(helper, sessionKey, label, af_export)\n helper.log_debug(sync_kvstore)\n # Label does not exsist in KVstore go ahead and batch import.\n if sync_kvstore == 1:\n helper.log_debug(\"New to KVSTORE\")\n send_to_kvstore(helper, sessionKey, jsAfapi['export_list'])\n # Label does exsist in KVstore. Change Detected.\n elif sync_kvstore == -1:\n helper.log_debug(\"Update KVSTORE\")\n # Delete entries for given label\n options = {\n \"app\": \"Splunk_TA_paloalto\",\n \"owner\": \"nobody\",\n \"collection\": \"autofocus_export\"\n }\n query = {\"label\": label}\n delete = True\n helper.log_debug(\"Delete entries for this label.\")\n remove = KvStoreHandler.query(query, sessionKey, options, delete)\n helper.log_debug(\"Add entries with this label to kvstore\")\n send_to_kvstore(helper, sessionKey, jsAfapi['export_list'])\n # NO CHANGE TO EXPORT LIST\n else:\n helper.log_debug(\"No Change\")\n\n \"\"\"\n # The following examples send rest requests to some endpoint.\n response = helper.send_http_request(url, method, parameters=None, payload=None,\n headers=None, cookies=None, verify=True, cert=None,\n timeout=None, use_proxy=True)\n # get the response headers\n r_headers = response.headers\n # get the response body as text\n r_text = response.text\n # get response body as json. If the body text is not a json string, raise a ValueError\n r_json = response.json()\n # get response cookies\n r_cookies = response.cookies\n # get redirect history\n historical_responses = response.history\n # get response status code\n r_status = response.status_code\n # check the response status, if the status is not sucessful, raise requests.HTTPError\n response.raise_for_status()\n# The following examples show usage of check pointing related helper functions.\n # save checkpoint\n helper.save_check_point(key, state)\n # delete checkpoint\n helper.delete_check_point(key)\n # get checkpoint\n state = helper.get_check_point(key)\n\n # To create a splunk event\n helper.new_event(data, time=None, host=None, index=None, source=None, sourcetype=None, done=True, unbroken=True)\n \"\"\"\n\n '''\n # The following example writes a random number as an event. (Multi Instance Mode)\n # Use this code template by default.\n import random\n data = str(random.randint(0,100))\n event = helper.new_event(source=helper.get_input_type(), index=helper.get_output_index(), sourcetype=helper.get_sourcetype(), data=data)\n ew.write_event(event)\n '''\n\n '''\n # The following example writes a random number as an event for each input config. 
(Single Instance Mode)\n # For advanced users, if you want to create single instance mod input, please use this code template.\n # Also, you need to uncomment use_single_instance_mode() above.\n import random\n input_type = helper.get_input_type()\n for stanza_name in helper.get_input_stanza_names():\n data = str(random.randint(0,100))\n event = helper.new_event(source=input_type, index=helper.get_output_index(stanza_name), sourcetype=helper.get_sourcetype(stanza_name), data=data)\n ew.write_event(event)\n '''\n\n\ndef sync_to_kvstore(helper, sessionKey, label, af_export):\n helper.log_debug(\"checking KVSTORE\")\n url_options = {\n \"app\": \"Splunk_TA_paloalto\",\n \"owner\": \"nobody\",\n \"collection\": \"autofocus_export\"\n }\n query = {\"label\": label}\n arg = {\n \"query\": query\n }\n response = KvStoreHandler.adv_query(arg, url_options, sessionKey)\n # helper.log_debug(response)\n results = 0\n kv_export = json.loads(response[1])\n # helper.log_debug(\"kv_export:\")\n # helper.log_debug(kv_export)\n # helper.log_debug(\"af_export:\")\n # helper.log_debug(af_export)\n\n # Check to see if we have entries in the KVstore already.\n if kv_export:\n helper.log_debug(\"Label Exist\")\n # Check if list are same size\n if len(kv_export) == len(af_export):\n for entry in kv_export:\n # Remove fields from kv_export so dicts will match.\n if '_key' in entry:\n del(entry['_key'])\n if '_user' in entry:\n del(entry['_user'])\n if entry not in af_export:\n helper.log_debug(\"not a match\")\n helper.log_debug(entry)\n results = -1\n return results\n else:\n helper.log_debug(\"Match\")\n else:\n helper.log_debug(\"List count not same.\")\n results = -1\n return results\n else:\n helper.log_debug(\"Label return empty\")\n results = 1\n return results\n\n\ndef send_to_kvstore(helper, sessionKey, export_list):\n helper.log_debug(\"Inside Send to KVSTORE\")\n url_options = {\n \"app\": \"Splunk_TA_paloalto\",\n \"owner\": \"nobody\",\n \"collection\": \"autofocus_export\"\n }\n helper.log_debug(export_list)\n response = KvStoreHandler.batch_create(export_list, sessionKey, url_options)\n helper.log_debug(response)", "customized_options": [{"name": "label", "value": ""}], "uuid": "d79f8dd69d41446cb0817e00cc4c34d7", "sample_count": 0}, {"index": "default", "sourcetype": "pan:iot", "interval": "30", "use_external_validation": true, "streaming_mode_xml": true, "name": "iot_security", "title": "IoT Security", "description": "", "type": "customized", "parameters": [{"name": "customer_id", "label": "Customer ID", "help_string": "", "required": true, "format_type": "text", "default_value": "", "placeholder": "", "type": "text", "value": "banff-enterprise-demo"}, {"name": "access_key_id", "label": "Access Key ID", "help_string": "", "required": true, "format_type": "password", "default_value": "", "placeholder": "", "type": "password", "value": "1921124944:55d41f13516184710c76efc9cf8f40fb6d1d2a81293aa9c80a563849d53916fb"}, {"name": "secret_access_key", "label": "Secret Access Key", "help_string": "", "required": true, "format_type": "password", "default_value": "", "placeholder": "", "type": "password", "value": 
"eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJzdWIiOiI1ZjE3NmZmZmQ5ZTVmZjFhNzAwOGVmYmQiLCJlbWFpbCI6InBhbmd1eWVuQHBhbG9hbHRvbmV0d29ya3MuY29tIiwianRpIjoiNTlaM01WTUdJZyIsInNjb3BlIjp7ImJhbmZmLWVudGVycHJpc2UtZGVtbyI6eyJhZG1pbiI6dHJ1ZSwicm9sZVNvdXJjZSI6IkFEIEdyb3VwIn19LCJpc2xvY2tlZCI6ZmFsc2UsInRlbmFudGlkIjoiYmFuZmYtZW50ZXJwcmlzZS1kZW1vIiwidXJsUGF0dGVybiI6Ii9wdWIvdjQuMC8iLCJ0eXBlIjoiZGV2aWNlX3JldHJpZXZhbF9rZXkiLCJpYXQiOjE2MDU3NjQ5NDQsImV4cCI6MTkyMTEyNDk0NCwiaXNzIjoiemluZ2JveCJ9._PE_XztIsin4w1nKAlcS3ZJdMYBAQUSH5cF71-ZC0EI"}], "data_inputs_options": [{"type": "customized_var", "name": "customer_id", "title": "Customer ID", "description": "", "required_on_edit": false, "required_on_create": true, "format_type": "text", "default_value": "", "placeholder": ""}, {"type": "customized_var", "name": "access_key_id", "title": "Access Key ID", "description": "", "required_on_edit": false, "required_on_create": true, "format_type": "password", "default_value": "", "placeholder": ""}, {"type": "customized_var", "name": "secret_access_key", "title": "Secret Access Key", "description": "", "required_on_edit": false, "required_on_create": true, "format_type": "password", "default_value": "", "placeholder": ""}], "code": "\n# encoding = utf-8\n\nimport os\nimport sys\nimport time\nimport datetime\nimport json\n\n'''\n IMPORTANT\n Edit only the validate_input and collect_events functions.\n Do not edit any other part in this file.\n This file is generated only once when creating the modular input.\n'''\n'''\n# For advanced users, if you want to create single instance mod input, uncomment this method.\ndef use_single_instance_mode():\n return True\n'''\n\ndef validate_input(helper, definition):\n \"\"\"Implement your own validation logic to validate the input stanza configurations\"\"\"\n # This example accesses the modular input variable\n customer_id = definition.parameters.get('customer_id', None)\n access_key_id = definition.parameters.get('access_key_id', None)\n secret_access_key = definition.parameters.get('secret_access_key', None)\n pass\n\ndef query_api(helper, url, parameters, api_type, proxy_enabled):\n global_page_length = 1000\n total = 1000\n results = []\n start_time = time.time()\n page_offset = 0\n \n if api_type == 'devices':\n items = 'devices'\n page_offset = helper.get_check_point(\"offset\")\n if not page_offset:\n page_offset = 1000\n page = 0\n max_pages = 20\n \n while page < max_pages:\n method = 'GET'\n response = helper.send_http_request(url, method, parameters,\n payload=None, headers=None,\n cookies=None, verify=True, cert=None,\n timeout=30, use_proxy=proxy_enabled)\n r_status = response.status_code\n if r_status == 200:\n entries = response.json()[items]\n results = results + entries\n total = len(entries)\n page_offset = page_offset + global_page_length\n parameters.update({'offset': page_offset})\n page += 1\n helper.log_debug(\"Current Offset: {0}, Total Entries: {1}, Next Page: {2}\".format(page_offset, total, page) )\n if total < global_page_length:\n helper.delete_check_point(\"offset\")\n helper.delete_check_point(\"last_run_end\")\n helper.log_debug(\"End of device list. Cleared checkpoint data.\")\n break\n else:\n helper.log_debug(r_status)\n break\n else: \n now = datetime.datetime.now()\n helper.save_check_point(\"offset\", page_offset)\n helper.save_check_point(\"last_run_timestamp\", datetime.datetime.strftime(now, \"%Y-%m-%d %H:%M:%S\"))\n helper.log_debug(\"We have reached max_page. 
Saved offset: {0} last_run_end: {1}\".format(page_offset, now))\n\n \n else:\n items = 'items'\n while total == global_page_length:\n method = 'GET'\n response = helper.send_http_request(url, method, parameters,\n payload=None, headers=None,\n cookies=None, verify=True, cert=None,\n timeout=30, use_proxy=proxy_enabled)\n r_status = response.status_code\n if r_status == 200:\n entries = response.json()[items]\n results = results + entries\n total = len(entries)\n page_offset = page_offset + global_page_length\n helper.log_debug(\"Current Offset: {0}, Total Entries: {1}\".format(page_offset, total) )\n parameters.update({'offset': page_offset})\n else:\n helper.log_debug(r_status)\n break\n run_time = time.time() - start_time\n helper.log_debug(\"End of {0} results. Function took {1} to run\".format(api_type, run_time))\n return (results)\n\ndef collect_events(helper, ew):\n # Set debug level\n log_level = helper.get_log_level()\n helper.set_log_level(log_level)\n # Get Proxy Settings\n proxy_settings = helper.get_proxy()\n proxy_enabled = bool(proxy_settings)\n # helper.log_debug(\"Checking if Proxy is enabled\")\n # helper.log_debug(proxy_enabled) \n\n opt_customer_id = helper.get_arg('customer_id')\n opt_access_key_id = helper.get_arg('access_key_id')\n opt_secret_access_key = helper.get_arg('secret_access_key')\n\n global_url = \"https://{0}.iot.paloaltonetworks.com/pub/v4.0\".format(\n opt_customer_id)\n global_url_params = {\n 'customerid': opt_customer_id,\n 'key_id': opt_access_key_id,\n 'access_key': opt_secret_access_key,\n 'pagelength': 1000,\n 'offset': 0,\n }\n\n last_device_pull = helper.get_check_point(\"last_run_timestamp\")\n\n if not last_device_pull or datetime.datetime.strptime(last_device_pull, \"%Y-%m-%d %H:%M:%S\") < datetime.datetime.now() - datetime.timedelta(minutes=5):\n # Lets get Device Inventory\n try:\n device_url = '{0}/device/list'.format(global_url)\n params = {\n 'filter_monitored': 'yes',\n 'detail': 'true',\n }\n params.update(global_url_params)\n devices = query_api(helper, device_url, params, 'devices', proxy_enabled)\n for data in devices:\n try:\n event = helper.new_event(\n host=global_url,\n source=helper.get_input_stanza_names(),\n index=helper.get_output_index(),\n sourcetype='pan:iot_device',\n data=json.dumps(data))\n ew.write_event(event)\n except Exception as e:\n helper.log_error('Error on parse event. ' + str(e))\n except Exception as e:\n print(str(e))\n else:\n helper.log_debug(\"Skipping device inventory pull. Last pulled: {0}\".format(last_device_pull))\n\n # Lets get Alerts\n try: \n alerts_url = '{0}/alert/list'.format(global_url)\n params = {\n 'type': 'policy_alert',\n }\n params.update(global_url_params)\n alerts = query_api(helper, alerts_url, params, 'alerts', proxy_enabled)\n for data in alerts:\n try:\n event = helper.new_event(\n host=global_url,\n source=helper.get_input_stanza_names(),\n index=helper.get_output_index(),\n sourcetype='pan:iot_alert',\n data=json.dumps(data))\n ew.write_event(event)\n except Exception as e:\n helper.log_error('Error on parse event. 
' + str(e))\n except Exception as e:\n helper.log_error(str(e))\n\n # # Vulnerabilities\n try:\n vuln_url = '{0}/vulnerability/list'.format(global_url)\n params = {\n 'groupby': 'device',\n }\n params.update(global_url_params)\n vulnerabilities = query_api(helper, vuln_url, params, 'vulnerabilities', proxy_enabled)\n for data in vulnerabilities:\n try:\n event = helper.new_event(\n host=global_url,\n source=helper.get_input_stanza_names(),\n index=helper.get_output_index(),\n sourcetype='pan:iot_vulnerability',\n data=json.dumps(data))\n ew.write_event(event)\n except Exception as e:\n helper.log_error('Error on parse event. ' + str(e))\n except Exception as e:\n helper.log_error(str(e))\n", "customized_options": [{"name": "customer_id", "value": "banff-enterprise-demo"}], "uuid": "633c7b1243dc44178b70dd5e260fbd80", "sample_count": "10008"}]}, "field_extraction_builder": {"pan:aperture": {"data_format": "json"}, "pan:config": {"data_format": "tabular", "table_results": {"delim": ","}}, "pan:decryption": {"data_format": "tabular", "table_results": {"delim": ","}}, "pan:globalprotect": {"data_format": "tabular", "table_results": {"delim": ","}}, "pan:log": {"data_format": "tabular", "table_results": {"delim": ","}}, "pan:minemeld": {"data_format": "json"}, "pan:system": {"data_format": "tabular", "table_results": {"delim": ","}}, "pan:threat": {"data_format": "tabular", "table_results": {"delim": ","}}, "pan:traffic": {"data_format": "tabular", "table_results": {"delim": ","}}, "pan:userid": {"data_format": "tabular", "table_results": {"delim": ","}}, "pan:iot": {"data_format": "json"}, "pan:iot_alert": {"data_format": "json"}, "pan:iot_device": {"data_format": "json"}, "pan:iot_vulnerability": {"data_format": "json"}, "pan:firewall_cloud": {"data_format": "tabular", "table_results": {"delim": " "}}, "pan:xdr_incident": {"data_format": "json"}}, "global_settings_builder": {"global_settings": {"proxy_settings": {"proxy_type": "http"}, "log_settings": {}, "credential_settings": [], "customized_settings": [{"required": false, "name": "autofocus_api_key", "label": " AutoFocus API Key", "placeholder": "", "default_value": "", "help_string": " Used to retrieve metadata about AutoFocus tags. Requires a MineMeld Feed input to download threat indicators. More info: https://splunk.paloaltonetworks.com/autofocus-and-minemeld.html", "type": "password", "format_type": "password", "value": ""}, {"required": false, "name": "wildfire_api_key", "label": " WildFire API Key", "placeholder": "", "default_value": "", "help_string": " Used to retrieve reports from the WildFire Cloud. An API Key is available from the WildFire Portal. 
https://wildfire.paloaltonetworks.com", "type": "password", "format_type": "password", "value": ""}]}}, "sourcetype_builder": {}, "validation": {}}
\ No newline at end of file
+{"basic_builder": {"appname": "Splunk_TA_paloalto", "friendly_name": "Palo Alto Networks Add-on for Splunk", "version": "7.0.0", "author": "Palo Alto Networks", "description": "", "theme": "#FFFFFF", "large_icon": "iVBORw0KGgoAAAANSUhEUgAAAEgAAABICAYAAABV7bNHAAAFiklEQVR4nO2bXWgcVRTHtxZKGhMVjY2CjbYo1eI5s5tpGkpe1hdFH3wL3DP74bbFLaRFaEVBkXTrJ7RWH1QQH8S2ULS+CULfBEGlWrDSb8GPtvjVFVJbAvXeweNDsmuy2d05s3t3Z4X5w31L/nPPL+ecOXNnkkjEihUrVqxYsWLFihWrR2U8LPmET9jyYy+ZNJ7zhi2/SKUJp7QC1gqYvdTdljy/1wpYE75nwy8yaXK2VeBoBWwIZ/wMPNqqH+fdEUPwzWJP2G9zz12TVrB9YSCLV2pDWD/OjN9kCMv1/fBgJ2LomGozp3YZwhn2kkmpH0+lBwzhsWaevue83cmYrKl55tQGlXwsyG+urPCSyJPwQDdibFlh4FQbdyF5TyM/PjK5vHFZNVo9Wm5a4VNh4TRr3Jx3RzTht614aoL3o2DQUK1kzhJQnvNQxY9pfDh85vRoJhlyptuFoxWwUXDdZJNpplHUCs/b8NQKP+TNqdujhPO0nUCq64wm+Mimp6/w8UjgaM/ZYTMQo+Bvzo7ey4V0nyY4a8WX8DCXSjd0HY4hfNkuHJxlGsWKP2+ZGAyaewI9yZnuOphEwk5DXgInN/pA7XWYeZkmON4anIgeP4zC3VbhEFytB6ciLrr9RuHnRsE/Uk+f4N1uMqnKOhyFs0zu/UHX5S0Tg0bBdRlwfE4US8Z52GpvMuTssps5WG6WORVxaXKFUfCF0PNNSSy+l8zMZ++nzLysbTia8APrmSM4F+Ki268VnrSaOYSv1pT4l23BsT3nGAXXRJlTSPdpBWdswvEVPFnXg+BQi3DwdZtwtIILnMM1gXCKbr8m+E4I/EVhLM8EQD7KOzetDAEH9lsvKykcy5lTW1ZNYH8mhGO3Ic9ljqCstkwMiuF4zkuSWHwPt4baa1C5acIDVjOHsMw5vDEQTtHt14TnosicOutE3RHA/pwD10RzzlR6wH5ZtXdzMYSf1NK23ZDPiHrO1k23aoIfZGWFz8vgNG/IISAdZRofThjCj+1mDs4yjQ8Hwpm7W4me2rtQVvUX4bmE72HeEP5hyfCUqCHncI34SMNzdojgKHjB8h/6iu9hvrLhVUbBb+2Z4k98ZHJ5IJyi228IfpVljrNLBMdy5sy9lqqZ9jmTWh/mqXmxIfzJHqwNhJPfeJuOaM4Jtxq82OQsrtMKLoakfUxy7ss5XGMUXhb5krNNBEfhK5Yzpxz4QpPJHTIEf4kNBccG8w1ZdreKKHOMguucd0ck105wFkcDexLhKc7iukCvTGp9iDPm7SI49jNnxmSSEyI41cByuKpxueFJUUMmd8goNFbLyn5DLrf8WQ5vHltdp9wuiOacvDsin3PgWSGcvXbhwNW2v1niDN5V+XjAKPiai25/4O/k8EGjcFa0Sc95RLIPn/Ad+5kTfOcViT1YqwkPcyHdF/izU+kBQ/C7ZJO+h0py/Y7MOdKGbFNapTZohT+KN6pwd7AnHrSfOcn7usFjkebmnPCDpiF4rZGn9YM7wnIkmTPfkEVzToON710Kx3ZDjgiO9pyN0obcdC04zdMEh6zD2Ty2uutwOJty7AYC+ztwHn45ksxJJCoPuHjFHiBn2tb3RvOZ8wtn3TsjgVOFRO6QDvmAWzcYD0sVT+Nh6X9bVvU0D+lEG6W1p9bTEOxpGRDB2cjKqpF456aVrWRS9dSujhq+DQ3KnKJ7czdjF4vJHar+H4UkGIX7gjx9BW+FgHOp5zKnVlyaXFH7/xR1lwdFqaf2oCgoq9M8lR7oZGzWxIXkLc3KTXrWvFDN3vgawjIXxu7oRCwd0/xZ0FeShixV3cZNcLzny6qZNOHP/zVkJ9eun+85uQWALtrYY6Saa9xw2uaHlUbhPq3wfM/MObFixYoVK1asWLFixYoVTv8CfUr/kdHM+XgAAAAASUVORK5CYII=", "small_icon": "iVBORw0KGgoAAAANSUhEUgAAACQAAAAkCAYAAADhAJiYAAADEUlEQVRYhe2WS2sTURTHLxVBdCWIWnwlLZSU3HOzEERaH1CtrUo3tck9d9JWV34CUagLuxBEpAguxI2iCxcuXNiVglJ8fAWL2mofCLrUapvMPYPXRTJt5pFkGJKFmD+c3X/m/O553BnGWmrpfxchn9BKnK/rU6KPFNxsNswljWC0BKoFRYr3a8lXNIIh5LeaC+OGBNJSjAd94gQh/+nxKphqKIydg8ueBBVhK55bh1GZIxpBh/lIwu3GwCCfqAazHpYYp1xmoBpMRVXvmIsHN8eGIYQrdWEQDCG81Io/iuD7vqK6dsSDkeJqNBjxzoyJbSbLNmmExzW8y8bqPhCzMvxaRJi3ZujgVvc5M8naCOFZiPdzUaa6KnMYC7ZHgrEVvx6xTTOVMIwxpvPiMCH88MMYyTs9MFmR1AizjoIHDamMRnhVBea3z7dQyKb3V/rWkO/TEj5sePhdM8naYsMQwoy5kNjieTaX7vXD2MgX/TAmm96tkS+GbN89L4yCG5FgFLz2n4Zy6V4tYdXfpgBMXuwNhdmIhy7MVCQYKZ4HKlO6CNcCMxOojEhqhPkIeZ4wB7nUCHadbXrqnxmy4GRIZebWspk9ARgJX+rCSCBHQp4xxpijxDlCXghvE58OVMbKDARnBj7621TIimSdNrmhHSnQM0eOEkP+ShHCC+YTKdHnb5ON/FNsGAnkoBjx5ykn4/0agcozMx0KI+GP76XzZrS73dMmCzqiwBACORacCYWpSDroINwPbJMSgxrB8Z6Ov/ffuCYrkoTwNVJl6sFUk4NihILbZGyEBZMVSddXlLyTEL5FmhnkZ+PByMxwyGpXxpJRkDIKUhphKRJMHk7HgjFjYqeWpZmqE2/KUc9XJCUGY8G4Km/VrxpJFgqYShQwldAICzVmZjV2ZYJQcJwwcAkaG2HZKL7L9Znh7nZbhgy0hCJJfqohMK5KX3PP5TnnX3fGGDOj3e0aYa5yZsiCYw2FWU9mZQ7pEtRsAVOJar5y+2YJeYGQ9zQFxhUh7yn6frjCZCzoIBRHmwrTUkv/gv4CNzePiwDo44MAAAAASUVORK5CYII=", "visible": true, "tab_version": "4.0.0", 
"tab_build_no": "0", "build_no": 4}, "data_input_builder": {"datainputs": [{"index": "default", "sourcetype": "aperture", "interval": "30", "use_external_validation": true, "streaming_mode_xml": true, "name": "aperture", "title": "Aperture", "description": "", "type": "customized", "parameters": [{"required": true, "name": "region", "label": "Region", "default_value": "us", "placeholder": "", "help_string": "", "possible_values": [{"value": "us", "label": "US"}, {"value": "eu", "label": "Europe"}, {"value": "apac", "label": "Asia Pacific"}], "type": "dropdownlist", "format_type": "dropdownlist", "value": "us"}, {"required": true, "name": "global_account", "label": "Global Account", "default_value": "", "placeholder": "", "help_string": "", "possible_values": [], "type": "global_account", "format_type": "global_account", "value": ""}], "data_inputs_options": [{"type": "customized_var", "name": "region", "title": "Region", "description": "", "required_on_edit": false, "required_on_create": true, "possible_values": [{"value": "us", "label": "US"}, {"value": "eu", "label": "Europe"}, {"value": "apac", "label": "Asia Pacific"}], "format_type": "dropdownlist", "default_value": "us", "placeholder": ""}, {"type": "customized_var", "name": "global_account", "title": "Global Account", "description": "", "required_on_edit": false, "required_on_create": true, "possible_values": [], "format_type": "global_account", "default_value": "", "placeholder": ""}], "code": "\n# encoding = utf-8\n\nimport os\nimport sys\nimport time\nimport datetime\nimport json\nimport base64\n\n'''\n IMPORTANT\n Edit only the validate_input and collect_events functions.\n Do not edit any other part in this file.\n This file is generated only once when creating the modular input.\n'''\n'''\n# For advanced users, if you want to create single\ninstance mod input, uncomment this method.\ndef use_single_instance_mode():\n return True\n'''\n\nREGION_DOMAIN = {\n 'us': 'api.aperture.paloaltonetworks.com',\n 'eu': 'api.aperture-eu.paloaltonetworks.com',\n 'apac': 'api.aperture-apac.paloaltonetworks.com',\n}\n\n\ndef validate_input(helper, definition):\n \"\"\"Implement your own validation logic to\n validate the input stanza configurations\"\"\"\n # This example accesses the modular input variable\n opt_global_account = definition.parameters.get('global_account', None)\n pass\n\n\ndef get_auth_token(helper, opt_global_account, proxy_enabled):\n helper.log_debug(\"Start get_auth_token.\")\n client_id = opt_global_account['username']\n secret = opt_global_account['password']\n region = helper.get_arg('region')\n url_domain = REGION_DOMAIN[region]\n url = \"https://{0}/oauth/token\".format(url_domain)\n method = \"POST\"\n parameters = {'scope': 'api_access',\n 'grant_type': 'client_credentials'}\n auth = base64.b64encode('{0}:{1}'.format(client_id, secret).encode('ascii'))\n auth = auth.decode('utf-8')\n header = {'Authorization': 'Basic ' + auth,\n 'Content-Type': 'application/x-www- \\\n form-urlencoded; charset=ISO-8859-1',\n 'Accept': 'application/json'}\n response = helper.send_http_request(url, method, parameters=parameters,\n payload=None, headers=header,\n cookies=None, verify=True, cert=None,\n timeout=30, use_proxy=proxy_enabled)\n r_status = response.status_code\n if r_status == 200:\n helper.log_debug('Token recieved')\n token = response.json()['access_token']\n return token\n elif r_status == 401:\n helper.log_error('ERROR: Invalid credentials.')\n raise ValueError(r_status)\n else:\n helper.log_error('ERROR: Unable to 
retrieve token.')\n helper.log_debug(r_status)\n raise ValueError(r_status)\n\n\ndef collect_events(helper, ew):\n log_level = helper.get_log_level()\n helper.set_log_level(log_level)\n opt_global_account = helper.get_arg('global_account')\n region = helper.get_arg('region')\n url_domain = REGION_DOMAIN[region]\n proxy_settings = helper.get_proxy()\n proxy_enabled = bool(proxy_settings)\n helper.log_debug(\"Checking if Proxy is enabled\")\n helper.log_debug(proxy_enabled)\n helper.log_debug(\"Current input type is set to:\")\n helper.log_debug(helper.get_input_stanza_names())\n token = get_auth_token(helper, opt_global_account, proxy_enabled)\n headers = {'Authorization': 'Bearer ' + token}\n method = 'GET'\n url = \"https://{0}/api/v1/log_events_bulk\".format(url_domain)\n r_status = 200\n while r_status != 204:\n response = helper.send_http_request(\n url, method, parameters=None, payload=None,\n headers=headers, cookies=None, verify=True, cert=None,\n timeout=30, use_proxy=proxy_enabled)\n r_status = response.status_code\n helper.log_debug('API status code is:')\n helper.log_debug(r_status)\n if r_status == 200:\n helper.log_debug(\"Adding data to index.\")\n events = response.json()['events']\n for data in events:\n helper.log_debug(data)\n timestamp = datetime.datetime.strptime(data['timestamp'], '%Y-%m-%dT%H:%M:%SZ')\n final_time = (timestamp - datetime.datetime.fromtimestamp(0)).total_seconds()\n helper.log_debug(final_time)\n try:\n event = helper.new_event(\n host=url_domain,\n source=helper.get_input_stanza_names(),\n index=helper.get_output_index(),\n sourcetype=helper.get_sourcetype(),\n time=final_time,\n data=json.dumps(data))\n ew.write_event(event)\n except Exception as e:\n ew.log_error('Error on parse event. ' + str(e))\n elif r_status == 204:\n helper.log_debug(\"STATUS 204: No new events were found.\")\n break\n elif r_status >= 400:\n helper.log_debug(\"ERROR Status is:\")\n helper.log_debug(r_status)\n raise ValueError(r_status)\n else:\n helper.log_error('There was a problem when trying to collect events using the aperture API call.')\n", "customized_options": [{"name": "region", "value": "us"}, {"name": "global_account", "value": ""}], "uuid": "0e312910c3d249f78b8e2386a4ddeaef"}, {"index": "default", "sourcetype": "AutoFocus", "interval": "60", "use_external_validation": true, "streaming_mode_xml": true, "name": "autofocus_export", "title": "AutoFocus Export", "description": "", "type": "customized", "parameters": [{"name": "label", "label": "Label", "help_string": "", "required": false, "format_type": "text", "default_value": "", "placeholder": "", "type": "text", "value": ""}], "data_inputs_options": [{"type": "customized_var", "name": "label", "title": "Label", "description": "", "required_on_edit": false, "required_on_create": false, "format_type": "text", "default_value": "", "placeholder": ""}], "code": "\n# encoding = utf-8\n\nimport os\nimport sys\nimport time\nimport datetime\n\nlibpath = os.path.dirname(os.path.abspath(__file__))\nsys.path[:0] = [os.path.join(libpath, 'lib')]\nimport common\nimport pan.afapi\nimport json\n\nfrom kvstore import KvStoreHandler\n\n'''\n IMPORTANT\n Edit only the validate_input and collect_events functions.\n Do not edit any other part in this file.\n This file is generated only once when creating the modular input.\n'''\n# For advanced users, if you want to create single instance mod input, uncomment this method.\ndef use_single_instance_mode():\n return True\n\n\ndef validate_input(helper, definition):\n \"\"\"Implement your 
own validation logic to validate the input stanza configurations\"\"\"\n # This example accesses the modular input variable\n opt_label = definition.parameters.get('label', None)\n pass\n\n\ndef collect_events(helper, ew):\n # Implement your data collection logic here\n\n # The following examples get the arguments of this input.\n # Note, for single instance mod input, args will be returned as a dict.\n # For multi instance mod input, args will be returned as a single value.\n opt_label = helper.get_arg('label')\n\n # In single instance mode, to get arguments of a particular input, use\n # opt_label = helper.get_arg('label', stanza_name)\n\n # get input type\n # helper.get_input_type()\n\n # The following examples get input stanzas.\n # get all detailed input stanzas\n # helper.get_input_stanza()\n # get specific input stanza with stanza name\n # helper.get_input_stanza(stanza_name)\n # get all stanza names\n # helper.get_input_stanza_names()\n\n # The following examples get options from setup page configuration.\n # get the loglevel from the setup page\n loglevel = helper.get_log_level()\n # get proxy setting configuration\n # proxy_settings = helper.get_proxy()\n # get global variable configuration\n global_autofocus_api_key = helper.get_global_setting(\"autofocus_api_key\")\n sessionKey = helper.context_meta['session_key']\n\n # The following examples show usage of logging related helper functions.\n # write to the log for this modular input using configured global log level or INFO as default\n # helper.log(\"log message\")\n # write to the log using specified log level\n # helper.log_debug(\"log message\")\n # helper.log_info(\"log message\")\n # helper.log_warning(\"log message\")\n # helper.log_error(\"log message\")\n # helper.log_critical(\"log message\")\n # set the log level for this modular input\n # (log_level can be \"debug\", \"info\", \"warning\", \"error\" or \"critical\", case insensitive)\n helper.set_log_level(loglevel)\n\n\n # sessionKey = inputs.metadata.get('session_key')\n for label in opt_label:\n helper.log_debug(\"Current Label: \" + label)\n # Check if Label already exsist and get last submit date\n helper.log_debug(\"Getting AutoFocus Export for results\")\n # Use API to get entries in Export List from AutoFocus\n values = {\n \"apiKey\": global_autofocus_api_key,\n # \"panosFormatted\": \"true\",\n \"exportMetadata\": \"true\",\n \"label\": label\n }\n try:\n afapi = pan.afapi.PanAFapi(api_key=global_autofocus_api_key)\n jsAfapi = afapi.export(json.dumps(values)).json\n af_export = jsAfapi['export_list']\n # helper.log_debug(jsAfapi)\n except pan.afapi.PanAFapiError as e:\n helper.log_debug(e)\n sys.exit(1)\n\n sync_kvstore = sync_to_kvstore(helper, sessionKey, label, af_export)\n helper.log_debug(sync_kvstore)\n # Label does not exsist in KVstore go ahead and batch import.\n if sync_kvstore == 1:\n helper.log_debug(\"New to KVSTORE\")\n send_to_kvstore(helper, sessionKey, jsAfapi['export_list'])\n # Label does exsist in KVstore. 
Change Detected.\n elif sync_kvstore == -1:\n helper.log_debug(\"Update KVSTORE\")\n # Delete entries for given label\n options = {\n \"app\": \"Splunk_TA_paloalto\",\n \"owner\": \"nobody\",\n \"collection\": \"autofocus_export\"\n }\n query = {\"label\": label}\n delete = True\n helper.log_debug(\"Delete entries for this label.\")\n remove = KvStoreHandler.query(query, sessionKey, options, delete)\n helper.log_debug(\"Add entries with this label to kvstore\")\n send_to_kvstore(helper, sessionKey, jsAfapi['export_list'])\n # NO CHANGE TO EXPORT LIST\n else:\n helper.log_debug(\"No Change\")\n\n \"\"\"\n # The following examples send rest requests to some endpoint.\n response = helper.send_http_request(url, method, parameters=None, payload=None,\n headers=None, cookies=None, verify=True, cert=None,\n timeout=None, use_proxy=True)\n # get the response headers\n r_headers = response.headers\n # get the response body as text\n r_text = response.text\n # get response body as json. If the body text is not a json string, raise a ValueError\n r_json = response.json()\n # get response cookies\n r_cookies = response.cookies\n # get redirect history\n historical_responses = response.history\n # get response status code\n r_status = response.status_code\n # check the response status, if the status is not sucessful, raise requests.HTTPError\n response.raise_for_status()\n# The following examples show usage of check pointing related helper functions.\n # save checkpoint\n helper.save_check_point(key, state)\n # delete checkpoint\n helper.delete_check_point(key)\n # get checkpoint\n state = helper.get_check_point(key)\n\n # To create a splunk event\n helper.new_event(data, time=None, host=None, index=None, source=None, sourcetype=None, done=True, unbroken=True)\n \"\"\"\n\n '''\n # The following example writes a random number as an event. (Multi Instance Mode)\n # Use this code template by default.\n import random\n data = str(random.randint(0,100))\n event = helper.new_event(source=helper.get_input_type(), index=helper.get_output_index(), sourcetype=helper.get_sourcetype(), data=data)\n ew.write_event(event)\n '''\n\n '''\n # The following example writes a random number as an event for each input config. 
(Single Instance Mode)\n # For advanced users, if you want to create single instance mod input, please use this code template.\n # Also, you need to uncomment use_single_instance_mode() above.\n import random\n input_type = helper.get_input_type()\n for stanza_name in helper.get_input_stanza_names():\n data = str(random.randint(0,100))\n event = helper.new_event(source=input_type, index=helper.get_output_index(stanza_name), sourcetype=helper.get_sourcetype(stanza_name), data=data)\n ew.write_event(event)\n '''\n\n\ndef sync_to_kvstore(helper, sessionKey, label, af_export):\n helper.log_debug(\"checking KVSTORE\")\n url_options = {\n \"app\": \"Splunk_TA_paloalto\",\n \"owner\": \"nobody\",\n \"collection\": \"autofocus_export\"\n }\n query = {\"label\": label}\n arg = {\n \"query\": query\n }\n response = KvStoreHandler.adv_query(arg, url_options, sessionKey)\n # helper.log_debug(response)\n results = 0\n kv_export = json.loads(response[1])\n # helper.log_debug(\"kv_export:\")\n # helper.log_debug(kv_export)\n # helper.log_debug(\"af_export:\")\n # helper.log_debug(af_export)\n\n # Check to see if we have entries in the KVstore already.\n if kv_export:\n helper.log_debug(\"Label Exist\")\n # Check if list are same size\n if len(kv_export) == len(af_export):\n for entry in kv_export:\n # Remove fields from kv_export so dicts will match.\n if '_key' in entry:\n del(entry['_key'])\n if '_user' in entry:\n del(entry['_user'])\n if entry not in af_export:\n helper.log_debug(\"not a match\")\n helper.log_debug(entry)\n results = -1\n return results\n else:\n helper.log_debug(\"Match\")\n else:\n helper.log_debug(\"List count not same.\")\n results = -1\n return results\n else:\n helper.log_debug(\"Label return empty\")\n results = 1\n return results\n\n\ndef send_to_kvstore(helper, sessionKey, export_list):\n helper.log_debug(\"Inside Send to KVSTORE\")\n url_options = {\n \"app\": \"Splunk_TA_paloalto\",\n \"owner\": \"nobody\",\n \"collection\": \"autofocus_export\"\n }\n helper.log_debug(export_list)\n response = KvStoreHandler.batch_create(export_list, sessionKey, url_options)\n helper.log_debug(response)", "customized_options": [{"name": "label", "value": ""}], "uuid": "d79f8dd69d41446cb0817e00cc4c34d7", "sample_count": 0}, {"index": "default", "sourcetype": "pan:iot", "interval": "30", "use_external_validation": true, "streaming_mode_xml": true, "name": "iot_security", "title": "IoT Security", "description": "", "type": "customized", "parameters": [{"name": "customer_id", "label": "Customer ID", "help_string": "", "required": true, "format_type": "text", "default_value": "", "placeholder": "", "type": "text", "value": "banff-enterprise-demo"}, {"name": "access_key_id", "label": "Access Key ID", "help_string": "", "required": true, "format_type": "password", "default_value": "", "placeholder": "", "type": "password", "value": "1921124944:55d41f13516184710c76efc9cf8f40fb6d1d2a81293aa9c80a563849d53916fb"}, {"name": "secret_access_key", "label": "Secret Access Key", "help_string": "", "required": true, "format_type": "password", "default_value": "", "placeholder": "", "type": "password", "value": 
"eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJzdWIiOiI1ZjE3NmZmZmQ5ZTVmZjFhNzAwOGVmYmQiLCJlbWFpbCI6InBhbmd1eWVuQHBhbG9hbHRvbmV0d29ya3MuY29tIiwianRpIjoiNTlaM01WTUdJZyIsInNjb3BlIjp7ImJhbmZmLWVudGVycHJpc2UtZGVtbyI6eyJhZG1pbiI6dHJ1ZSwicm9sZVNvdXJjZSI6IkFEIEdyb3VwIn19LCJpc2xvY2tlZCI6ZmFsc2UsInRlbmFudGlkIjoiYmFuZmYtZW50ZXJwcmlzZS1kZW1vIiwidXJsUGF0dGVybiI6Ii9wdWIvdjQuMC8iLCJ0eXBlIjoiZGV2aWNlX3JldHJpZXZhbF9rZXkiLCJpYXQiOjE2MDU3NjQ5NDQsImV4cCI6MTkyMTEyNDk0NCwiaXNzIjoiemluZ2JveCJ9._PE_XztIsin4w1nKAlcS3ZJdMYBAQUSH5cF71-ZC0EI"}], "data_inputs_options": [{"type": "customized_var", "name": "customer_id", "title": "Customer ID", "description": "", "required_on_edit": false, "required_on_create": true, "format_type": "text", "default_value": "", "placeholder": ""}, {"type": "customized_var", "name": "access_key_id", "title": "Access Key ID", "description": "", "required_on_edit": false, "required_on_create": true, "format_type": "password", "default_value": "", "placeholder": ""}, {"type": "customized_var", "name": "secret_access_key", "title": "Secret Access Key", "description": "", "required_on_edit": false, "required_on_create": true, "format_type": "password", "default_value": "", "placeholder": ""}], "code": "\n# encoding = utf-8\n\nimport os\nimport sys\nimport time\nimport datetime\nimport json\n\n'''\n IMPORTANT\n Edit only the validate_input and collect_events functions.\n Do not edit any other part in this file.\n This file is generated only once when creating the modular input.\n'''\n'''\n# For advanced users, if you want to create single instance mod input, uncomment this method.\ndef use_single_instance_mode():\n return True\n'''\n\ndef validate_input(helper, definition):\n \"\"\"Implement your own validation logic to validate the input stanza configurations\"\"\"\n # This example accesses the modular input variable\n customer_id = definition.parameters.get('customer_id', None)\n access_key_id = definition.parameters.get('access_key_id', None)\n secret_access_key = definition.parameters.get('secret_access_key', None)\n pass\n\ndef query_api(helper, url, parameters, api_type, proxy_enabled):\n global_page_length = 1000\n total = 1000\n results = []\n start_time = time.time()\n page_offset = 0\n \n if api_type == 'devices':\n items = 'devices'\n page_offset = helper.get_check_point(\"offset\")\n if not page_offset:\n page_offset = 1000\n page = 0\n max_pages = 20\n \n while page < max_pages:\n method = 'GET'\n response = helper.send_http_request(url, method, parameters,\n payload=None, headers=None,\n cookies=None, verify=True, cert=None,\n timeout=30, use_proxy=proxy_enabled)\n r_status = response.status_code\n if r_status == 200:\n entries = response.json()[items]\n results = results + entries\n total = len(entries)\n page_offset = page_offset + global_page_length\n parameters.update({'offset': page_offset})\n page += 1\n helper.log_debug(\"Current Offset: {0}, Total Entries: {1}, Next Page: {2}\".format(page_offset, total, page) )\n if total < global_page_length:\n helper.delete_check_point(\"offset\")\n helper.delete_check_point(\"last_run_end\")\n helper.log_debug(\"End of device list. Cleared checkpoint data.\")\n break\n else:\n helper.log_debug(r_status)\n break\n else: \n now = datetime.datetime.now()\n helper.save_check_point(\"offset\", page_offset)\n helper.save_check_point(\"last_run_timestamp\", datetime.datetime.strftime(now, \"%Y-%m-%d %H:%M:%S\"))\n helper.log_debug(\"We have reached max_page. 
Saved offset: {0} last_run_timestamp: {1}\".format(page_offset, now))\n\n \n else:\n items = 'items'\n while total == global_page_length:\n method = 'GET'\n response = helper.send_http_request(url, method, parameters,\n payload=None, headers=None,\n cookies=None, verify=True, cert=None,\n timeout=30, use_proxy=proxy_enabled)\n r_status = response.status_code\n if r_status == 200:\n entries = response.json()[items]\n results = results + entries\n total = len(entries)\n page_offset = page_offset + global_page_length\n helper.log_debug(\"Current Offset: {0}, Total Entries: {1}\".format(page_offset, total) )\n parameters.update({'offset': page_offset})\n else:\n helper.log_debug(r_status)\n break\n run_time = time.time() - start_time\n helper.log_debug(\"End of {0} results. Function took {1} seconds to run\".format(api_type, run_time))\n return (results)\n\ndef collect_events(helper, ew):\n # Set debug level\n log_level = helper.get_log_level()\n helper.set_log_level(log_level)\n # Get Proxy Settings\n proxy_settings = helper.get_proxy()\n proxy_enabled = bool(proxy_settings)\n # helper.log_debug(\"Checking if Proxy is enabled\")\n # helper.log_debug(proxy_enabled) \n\n opt_customer_id = helper.get_arg('customer_id')\n opt_access_key_id = helper.get_arg('access_key_id')\n opt_secret_access_key = helper.get_arg('secret_access_key')\n\n global_url = \"https://{0}.iot.paloaltonetworks.com/pub/v4.0\".format(\n opt_customer_id)\n global_url_params = {\n 'customerid': opt_customer_id,\n 'key_id': opt_access_key_id,\n 'access_key': opt_secret_access_key,\n 'pagelength': 1000,\n 'offset': 0,\n }\n\n last_device_pull = helper.get_check_point(\"last_run_timestamp\")\n\n if not last_device_pull or datetime.datetime.strptime(last_device_pull, \"%Y-%m-%d %H:%M:%S\") < datetime.datetime.now() - datetime.timedelta(minutes=5):\n # Let's get Device Inventory\n try:\n device_url = '{0}/device/list'.format(global_url)\n params = {\n 'filter_monitored': 'yes',\n 'detail': 'true',\n }\n params.update(global_url_params)\n devices = query_api(helper, device_url, params, 'devices', proxy_enabled)\n for data in devices:\n try:\n event = helper.new_event(\n host=global_url,\n source=helper.get_input_stanza_names(),\n index=helper.get_output_index(),\n sourcetype='pan:iot_device',\n data=json.dumps(data))\n ew.write_event(event)\n except Exception as e:\n helper.log_error('Error on parse event. ' + str(e))\n except Exception as e:\n helper.log_error(str(e))\n else:\n helper.log_debug(\"Skipping device inventory pull. Last pulled: {0}\".format(last_device_pull))\n\n # Let's get Alerts\n try: \n alerts_url = '{0}/alert/list'.format(global_url)\n params = {\n 'type': 'policy_alert',\n }\n params.update(global_url_params)\n alerts = query_api(helper, alerts_url, params, 'alerts', proxy_enabled)\n for data in alerts:\n try:\n event = helper.new_event(\n host=global_url,\n source=helper.get_input_stanza_names(),\n index=helper.get_output_index(),\n sourcetype='pan:iot_alert',\n data=json.dumps(data))\n ew.write_event(event)\n except Exception as e:\n helper.log_error('Error on parse event. 
' + str(e))\n except Exception as e:\n helper.log_error(str(e))\n\n # Vulnerabilities\n try:\n vuln_url = '{0}/vulnerability/list'.format(global_url)\n params = {\n 'groupby': 'device',\n }\n params.update(global_url_params)\n vulnerabilities = query_api(helper, vuln_url, params, 'vulnerabilities', proxy_enabled)\n for data in vulnerabilities:\n try:\n event = helper.new_event(\n host=global_url,\n source=helper.get_input_stanza_names(),\n index=helper.get_output_index(),\n sourcetype='pan:iot_vulnerability',\n data=json.dumps(data))\n ew.write_event(event)\n except Exception as e:\n helper.log_error('Error on parse event. ' + str(e))\n except Exception as e:\n helper.log_error(str(e))\n", "customized_options": [{"name": "customer_id", "value": "banff-enterprise-demo"}], "uuid": "633c7b1243dc44178b70dd5e260fbd80", "sample_count": "10008"}, {"index": "default", "sourcetype": "minemeld_feed", "interval": "30", "use_external_validation": true, "streaming_mode_xml": true, "name": "minemeld_feed", "title": "MineMeld Feed", "description": "", "type": "customized", "parameters": [{"name": "feed_url", "label": " Output Node Feed URL2", "help_string": "https://", "required": true, "format_type": "text", "default_value": "", "placeholder": "", "type": "text", "value": ""}, {"name": "indicator_timeout", "label": "Indicator Timeout", "help_string": " How long to retain indicators (in hours)", "required": true, "format_type": "text", "default_value": "", "placeholder": "", "type": "text", "value": ""}, {"name": "credentials", "label": "Feed Credentials", "help_string": "", "required": false, "possible_values": [], "format_type": "global_account", "default_value": "", "placeholder": "", "type": "global_account", "value": ""}], "data_inputs_options": [{"type": "customized_var", "name": "feed_url", "title": " Output Node Feed URL2", "description": "https://", "required_on_edit": false, "required_on_create": true, "format_type": "text", "default_value": "", "placeholder": ""}, {"type": "customized_var", "name": "indicator_timeout", "title": "Indicator Timeout", "description": " How long to retain indicators (in hours)", "required_on_edit": false, "required_on_create": true, "format_type": "text", "default_value": "", "placeholder": ""}, {"type": "customized_var", "name": "credentials", "title": "Feed Credentials", "description": "", "required_on_edit": false, "required_on_create": false, "possible_values": [], "format_type": "global_account", "default_value": "", "placeholder": ""}], "code": "\n# encoding = utf-8\n\nimport base64\nimport functools\nimport json\nimport os\nimport requests.exceptions\nimport sys\nimport time\n\nVERIFY_CERTIFICATE = True\n\n'''\n IMPORTANT\n Edit only the validate_input and collect_events functions.\n Do not edit any other part in this file.\n This file is generated only once when creating the modular input.\n'''\n'''\n# For advanced users, if you want to create single instance mod input, uncomment this method.\ndef use_single_instance_mode():\n return True\n'''\n\ndef timer(desc):\n def outer(func):\n @functools.wraps(func)\n def inner(*args):\n \"\"\"Decorator to time function execution.\n\n If an exception is raised during the function, then a time of \"-1\"\n will be saved for the given description.\n\n Note: Any function decorated with this should have the \"stats\" dict\n as the final argument in its arg list.\n\n \"\"\"\n # Setup.\n stats = args[-1]\n stats[desc] = -1\n start = time.time()\n\n # Execute the function.\n ret_val = func(*args)\n\n # No exception, so save 
the runtime and return ret_val.\n stats[desc] = time.time() - start\n return ret_val\n return inner\n return outer\n\n\ndef validate_input(helper, definition):\n \"\"\"Implement your own validation logic to validate the input stanza configurations\"\"\"\n # This example accesses the modular input variable\n # feed_url = definition.parameters.get('feed_url', None)\n # credentials = definition.parameters.get('credentials', None)\n pass\n\n\ndef collect_events(helper, ew):\n \"\"\"Collect the kvstore events from the feed.\"\"\"\n # Get the short name for this feed.\n name = helper.get_input_stanza_names()\n start = time.time()\n try:\n indicator_timeout = int(helper.get_arg('indicator_timeout')) * 3600\n except ValueError:\n # If this isn't set, timeout indicators immediately.\n indicator_timeout = 0\n stats = {'input_name': name}\n\n helper.log_info('START Splunk_TA_paloalto indicator retrieval for \"{0}\"'.format(\n name))\n\n # Get the current indicators.\n kvs_entries = pull_from_kvstore(helper, name, start, stats)\n stats['previous_indicators'] = len(kvs_entries)\n\n # Retrieve current entries from the MineMeld feed.\n mmf_entries = []\n try:\n mmf_entries = get_feed_entries(helper, name, start, stats)\n except requests.exceptions.HTTPError as e:\n helper.log_error('Failed to get entries for \"{0}\": {1}'.format(\n name, e))\n stats['error'] = str(e)\n stats['feed_indicators'] = len(mmf_entries)\n\n # Merge the two together, and determine which indicators should be expired.\n rm_entries, retained_indicators = merge_entries(\n mmf_entries, kvs_entries, start, indicator_timeout, stats)\n stats['expired_indicators'] = len(rm_entries)\n stats['indicators'] = len(mmf_entries) + retained_indicators\n\n # Save new/updated indicators to the kvstore.\n save_to_kvstore(helper, name, mmf_entries, stats)\n\n # Delete the expired indicators.\n remove_from_kvstore(helper, name, rm_entries, stats)\n\n # Write an event to the index giving some basic stats.\n stats['total_time'] = time.time() - start\n save_stats_as_event(helper, ew, stats)\n\n # Done\n helper.log_info('END Splunk_TA_paloalto indicator retrieval for \"{0}\"'.format(\n name))\n\n\n@timer('read_kvstore')\ndef pull_from_kvstore(helper, name, start, stats):\n \"\"\"Retrieves all current indicators.\"\"\"\n resp = helper.send_http_request(\n url=_uri(helper),\n headers=_headers(helper),\n method='GET',\n verify=False,\n parameters={'query': json.dumps({'splunk_source': name})})\n resp.raise_for_status()\n\n ans = {}\n for v in resp.json():\n ans[v['indicator']] = {\n '_key': v['_key'],\n 'is_present': False,\n 'splunk_last_seen': v.get('splunk_last_seen', 0.0)}\n\n return ans\n\n\n@timer('retrieve_indicators')\ndef get_feed_entries(helper, name, start, stats):\n \"\"\"Pulls the indicators from the minemeld feed.\"\"\"\n feed_url = helper.get_arg('feed_url')\n feed_creds = helper.get_arg('credentials')\n feed_headers = {}\n # If auth is specified, add it as a header.\n if feed_creds is not None:\n auth = '{0}:{1}'.format(feed_creds['username'], feed_creds['password']).encode('ascii')\n auth = base64.b64encode(auth)\n auth = auth.decode('utf-8')\n feed_headers['Authorization'] = 'Basic {0}'.format(auth)\n\n # Pull events as json.\n resp = helper.send_http_request(\n url=feed_url,\n method='GET',\n parameters={'v': 'json', 'tr': 1},\n headers=feed_headers,\n verify=VERIFY_CERTIFICATE,\n )\n\n # Raise exceptions on problems.\n resp.raise_for_status()\n feed_entries = resp.json()\n\n # Return the normalized events to be saved to the kv store.\n 
return normalized(name, feed_entries, start)\n\n\n@timer('merge_indicators')\ndef merge_entries(mmf_entries, kvs_entries, start, indicator_timeout, stats):\n \"\"\"\n Merges the current indicators with previous, determining which should\n be expired.\n \"\"\"\n rm_entries = []\n retained_indicators = 0\n\n for mmfe in mmf_entries:\n kvse = kvs_entries.get(mmfe['indicator'])\n if kvse is not None:\n kvse['is_present'] = True\n mmfe['_key'] = kvse['_key']\n\n for info in iter(kvs_entries.values()):\n if info['is_present']:\n pass\n elif info['splunk_last_seen'] + indicator_timeout < start:\n rm_entries.append(info['_key'])\n else:\n retained_indicators += 1\n\n return rm_entries, retained_indicators\n\n\n@timer('save_to_kvstore')\ndef save_to_kvstore(helper, name, entries, stats):\n \"\"\"Saves all normalized entries as `name` events.\"\"\"\n helper.log_info('Saving {0} entries for MineMeld feed \"{1}\"'.format(\n len(entries), name))\n url = '{0}/batch_save'.format(_uri(helper))\n\n # We need to batch in groups of 500, the default.\n for i in range(0, len(entries), 500):\n resp = helper.send_http_request(\n url=url,\n headers=_headers(helper),\n method='POST',\n verify=False,\n payload=entries[i:i+500])\n resp.raise_for_status()\n\n\n@timer('remove_from_kvstore')\ndef remove_from_kvstore(helper, name, rm_entries, stats):\n \"\"\"Removes the specified entries from the kvstore.\"\"\"\n if not rm_entries:\n return\n\n helper.log_info('Removing {0} kvstore entries for MineMeld feed \"{1}\"'.format(\n len(rm_entries), name))\n url = _uri(helper)\n headers = _headers(helper)\n\n # Batch a few at a time, as splunk 414s if the URI is too long, or times\n # out if it's within the length limits but still hits too many entries to\n # finish on time. From some tests, it seems like 500 is a good number,\n # which is nice since it matches the batch_save number.\n #\n # The _key field has been 24 characters in length on my system.\n for i in range(0, len(rm_entries), 500):\n rms = rm_entries[i:i+500]\n query = {'$or': list({'_key': x} for x in rms)}\n resp = helper.send_http_request(\n url=url,\n headers=headers,\n method='DELETE',\n verify=False,\n parameters={'query': json.dumps(query)})\n resp.raise_for_status()\n\n\ndef save_stats_as_event(helper, ew, stats):\n \"\"\"Saves the stats of getting feed events to the index.\"\"\"\n event = helper.new_event(\n source=helper.get_input_type(),\n index=helper.get_output_index(),\n sourcetype=helper.get_sourcetype(),\n data=json.dumps(stats),\n )\n ew.write_event(event)\n\n\ndef _uri(helper):\n \"\"\"Returns the URL of the kvstore.\"\"\"\n return '/'.join((\n helper.context_meta['server_uri'],\n 'servicesNS',\n 'nobody',\n 'Splunk_TA_paloalto',\n 'storage',\n 'collections',\n 'data',\n 'minemeldfeeds'))\n\n\ndef _headers(helper):\n \"\"\"Returns the auth header for Splunk.\"\"\"\n return {\n 'Authorization': 'Splunk {0}'.format(\n helper.context_meta['session_key'])}\n\n\ndef normalized(name, feed_entries, start):\n \"\"\"Returns a list of normalized kvstore entries.\"\"\"\n data = []\n for feed_entry in feed_entries:\n if 'indicator' not in feed_entry or 'value' not in feed_entry:\n continue\n\n # Make the entry dict.\n entry = feed_entry.copy()\n entry['splunk_source'] = name\n entry['splunk_last_seen'] = start\n\n data.append(entry)\n\n return data\n", "customized_options": [{"name": "feed_url", "value": ""}, {"name": "indicator_timeout", "value": ""}, {"name": "credentials", "value": ""}], "uuid": "81937edbc4ef44a89fb8041f1c1f1624", "sample_count": 0}]}, 
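Aside, not part of the patch: the expiry rule implemented by merge_entries() above keeps an indicator that has dropped out of the feed until its splunk_last_seen plus the configured indicator_timeout (hours, converted to seconds in collect_events) falls before the start of the current run. A minimal sketch of that comparison, with hypothetical sample values:

    import time

    def expired(splunk_last_seen, indicator_timeout, start):
        # Same comparison as merge_entries(): expire once the indicator was
        # last seen more than indicator_timeout seconds before this run began.
        return splunk_last_seen + indicator_timeout < start

    start = time.time()
    timeout = 1 * 3600  # an indicator_timeout of 1 hour, scaled as in collect_events()
    print(expired(start - 7200, timeout, start))   # True: last seen 2h ago
    print(expired(start - 1800, timeout, start))   # False: last seen 30m ago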
"field_extraction_builder": {"pan:aperture": {"data_format": "json"}, "pan:config": {"data_format": "tabular", "table_results": {"delim": ","}}, "pan:decryption": {"data_format": "tabular", "table_results": {"delim": ","}}, "pan:globalprotect": {"data_format": "tabular", "table_results": {"delim": ","}}, "pan:log": {"data_format": "tabular", "table_results": {"delim": ","}}, "pan:minemeld": {"data_format": "json"}, "pan:system": {"data_format": "tabular", "table_results": {"delim": ","}}, "pan:threat": {"data_format": "tabular", "table_results": {"delim": ","}}, "pan:traffic": {"data_format": "tabular", "table_results": {"delim": ","}}, "pan:userid": {"data_format": "tabular", "table_results": {"delim": ","}}, "pan:iot": {"data_format": "json"}, "pan:iot_alert": {"data_format": "json"}, "pan:iot_device": {"data_format": "json"}, "pan:iot_vulnerability": {"data_format": "json"}, "pan:firewall_cloud": {"data_format": "tabular", "table_results": {"delim": " "}}, "pan:xdr_incident": {"data_format": "json"}}, "global_settings_builder": {"global_settings": {"proxy_settings": {"proxy_type": "http"}, "log_settings": {}, "credential_settings": [], "customized_settings": [{"required": false, "name": "autofocus_api_key", "label": " AutoFocus API Key", "placeholder": "", "default_value": "", "help_string": " Used to retrieve metadata about AutoFocus tags. Requires a MineMeld Feed input to download threat indicators. More info: https://splunk.paloaltonetworks.com/autofocus-and-minemeld.html", "type": "password", "format_type": "password", "value": ""}, {"required": false, "name": "wildfire_api_key", "label": " WildFire API Key", "placeholder": "", "default_value": "", "help_string": " Used to retrieve reports from the WildFire Cloud. An API Key is available from the WildFire Portal. https://wildfire.paloaltonetworks.com", "type": "password", "format_type": "password", "value": ""}]}}, "sourcetype_builder": {"minemeld_feed": {"metadata": {"data_input_name": "minemeld_feed"}}}, "validation": {}} \ No newline at end of file diff --git a/Splunk_TA_paloalto/appserver/static/js/build/globalConfig.json b/Splunk_TA_paloalto/appserver/static/js/build/globalConfig.json index 11d3e321..1c68e684 100644 --- a/Splunk_TA_paloalto/appserver/static/js/build/globalConfig.json +++ b/Splunk_TA_paloalto/appserver/static/js/build/globalConfig.json @@ -424,15 +424,14 @@ { "field": "feed_url", "label": " Output Node Feed URL", - "help": "", + "help": "https://", "required": true, "type": "text", "validators": [ { - "type": "string", - "minLength": 0, - "maxLength": 8192, - "errorMsg": "Max length of text input is 8192" + "type": "regex", + "pattern": "^[^https://][a-zA-Z0-9.:/]*$", + "errorMsg": "Remove feed URL protocal(http(s)://). Invalid Feed URL." 
} ] }, diff --git a/Splunk_TA_paloalto/bin/Splunk_TA_paloalto_rh_minemeld_feed.py b/Splunk_TA_paloalto/bin/Splunk_TA_paloalto_rh_minemeld_feed.py index 462d6ad2..035711dc 100644 --- a/Splunk_TA_paloalto/bin/Splunk_TA_paloalto_rh_minemeld_feed.py +++ b/Splunk_TA_paloalto/bin/Splunk_TA_paloalto_rh_minemeld_feed.py @@ -47,9 +47,8 @@ required=True, encrypted=False, default=None, - validator=validator.String( - min_len=0, - max_len=8192, + validator=validator.Pattern( + regex=r"""^(?!https?:\/\/)[a-zA-Z0-9.:\/-]*$""", ) ), field.RestField( diff --git a/Splunk_TA_paloalto/bin/input_module_minemeld_feed.py b/Splunk_TA_paloalto/bin/input_module_minemeld_feed.py index 262540b6..22225cba 100644 --- a/Splunk_TA_paloalto/bin/input_module_minemeld_feed.py +++ b/Splunk_TA_paloalto/bin/input_module_minemeld_feed.py @@ -133,7 +133,7 @@ def pull_from_kvstore(helper, name, start, stats): @timer('retrieve_indicators') def get_feed_entries(helper, name, start, stats): """Pulls the indicators from the minemeld feed.""" - feed_url = helper.get_arg('feed_url') + feed_url = 'https://{0}'.format(helper.get_arg('feed_url')) feed_creds = helper.get_arg('credentials') feed_headers = {} # If auth is specified, add it as a header. diff --git a/Splunk_TA_paloalto/bin/minemeld_feed.py b/Splunk_TA_paloalto/bin/minemeld_feed.py old mode 100644 new mode 100755 index fa38cdc6..46123b06 --- a/Splunk_TA_paloalto/bin/minemeld_feed.py +++ b/Splunk_TA_paloalto/bin/minemeld_feed.py @@ -7,7 +7,7 @@ import json import modinput_wrapper.base_modinput -from solnlib.packages.splunklib import modularinput as smi +from splunklib import modularinput as smi @@ -27,7 +27,7 @@ def __init__(self): use_single_instance = input_module.use_single_instance_mode() else: use_single_instance = False - super(ModInputminemeld_feed, self).__init__("Splunk_TA_paloalto", "minemeld_feed", use_single_instance) + super(ModInputminemeld_feed, self).__init__("splunk_ta_paloalto", "minemeld_feed", use_single_instance) self.global_checkbox_fields = None def get_scheme(self): @@ -46,12 +46,12 @@ def get_scheme(self): For customized inputs, hard code the arguments here to hide argument detail from users. For other input types, arguments should be retrieved from input_module. Defining new input types could be easier. 
""" - scheme.add_argument(smi.Argument("indicator_timeout", title="The Timeout For Indicators", - description="How long to retain indicators (in hours)", - required_on_create=False, + scheme.add_argument(smi.Argument("feed_url", title=" Output Node Feed URL", + description="https://", + required_on_create=True, required_on_edit=False)) - scheme.add_argument(smi.Argument("feed_url", title="Output Node Feed URL", - description="", + scheme.add_argument(smi.Argument("indicator_timeout", title="Indicator Timeout", + description="How long to retain indicators (in hours)", required_on_create=True, required_on_edit=False)) scheme.add_argument(smi.Argument("credentials", title="Feed Credentials", diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/Jinja2-2.11.3.dist-info/INSTALLER b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/Jinja2-2.11.3.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/Jinja2-2.11.3.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/Jinja2-2.11.3.dist-info/LICENSE.rst b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/Jinja2-2.11.3.dist-info/LICENSE.rst new file mode 100644 index 00000000..c37cae49 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/Jinja2-2.11.3.dist-info/LICENSE.rst @@ -0,0 +1,28 @@ +Copyright 2007 Pallets + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED +TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/Jinja2-2.11.3.dist-info/METADATA b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/Jinja2-2.11.3.dist-info/METADATA new file mode 100644 index 00000000..1af8df0f --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/Jinja2-2.11.3.dist-info/METADATA @@ -0,0 +1,106 @@ +Metadata-Version: 2.1 +Name: Jinja2 +Version: 2.11.3 +Summary: A very fast and expressive template engine. 
+Home-page: https://palletsprojects.com/p/jinja/ +Author: Armin Ronacher +Author-email: armin.ronacher@active-4.com +Maintainer: Pallets +Maintainer-email: contact@palletsprojects.com +License: BSD-3-Clause +Project-URL: Documentation, https://jinja.palletsprojects.com/ +Project-URL: Code, https://github.com/pallets/jinja +Project-URL: Issue tracker, https://github.com/pallets/jinja/issues +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Web Environment +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Topic :: Text Processing :: Markup :: HTML +Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.* +Description-Content-Type: text/x-rst +Requires-Dist: MarkupSafe (>=0.23) +Provides-Extra: i18n +Requires-Dist: Babel (>=0.8) ; extra == 'i18n' + +Jinja +===== + +Jinja is a fast, expressive, extensible templating engine. Special +placeholders in the template allow writing code similar to Python +syntax. Then the template is passed data to render the final document. + +It includes: + +- Template inheritance and inclusion. +- Define and import macros within templates. +- HTML templates can use autoescaping to prevent XSS from untrusted + user input. +- A sandboxed environment can safely render untrusted templates. +- AsyncIO support for generating templates and calling async + functions. +- I18N support with Babel. +- Templates are compiled to optimized Python code just-in-time and + cached, or can be compiled ahead-of-time. +- Exceptions point to the correct line in templates to make debugging + easier. +- Extensible filters, tests, functions, and even syntax. + +Jinja's philosophy is that while application logic belongs in Python if +possible, it shouldn't make the template designer's job difficult by +restricting functionality too much. + + +Installing +---------- + +Install and update using `pip`_: + +.. code-block:: text + + $ pip install -U Jinja2 + +.. _pip: https://pip.pypa.io/en/stable/quickstart/ + + +In A Nutshell +------------- + +.. 
code-block:: jinja + + {% extends "base.html" %} + {% block title %}Members{% endblock %} + {% block content %} + + {% endblock %} + + +Links +----- + +- Website: https://palletsprojects.com/p/jinja/ +- Documentation: https://jinja.palletsprojects.com/ +- Releases: https://pypi.org/project/Jinja2/ +- Code: https://github.com/pallets/jinja +- Issue tracker: https://github.com/pallets/jinja/issues +- Test status: https://dev.azure.com/pallets/jinja/_build +- Official chat: https://discord.gg/t6rrQZH + + diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/Jinja2-2.11.3.dist-info/RECORD b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/Jinja2-2.11.3.dist-info/RECORD new file mode 100644 index 00000000..75f9fa3a --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/Jinja2-2.11.3.dist-info/RECORD @@ -0,0 +1,35 @@ +Jinja2-2.11.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +Jinja2-2.11.3.dist-info/LICENSE.rst,sha256=O0nc7kEF6ze6wQ-vG-JgQI_oXSUrjp3y4JefweCUQ3s,1475 +Jinja2-2.11.3.dist-info/METADATA,sha256=PscpJ1C3RSp8xcjV3fAuTz13rKbGxmzJXnMQFH-WKhs,3535 +Jinja2-2.11.3.dist-info/RECORD,, +Jinja2-2.11.3.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +Jinja2-2.11.3.dist-info/WHEEL,sha256=Z-nyYpwrcSqxfdux5Mbn_DQ525iP7J2DG3JgGvOYyTQ,110 +Jinja2-2.11.3.dist-info/entry_points.txt,sha256=Qy_DkVo6Xj_zzOtmErrATe8lHZhOqdjpt3e4JJAGyi8,61 +Jinja2-2.11.3.dist-info/top_level.txt,sha256=PkeVWtLb3-CqjWi1fO29OCbj55EhX_chhKrCdrVe_zs,7 +jinja2/__init__.py,sha256=LZUXmxJc2GIchfSAeMWsxCWiQYO-w1-736f2Q3I8ms8,1549 +jinja2/_compat.py,sha256=B6Se8HjnXVpzz9-vfHejn-DV2NjaVK-Iewupc5kKlu8,3191 +jinja2/_identifier.py,sha256=EdgGJKi7O1yvr4yFlvqPNEqV6M1qHyQr8Gt8GmVTKVM,1775 +jinja2/asyncfilters.py,sha256=XJtYXTxFvcJ5xwk6SaDL4S0oNnT0wPYvXBCSzc482fI,4250 +jinja2/asyncsupport.py,sha256=ZBFsDLuq3Gtji3Ia87lcyuDbqaHZJRdtShZcqwpFnSQ,7209 +jinja2/bccache.py,sha256=3Pmp4jo65M9FQuIxdxoDBbEDFwe4acDMQf77nEJfrHA,12139 +jinja2/compiler.py,sha256=Ta9W1Lit542wItAHXlDcg0sEOsFDMirCdlFPHAurg4o,66284 +jinja2/constants.py,sha256=RR1sTzNzUmKco6aZicw4JpQpJGCuPuqm1h1YmCNUEFY,1458 +jinja2/debug.py,sha256=neR7GIGGjZH3_ILJGVUYy3eLQCCaWJMXOb7o0kGInWc,8529 +jinja2/defaults.py,sha256=85B6YUUCyWPSdrSeVhcqFVuu_bHUAQXeey--FIwSeVQ,1126 +jinja2/environment.py,sha256=XDSLKc4SqNLMOwTSq3TbWEyA5WyXfuLuVD0wAVjEFwM,50629 +jinja2/exceptions.py,sha256=VjNLawcmf2ODffqVMCQK1cRmvFaUfQWF4u8ouP3QPcE,5425 +jinja2/ext.py,sha256=AtwL5O5enT_L3HR9-oBvhGyUTdGoyaqG_ICtnR_EVd4,26441 +jinja2/filters.py,sha256=9ORilsZrUoydSI9upz8_qGy7gozDWLYoFmlIBFSVRnQ,41439 +jinja2/idtracking.py,sha256=J3O4VHsrbf3wzwiBc7Cro26kHb6_5kbULeIOzocchIU,9211 +jinja2/lexer.py,sha256=nUFLRKhhKmmEWkLI65nQePgcQs7qsRdjVYZETMt_v0g,30331 +jinja2/loaders.py,sha256=C-fST_dmFjgWkp0ZuCkrgICAoOsoSIF28wfAFink0oU,17666 +jinja2/meta.py,sha256=QjyYhfNRD3QCXjBJpiPl9KgkEkGXJbAkCUq4-Ur10EQ,4131 +jinja2/nativetypes.py,sha256=Ul__gtVw4xH-0qvUvnCNHedQeNDwmEuyLJztzzSPeRg,2753 +jinja2/nodes.py,sha256=Mk1oJPVgIjnQw9WOqILvcu3rLepcFZ0ahxQm2mbwDwc,31095 +jinja2/optimizer.py,sha256=gQLlMYzvQhluhzmAIFA1tXS0cwgWYOjprN-gTRcHVsc,1457 +jinja2/parser.py,sha256=fcfdqePNTNyvosIvczbytVA332qpsURvYnCGcjDHSkA,35660 +jinja2/runtime.py,sha256=0y-BRyIEZ9ltByL2Id6GpHe1oDRQAwNeQvI0SKobNMw,30618 +jinja2/sandbox.py,sha256=knayyUvXsZ-F0mk15mO2-ehK9gsw04UhB8td-iUOtLc,17127 +jinja2/tests.py,sha256=iO_Y-9Vo60zrVe1lMpSl5sKHqAxe2leZHC08OoZ8K24,4799 +jinja2/utils.py,sha256=Wy4yC3IByqUWwnKln6SdaixdzgK74P6F5nf-gQZrYnU,22436 
+jinja2/visitor.py,sha256=DUHupl0a4PGp7nxRtZFttUzAi1ccxzqc2hzetPYUz8U,3240 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/Jinja2-2.11.3.dist-info/REQUESTED b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/Jinja2-2.11.3.dist-info/REQUESTED new file mode 100644 index 00000000..e69de29b diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/Jinja2-2.11.3.dist-info/WHEEL b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/Jinja2-2.11.3.dist-info/WHEEL new file mode 100644 index 00000000..01b8fc7d --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/Jinja2-2.11.3.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.36.2) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/Jinja2-2.11.3.dist-info/entry_points.txt b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/Jinja2-2.11.3.dist-info/entry_points.txt new file mode 100644 index 00000000..3619483f --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/Jinja2-2.11.3.dist-info/entry_points.txt @@ -0,0 +1,3 @@ +[babel.extractors] +jinja2 = jinja2.ext:babel_extract [i18n] + diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/Jinja2-2.11.3.dist-info/top_level.txt b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/Jinja2-2.11.3.dist-info/top_level.txt new file mode 100644 index 00000000..7f7afbf3 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/Jinja2-2.11.3.dist-info/top_level.txt @@ -0,0 +1 @@ +jinja2 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/Mako-1.1.0.dist-info/AUTHORS b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/Mako-1.1.0.dist-info/AUTHORS new file mode 100644 index 00000000..81d16dc1 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/Mako-1.1.0.dist-info/AUTHORS @@ -0,0 +1,13 @@ +Mako was created by Michael Bayer. + +Major contributing authors include: + +- Michael Bayer +- Geoffrey T. Dairiki +- Philip Jenvey +- David Peckam +- Armin Ronacher +- Ben Bangert +- Ben Trofatter + + diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/Mako-1.1.0.dist-info/INSTALLER b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/Mako-1.1.0.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/Mako-1.1.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/Mako-1.1.0.dist-info/LICENSE b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/Mako-1.1.0.dist-info/LICENSE new file mode 100644 index 00000000..ec32acf0 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/Mako-1.1.0.dist-info/LICENSE @@ -0,0 +1,19 @@ +Copyright 2006-2019 the Mako authors and contributors . + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. \ No newline at end of file diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/Mako-1.1.0.dist-info/METADATA b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/Mako-1.1.0.dist-info/METADATA new file mode 100644 index 00000000..53281690 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/Mako-1.1.0.dist-info/METADATA @@ -0,0 +1,80 @@ +Metadata-Version: 2.1 +Name: Mako +Version: 1.1.0 +Summary: A super-fast templating language that borrows the best ideas from the existing templating languages. +Home-page: https://www.makotemplates.org/ +Author: Mike Bayer +Author-email: mike@zzzcomputing.com +License: MIT +Project-URL: Documentation, https://docs.makotemplates.org +Project-URL: Issue Tracker, https://github.com/sqlalchemy/mako +Keywords: templates +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: License :: OSI Approved :: MIT License +Classifier: Environment :: Web Environment +Classifier: Intended Audience :: Developers +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content +Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.* +License-File: LICENSE +License-File: AUTHORS +Requires-Dist: MarkupSafe (>=0.9.2) + +========================= +Mako Templates for Python +========================= + +Mako is a template library written in Python. It provides a familiar, non-XML +syntax which compiles into Python modules for maximum performance. Mako's +syntax and API borrows from the best ideas of many others, including Django +templates, Cheetah, Myghty, and Genshi. Conceptually, Mako is an embedded +Python (i.e. Python Server Page) language, which refines the familiar ideas +of componentized layout and inheritance to produce one of the most +straightforward and flexible models available, while also maintaining close +ties to Python calling and scoping semantics. + +Nutshell +======== + +:: + + <%inherit file="base.html"/> + <% + rows = [[v for v in range(0,10)] for row in range(0,10)] + %> + + % for row in rows: + ${makerow(row)} + % endfor +
+ + <%def name="makerow(row)"> + + % for name in row: + ${name}\ + % endfor + + + +Philosophy +=========== + +Python is a great scripting language. Don't reinvent the wheel...your templates can handle it ! + +Documentation +============== + +See documentation for Mako at https://docs.makotemplates.org/en/latest/ + +License +======== + +Mako is licensed under an MIT-style license (see LICENSE). +Other incorporated projects may be licensed under different licenses. +All licenses allow for non-commercial and commercial use. + + diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/Mako-1.1.0.dist-info/RECORD b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/Mako-1.1.0.dist-info/RECORD new file mode 100644 index 00000000..459e0e40 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/Mako-1.1.0.dist-info/RECORD @@ -0,0 +1,36 @@ +../../bin/mako-render,sha256=Abuf-Q4pfuZRPuU3bFhVoP58TnIFa4hsg1lDpMVEqIk,213 +Mako-1.1.0.dist-info/AUTHORS,sha256=Io2Vw70mjYS7yFcUuJxhIGiMUQt8FWJuxiiwyUW1WRg,282 +Mako-1.1.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +Mako-1.1.0.dist-info/LICENSE,sha256=w7EkZzyThyFWdXqTTHW0mNaO7AlRDiXlSdncIBN3ZJk,1097 +Mako-1.1.0.dist-info/METADATA,sha256=rYwZpxohFsfbyCtc2EkFdkX3AwsNYtcbej_gSo1LM5E,2517 +Mako-1.1.0.dist-info/RECORD,, +Mako-1.1.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +Mako-1.1.0.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92 +Mako-1.1.0.dist-info/entry_points.txt,sha256=pRPyt8V0Ss7h84ZQVCX9MkK0Xo5MClQWlQ67pJ4bGdU,586 +Mako-1.1.0.dist-info/top_level.txt,sha256=LItdH8cDPetpUu8rUyBG3DObS6h9Gcpr9j_WLj2S-R0,5 +mako/__init__.py,sha256=_aGTYpZhxW3tTjfMiZRDMe7a5t83KyFC8SBrakW187M,242 +mako/_ast_util.py,sha256=kCfMGlNJz3ooAOh70SmPjgcEeodfyTHOIYGKQTJq0dQ,20414 +mako/ast.py,sha256=U3eOguvzWZJmEHqosCs9E7OdCzozF96GHr2kD4ryjdw,6789 +mako/cache.py,sha256=N1RtINCiEAYPWA45PEnePMqU8ZMiFsWdcbd703sbm9I,7736 +mako/cmd.py,sha256=KTeCI4vqL2pV2jlRjV2lgZrOlmbDqj_xPUzzRR8pMJ4,2471 +mako/codegen.py,sha256=P0lAlTRKI7MRVoGor4T2Mc3mETldJvZw03pDNQ5Ms_c,47892 +mako/compat.py,sha256=Kgaru5DR4TU51iOh797RmdGVjuNOxu2-G0ZnuPDCkQI,3848 +mako/exceptions.py,sha256=aNqpkbKDVP_C9k1j7OgM7UrtFZhriDR82uRfj_7iyjk,13158 +mako/ext/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +mako/ext/autohandler.py,sha256=9U1iazp-tQrODVOUDqgsw5-ctVjRKy7vDVieOuLjO54,1885 +mako/ext/babelplugin.py,sha256=CGu4LFEPFg83AATTgqOBOC-Ce3U_597ERb6MQYwkLs8,2138 +mako/ext/beaker_cache.py,sha256=anK7g8cjmDLHTJ5o8aDS3LrzudeBvrEMe0eUx2p1nSs,2599 +mako/ext/extract.py,sha256=PSgnjKSVRBnSm0mQj7hReqICpwb0oR7Gn_S9Xl3kaN4,4616 +mako/ext/linguaplugin.py,sha256=uct4HiA9Mc-vwRTxM3mmrtN3pEVsERft01_xp_Go8as,1954 +mako/ext/preprocessors.py,sha256=CpTghGtGi0hTpg9mgbyJQ0LzL23OKUp3nPe2Jz-BUwM,576 +mako/ext/pygmentplugin.py,sha256=JOGLEGKZiBRZ1TQrjmqpSJEprr7PHYLpTAA_TBDCAsM,4951 +mako/ext/turbogears.py,sha256=SnaqDOA03mwExUmmtSsBKTKpTdZqbxCjsONUWTgj4ds,2165 +mako/filters.py,sha256=gsNMSq2KSZfAiCaqVeojrUevFxCrI5uZOKnyLkRGMcU,6063 +mako/lexer.py,sha256=lPjsDAuam4Sc8VmIHumMVHV8azmFpIRISyWOiunblVs,16926 +mako/lookup.py,sha256=Yl4W3OMAh875v7YMQnLmHHlf-TKHm99-gBQVtuIb3Pc,12718 +mako/parsetree.py,sha256=HvZxqgYEtY3BOIagiSIYyysEPmNRRmAUzq3JeqwgtQ4,19411 +mako/pygen.py,sha256=edYGqAn2XcBPkwgfoALRziLEcIG231szwAGwmSu-9Q8,10073 +mako/pyparser.py,sha256=moglp-107EIdIA6bMVt6oHvZmPbERJV8ieIyhUSZ6vM,7789 +mako/runtime.py,sha256=y38I58jUR5kCB1DxOzXqvneuAnkxc8xifh21vli73EY,28040 
+mako/template.py,sha256=wfRlvGu3IaRZ2WfZjy6Zf6c2qksvAVwDpTggKTVIpic,26530 +mako/util.py,sha256=wIeZpUByHPBnIAXgMXvEpyTY7uJAGJ5dZmXOnpKVRuY,11015 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/Mako-1.1.0.dist-info/REQUESTED b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/Mako-1.1.0.dist-info/REQUESTED new file mode 100644 index 00000000..e69de29b diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/Mako-1.1.0.dist-info/WHEEL b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/Mako-1.1.0.dist-info/WHEEL new file mode 100644 index 00000000..becc9a66 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/Mako-1.1.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.1) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/Mako-1.1.0.dist-info/entry_points.txt b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/Mako-1.1.0.dist-info/entry_points.txt new file mode 100644 index 00000000..3b150064 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/Mako-1.1.0.dist-info/entry_points.txt @@ -0,0 +1,20 @@ + + [python.templating.engines] + mako = mako.ext.turbogears:TGPlugin + + [pygments.lexers] + mako = mako.ext.pygmentplugin:MakoLexer + html+mako = mako.ext.pygmentplugin:MakoHtmlLexer + xml+mako = mako.ext.pygmentplugin:MakoXmlLexer + js+mako = mako.ext.pygmentplugin:MakoJavascriptLexer + css+mako = mako.ext.pygmentplugin:MakoCssLexer + + [babel.extractors] + mako = mako.ext.babelplugin:extract + + [lingua.extractors] + mako = mako.ext.linguaplugin:LinguaMakoExtractor + + [console_scripts] + mako-render = mako.cmd:cmdline + \ No newline at end of file diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/Mako-1.1.0.dist-info/top_level.txt b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/Mako-1.1.0.dist-info/top_level.txt new file mode 100644 index 00000000..2951cdd4 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/Mako-1.1.0.dist-info/top_level.txt @@ -0,0 +1 @@ +mako diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/Mako-1.2.4.dist-info/INSTALLER b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/Mako-1.2.4.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/Mako-1.2.4.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/Mako-1.2.4.dist-info/LICENSE b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/Mako-1.2.4.dist-info/LICENSE new file mode 100644 index 00000000..be84a38e --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/Mako-1.2.4.dist-info/LICENSE @@ -0,0 +1,19 @@ +Copyright 2006-2022 the Mako authors and contributors . + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/Mako-1.2.4.dist-info/METADATA b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/Mako-1.2.4.dist-info/METADATA new file mode 100644 index 00000000..33a75808 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/Mako-1.2.4.dist-info/METADATA @@ -0,0 +1,87 @@ +Metadata-Version: 2.1 +Name: Mako +Version: 1.2.4 +Summary: A super-fast templating language that borrows the best ideas from the existing templating languages. +Home-page: https://www.makotemplates.org/ +Author: Mike Bayer +Author-email: mike@zzzcomputing.com +License: MIT +Project-URL: Documentation, https://docs.makotemplates.org +Project-URL: Issue Tracker, https://github.com/sqlalchemy/mako +Classifier: Development Status :: 5 - Production/Stable +Classifier: License :: OSI Approved :: MIT License +Classifier: Environment :: Web Environment +Classifier: Intended Audience :: Developers +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content +Requires-Python: >=3.7 +Description-Content-Type: text/x-rst +License-File: LICENSE +Requires-Dist: MarkupSafe (>=0.9.2) +Requires-Dist: importlib-metadata ; python_version < "3.8" +Provides-Extra: babel +Requires-Dist: Babel ; extra == 'babel' +Provides-Extra: lingua +Requires-Dist: lingua ; extra == 'lingua' +Provides-Extra: testing +Requires-Dist: pytest ; extra == 'testing' + +========================= +Mako Templates for Python +========================= + +Mako is a template library written in Python. It provides a familiar, non-XML +syntax which compiles into Python modules for maximum performance. Mako's +syntax and API borrows from the best ideas of many others, including Django +templates, Cheetah, Myghty, and Genshi. Conceptually, Mako is an embedded +Python (i.e. Python Server Page) language, which refines the familiar ideas +of componentized layout and inheritance to produce one of the most +straightforward and flexible models available, while also maintaining close +ties to Python calling and scoping semantics. + +Nutshell +======== + +:: + + <%inherit file="base.html"/> + <% + rows = [[v for v in range(0,10)] for row in range(0,10)] + %> + + % for row in rows: + ${makerow(row)} + % endfor +
+ + <%def name="makerow(row)"> + + % for name in row: + ${name}\ + % endfor + + + +Philosophy +=========== + +Python is a great scripting language. Don't reinvent the wheel...your templates can handle it ! + +Documentation +============== + +See documentation for Mako at https://docs.makotemplates.org/en/latest/ + +License +======== + +Mako is licensed under an MIT-style license (see LICENSE). +Other incorporated projects may be licensed under different licenses. +All licenses allow for non-commercial and commercial use. diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/Mako-1.2.4.dist-info/RECORD b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/Mako-1.2.4.dist-info/RECORD new file mode 100644 index 00000000..89c5b592 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/Mako-1.2.4.dist-info/RECORD @@ -0,0 +1,42 @@ +../../bin/mako-render,sha256=Abuf-Q4pfuZRPuU3bFhVoP58TnIFa4hsg1lDpMVEqIk,213 +Mako-1.2.4.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +Mako-1.2.4.dist-info/LICENSE,sha256=dg8is-nqSlDrmSAb2N0RiGnygQjPtkzM5tGzBc-a6fo,1098 +Mako-1.2.4.dist-info/METADATA,sha256=MlPkZcQ5bASEMtzkRaH8aRSQE6gmLH3KTnASUawz6eA,2909 +Mako-1.2.4.dist-info/RECORD,, +Mako-1.2.4.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +Mako-1.2.4.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92 +Mako-1.2.4.dist-info/entry_points.txt,sha256=LsKkUsOsJQYbJ2M72hZCm968wi5K8Ywb5uFxCuN8Obk,512 +Mako-1.2.4.dist-info/top_level.txt,sha256=LItdH8cDPetpUu8rUyBG3DObS6h9Gcpr9j_WLj2S-R0,5 +mako/__init__.py,sha256=R1cQoVGhYA-fl43kNSPKm6kzdJOs28e8sq8WYMHctMQ,242 +mako/_ast_util.py,sha256=BcwJLuE4E-aiFXi_fanO378Cn3Ou03bJxc6Incjse4Y,20247 +mako/ast.py,sha256=h07xBpz2l19RSwpejrhkhgB4r5efpwGmsYOy_L8xvUc,6642 +mako/cache.py,sha256=jkspun9tLgu0IVKSmo_fkL_DAbSTl2P5a5zkMBkjZvk,7680 +mako/cmd.py,sha256=vQg9ip89KMsuZEGamCRAPg7UyDNlpMmnG3XHDNLHS5o,2814 +mako/codegen.py,sha256=h1z8DGLkB92nbUz2OZGVmUKqPr9kVNbnNL8KnLizYAk,47309 +mako/compat.py,sha256=Sa3Rzrjl44xo25nXUHbhfIrEoMgceq5-Ohl0FO6cCHk,1913 +mako/exceptions.py,sha256=xQZKYdb-4d8rcrNFsFzjGSEuNG4upFqGNPErtSCDqfI,12530 +mako/ext/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +mako/ext/autohandler.py,sha256=-hNv4VHbQplLGDt5e4mFsBC-QpfWMjKokOe0axDP308,1885 +mako/ext/babelplugin.py,sha256=s6ZIAh1hUhsJIiF3j4soVHrFN_1cRJ_e3sEbz7ein7k,2091 +mako/ext/beaker_cache.py,sha256=D6gh_ke7QOKiSJtq9v67RvmqCRMDJx-IwTcd-NDjKvk,2578 +mako/ext/extract.py,sha256=EhXglj2eW5u80T3xWWB7jMgL8oNDfAQaD5E5IRiL9N0,4659 +mako/ext/linguaplugin.py,sha256=iLip2gZ0ya5pooHrxwZrP8VFQfJidXmgPZ5h1j30Kow,1935 +mako/ext/preprocessors.py,sha256=pEUbmfSO2zb4DuCt_-_oYnWypWiXs4MnJHxjTMiks5A,576 +mako/ext/pygmentplugin.py,sha256=GuOd93TjetzpTfW5oUEtuPS7jKDHgJIH3Faiaq76S0c,4753 +mako/ext/turbogears.py,sha256=mxFDF59NFK6cm__3qwGjZ1VAW0qdjJWNj23l6dcwqEg,2141 +mako/filters.py,sha256=rlHJ2L5RFr5Gf-MyOJKZI7TSJpM5oBXH58niJWCp2-4,4658 +mako/lexer.py,sha256=GOHNLeSlTIEa_yV8W5Qr27SjaPlJcO0Kij7Z2rpUkCA,15982 +mako/lookup.py,sha256=_2VPSA2CgCiT0Vd9GnSIjyY5wlpXiB2C5luXJP7gym8,12429 +mako/parsetree.py,sha256=pXbZP0orsT3iBIgWa9yD1TEfvytsCaXu2Ttws8RTMGM,19007 +mako/pygen.py,sha256=K-l_hsvXfWdMTunfHyVxvA5EG4Uzr4Qaw6IUc3hw8zI,10416 +mako/pyparser.py,sha256=diSXgo-ZwdZxbRsNZ1DmARQKVnlOFc6Qgx9Dc3wZB_U,7032 +mako/runtime.py,sha256=MwO5T1rGy0yLeJiFh2hh5cO_kfd5_9fJq_vfBzLFe_0,27806 +mako/template.py,sha256=gEhMPjKZ1Q_sYWWg6PLnRX-KBeTF0kBnyRZimlmgQks,23858 +mako/testing/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 
+mako/testing/_config.py,sha256=k-qpnsnbXUoN-ykMN5BRpg84i1x0p6UsAddKQnrIytU,3566 +mako/testing/assertions.py,sha256=XnYDPSnDFiEX9eO95OZ5LndZrUpJ6_xGofe6qDzJxqU,5162 +mako/testing/config.py,sha256=wmYVZfzGvOK3mJUZpzmgO8-iIgvaCH41Woi4yDpxq6E,323 +mako/testing/exclusions.py,sha256=_t6ADKdatk3f18tOfHV_ZY6u_ZwQsKphZ2MXJVSAOcI,1553 +mako/testing/fixtures.py,sha256=nEp7wTusf7E0n3Q-BHJW2s_t1vx0KB9poadQ1BmIJzE,3044 +mako/testing/helpers.py,sha256=kTaIg8OL1uvcuLptbRA_aJtGndIDDaxAzacYbv_Km1Q,1521 +mako/util.py,sha256=XmYQmq6WfMAt-BPM7zhT9lybEqHVIWCM9wF1ukzqpew,10638 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/Mako-1.2.4.dist-info/REQUESTED b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/Mako-1.2.4.dist-info/REQUESTED new file mode 100644 index 00000000..e69de29b diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/Mako-1.2.4.dist-info/WHEEL b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/Mako-1.2.4.dist-info/WHEEL new file mode 100644 index 00000000..57e3d840 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/Mako-1.2.4.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.38.4) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/Mako-1.2.4.dist-info/entry_points.txt b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/Mako-1.2.4.dist-info/entry_points.txt new file mode 100644 index 00000000..30f31b2b --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/Mako-1.2.4.dist-info/entry_points.txt @@ -0,0 +1,18 @@ +[babel.extractors] +mako = mako.ext.babelplugin:extract [babel] + +[console_scripts] +mako-render = mako.cmd:cmdline + +[lingua.extractors] +mako = mako.ext.linguaplugin:LinguaMakoExtractor [lingua] + +[pygments.lexers] +css+mako = mako.ext.pygmentplugin:MakoCssLexer +html+mako = mako.ext.pygmentplugin:MakoHtmlLexer +js+mako = mako.ext.pygmentplugin:MakoJavascriptLexer +mako = mako.ext.pygmentplugin:MakoLexer +xml+mako = mako.ext.pygmentplugin:MakoXmlLexer + +[python.templating.engines] +mako = mako.ext.turbogears:TGPlugin diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/Mako-1.2.4.dist-info/top_level.txt b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/Mako-1.2.4.dist-info/top_level.txt new file mode 100644 index 00000000..2951cdd4 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/Mako-1.2.4.dist-info/top_level.txt @@ -0,0 +1 @@ +mako diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/MarkupSafe-2.0.1.dist-info/INSTALLER b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/MarkupSafe-2.0.1.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/MarkupSafe-2.0.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/MarkupSafe-2.0.1.dist-info/LICENSE.rst b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/MarkupSafe-2.0.1.dist-info/LICENSE.rst new file mode 100644 index 00000000..9d227a0c --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/MarkupSafe-2.0.1.dist-info/LICENSE.rst @@ -0,0 +1,28 @@ +Copyright 2010 Pallets + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2. 
Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED +TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/MarkupSafe-2.0.1.dist-info/METADATA b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/MarkupSafe-2.0.1.dist-info/METADATA new file mode 100644 index 00000000..e87ebb99 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/MarkupSafe-2.0.1.dist-info/METADATA @@ -0,0 +1,101 @@ +Metadata-Version: 2.1 +Name: MarkupSafe +Version: 2.0.1 +Summary: Safely add untrusted strings to HTML/XML markup. +Home-page: https://palletsprojects.com/p/markupsafe/ +Author: Armin Ronacher +Author-email: armin.ronacher@active-4.com +Maintainer: Pallets +Maintainer-email: contact@palletsprojects.com +License: BSD-3-Clause +Project-URL: Donate, https://palletsprojects.com/donate +Project-URL: Documentation, https://markupsafe.palletsprojects.com/ +Project-URL: Changes, https://markupsafe.palletsprojects.com/changes/ +Project-URL: Source Code, https://github.com/pallets/markupsafe/ +Project-URL: Issue Tracker, https://github.com/pallets/markupsafe/issues/ +Project-URL: Twitter, https://twitter.com/PalletsTeam +Project-URL: Chat, https://discord.gg/pallets +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Web Environment +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content +Classifier: Topic :: Text Processing :: Markup :: HTML +Requires-Python: >=3.6 +Description-Content-Type: text/x-rst +License-File: LICENSE.rst + +MarkupSafe +========== + +MarkupSafe implements a text object that escapes characters so it is +safe to use in HTML and XML. Characters that have special meanings are +replaced so that they display as the actual characters. This mitigates +injection attacks, meaning untrusted user input can safely be displayed +on a page. + + +Installing +---------- + +Install and update using `pip`_: + +.. code-block:: text + + pip install -U MarkupSafe + +.. _pip: https://pip.pypa.io/en/stable/quickstart/ + + +Examples +-------- + +.. 
code-block:: pycon + + >>> from markupsafe import Markup, escape + + >>> # escape replaces special characters and wraps in Markup + >>> escape("<script>alert(document.cookie);</script>") + Markup('&lt;script&gt;alert(document.cookie);&lt;/script&gt;') + + >>> # wrap in Markup to mark text "safe" and prevent escaping + >>> Markup("<strong>Hello</strong>") + Markup('<strong>Hello</strong>') + + >>> escape(Markup("<strong>Hello</strong>")) + Markup('<strong>Hello</strong>') + + >>> # Markup is a str subclass + >>> # methods and operators escape their arguments + >>> template = Markup("Hello <em>{name}</em>") + >>> template.format(name='"World"') + Markup('Hello <em>&#34;World&#34;</em>') + + +Donate +------ + +The Pallets organization develops and supports MarkupSafe and other +popular packages. In order to grow the community of contributors and +users, and allow the maintainers to devote more time to the projects, +`please donate today`_. + +.. _please donate today: https://palletsprojects.com/donate + + +Links +----- + +- Documentation: https://markupsafe.palletsprojects.com/ +- Changes: https://markupsafe.palletsprojects.com/changes/ +- PyPI Releases: https://pypi.org/project/MarkupSafe/ +- Source Code: https://github.com/pallets/markupsafe/ +- Issue Tracker: https://github.com/pallets/markupsafe/issues/ +- Website: https://palletsprojects.com/p/markupsafe/ +- Twitter: https://twitter.com/PalletsTeam +- Chat: https://discord.gg/pallets + + diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/MarkupSafe-2.0.1.dist-info/RECORD b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/MarkupSafe-2.0.1.dist-info/RECORD new file mode 100644 index 00000000..ab618d33 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/MarkupSafe-2.0.1.dist-info/RECORD @@ -0,0 +1,13 @@ +MarkupSafe-2.0.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +MarkupSafe-2.0.1.dist-info/LICENSE.rst,sha256=SJqOEQhQntmKN7uYPhHg9-HTHwvY-Zp5yESOf_N9B-o,1475 +MarkupSafe-2.0.1.dist-info/METADATA,sha256=lknelt-VPHWai5EJcvZpATGKVbXkg74h7CQuPwDS71U,3237 +MarkupSafe-2.0.1.dist-info/RECORD,, +MarkupSafe-2.0.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +MarkupSafe-2.0.1.dist-info/WHEEL,sha256=T7Cp5xu87yB0VfKahSR3N0JT_FVycX4pq6-fNwtW39g,221 +MarkupSafe-2.0.1.dist-info/top_level.txt,sha256=qy0Plje5IJuvsCBjejJyhDCjEAdcDLK_2agVcex8Z6U,11 +markupsafe/__init__.py,sha256=9Tez4UIlI7J6_sQcUFK1dKniT_b_8YefpGIyYJ3Sr2Q,8923 +markupsafe/_native.py,sha256=GTKEV-bWgZuSjklhMHOYRHU9k0DMewTf5mVEZfkbuns,1986 +markupsafe/_speedups.c,sha256=CDDtwaV21D2nYtypnMQzxvvpZpcTvIs8OZ6KDa1g4t0,7400 +markupsafe/_speedups.cpython-37m-x86_64-linux-gnu.so,sha256=-pXHCix2zJNQJAxiYofThwxd31HBcNtF5a-bsrUgcuc,53568 +markupsafe/_speedups.pyi,sha256=vfMCsOgbAXRNLUXkyuyonG8uEWKYU4PDqNuMaDELAYw,229 +markupsafe/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/MarkupSafe-2.0.1.dist-info/REQUESTED b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/MarkupSafe-2.0.1.dist-info/REQUESTED new file mode 100644 index 00000000..e69de29b diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/MarkupSafe-2.0.1.dist-info/WHEEL b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/MarkupSafe-2.0.1.dist-info/WHEEL new file mode 100644 index 00000000..1b8892ab --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/MarkupSafe-2.0.1.dist-info/WHEEL @@ -0,0 +1,8 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.36.2) +Root-Is-Purelib: false +Tag: cp37-cp37m-manylinux_2_5_x86_64 +Tag: cp37-cp37m-manylinux1_x86_64 +Tag: cp37-cp37m-manylinux_2_12_x86_64 +Tag:
cp37-cp37m-manylinux2010_x86_64 + diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/MarkupSafe-2.0.1.dist-info/top_level.txt b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/MarkupSafe-2.0.1.dist-info/top_level.txt new file mode 100644 index 00000000..75bf7292 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/MarkupSafe-2.0.1.dist-info/top_level.txt @@ -0,0 +1 @@ +markupsafe diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/PySocks-1.7.1.dist-info/INSTALLER b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/PySocks-1.7.1.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/PySocks-1.7.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/PySocks-1.7.1.dist-info/LICENSE b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/PySocks-1.7.1.dist-info/LICENSE new file mode 100644 index 00000000..04b6b1f3 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/PySocks-1.7.1.dist-info/LICENSE @@ -0,0 +1,22 @@ +Copyright 2006 Dan-Haim. All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. +3. Neither the name of Dan Haim nor the names of his contributors may be used + to endorse or promote products derived from this software without specific + prior written permission. + +THIS SOFTWARE IS PROVIDED BY DAN HAIM "AS IS" AND ANY EXPRESS OR IMPLIED +WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO +EVENT SHALL DAN HAIM OR HIS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA +OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT +OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/PySocks-1.7.1.dist-info/METADATA b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/PySocks-1.7.1.dist-info/METADATA new file mode 100644 index 00000000..ae2ae341 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/PySocks-1.7.1.dist-info/METADATA @@ -0,0 +1,321 @@ +Metadata-Version: 2.1 +Name: PySocks +Version: 1.7.1 +Summary: A Python SOCKS client module. See https://github.com/Anorov/PySocks for more information.
+Home-page: https://github.com/Anorov/PySocks +Author: Anorov +Author-email: anorov.vorona@gmail.com +License: BSD +Keywords: socks,proxy +Platform: UNKNOWN +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.* +Description-Content-Type: text/markdown + +PySocks +======= + +PySocks lets you send traffic through SOCKS and HTTP proxy servers. It is a modern fork of [SocksiPy](http://socksipy.sourceforge.net/) with bug fixes and extra features. + +Acts as a drop-in replacement for the socket module. Seamlessly configure SOCKS proxies for any socket object by calling `socket_object.set_proxy()`. + +---------------- + +Features +======== + +* SOCKS proxy client for Python 2.7 and 3.4+ +* TCP supported +* UDP mostly supported (issues may occur in some edge cases) +* HTTP proxy client included but not supported or recommended (you should use urllib2's or requests' own HTTP proxy interface) +* urllib2 handler included. `pip install` / `setup.py install` will automatically install the `sockshandler` module. + +Installation +============ + + pip install PySocks + +Or download the tarball / `git clone` and... + + python setup.py install + +These will install both the `socks` and `sockshandler` modules. + +Alternatively, include just `socks.py` in your project. + +-------------------------------------------- + +*Warning:* PySocks/SocksiPy only supports HTTP proxies that use CONNECT tunneling. Certain HTTP proxies may not work with this library. If you wish to use HTTP (not SOCKS) proxies, it is recommended that you rely on your HTTP client's native proxy support (`proxies` dict for `requests`, or `urllib2.ProxyHandler` for `urllib2`) instead. + +-------------------------------------------- + +Usage +===== + +## socks.socksocket ## + + import socks + + s = socks.socksocket() # Same API as socket.socket in the standard lib + + s.set_proxy(socks.SOCKS5, "localhost") # SOCKS4 and SOCKS5 use port 1080 by default + # Or + s.set_proxy(socks.SOCKS4, "localhost", 4444) + # Or + s.set_proxy(socks.HTTP, "5.5.5.5", 8888) + + # Can be treated identically to a regular socket object + s.connect(("www.somesite.com", 80)) + s.sendall("GET / HTTP/1.1 ...") + print s.recv(4096) + +## Monkeypatching ## + +To monkeypatch the entire standard library with a single default proxy: + + import urllib2 + import socket + import socks + + socks.set_default_proxy(socks.SOCKS5, "localhost") + socket.socket = socks.socksocket + + urllib2.urlopen("http://www.somesite.com/") # All requests will pass through the SOCKS proxy + +Note that monkeypatching may not work for all standard modules or for all third party modules, and generally isn't recommended. Monkeypatching is usually an anti-pattern in Python. + +## urllib2 Handler ## + +Example use case with the `sockshandler` urllib2 handler. Note that you must import both `socks` and `sockshandler`, as the handler is its own module separate from PySocks. The module is included in the PyPI package.
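(Editor's sketch, not upstream documentation: this copy of PySocks is vendored under aob_py3 for Python 3, where `urllib2` has become `urllib.request` and `print` is a function. A rough Python 3 equivalent of the two usage examples above, assuming a SOCKS5 proxy listening on localhost:1080 and using only the PySocks calls documented here, might look like:

    import socket
    import urllib.request

    import socks  # PySocks, vendored with this add-on

    # Per-socket proxy, as in the socksocket example above:
    s = socks.socksocket()
    s.set_proxy(socks.SOCKS5, "localhost", 1080)
    s.connect(("www.example.com", 80))
    s.sendall(b"GET / HTTP/1.1\r\nHost: www.example.com\r\n\r\n")
    print(s.recv(4096))

    # Module-wide monkeypatch, as in the monkeypatching example above:
    socks.set_default_proxy(socks.SOCKS5, "localhost")
    socket.socket = socks.socksocket
    print(urllib.request.urlopen("http://www.example.com/").status)

The upstream urllib2 version of the handler example follows.)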
+ + import urllib2 + import socks + from sockshandler import SocksiPyHandler + + opener = urllib2.build_opener(SocksiPyHandler(socks.SOCKS5, "127.0.0.1", 9050)) + print opener.open("http://www.somesite.com/") # All requests made by the opener will pass through the SOCKS proxy + +-------------------------------------------- + +Original SocksiPy README attached below, amended to reflect API changes. + +-------------------------------------------- + +SocksiPy + +A Python SOCKS module. + +(C) 2006 Dan-Haim. All rights reserved. + +See LICENSE file for details. + + +*WHAT IS A SOCKS PROXY?* + +A SOCKS proxy is a proxy server at the TCP level. In other words, it acts as +a tunnel, relaying all traffic going through it without modifying it. +SOCKS proxies can be used to relay traffic using any network protocol that +uses TCP. + +*WHAT IS SOCKSIPY?* + +This Python module allows you to create TCP connections through a SOCKS +proxy without any special effort. +It also supports relaying UDP packets with a SOCKS5 proxy. + +*PROXY COMPATIBILITY* + +SocksiPy is compatible with three different types of proxies: + +1. SOCKS Version 4 (SOCKS4), including the SOCKS4a extension. +2. SOCKS Version 5 (SOCKS5). +3. HTTP Proxies which support tunneling using the CONNECT method. + +*SYSTEM REQUIREMENTS* + +Being written in Python, SocksiPy can run on any platform that has a Python +interpreter and TCP/IP support. +This module has been tested with Python 2.3 and should work with greater versions +just as well. + + +INSTALLATION +------------- + +Simply copy the file "socks.py" to your Python's `lib/site-packages` directory, +and you're ready to go. [Editor's note: it is better to use `python setup.py install` for PySocks] + + +USAGE +------ + +First load the socks module with the command: + + >>> import socks + >>> + +The socks module provides a class called `socksocket`, which is the basis of all of the module's functionality. + +The `socksocket` object has the same initialization parameters as the normal socket +object to ensure maximal compatibility; however, it should be noted that `socksocket` will only function with family being `AF_INET` and +type being either `SOCK_STREAM` or `SOCK_DGRAM`. +Generally, it is best to initialize the `socksocket` object with no parameters: + + >>> s = socks.socksocket() + >>> + +The `socksocket` object has an interface which is very similar to socket's (in fact +the `socksocket` class is derived from socket) with a few extra methods. +To select the proxy server you would like to use, use the `set_proxy` method, whose +syntax is: + + set_proxy(proxy_type, addr[, port[, rdns[, username[, password]]]]) + +Explanation of the parameters: + +`proxy_type` - The type of the proxy server. This can be one of three possible +choices: `PROXY_TYPE_SOCKS4`, `PROXY_TYPE_SOCKS5` and `PROXY_TYPE_HTTP` for SOCKS4, +SOCKS5 and HTTP servers respectively. `SOCKS4`, `SOCKS5`, and `HTTP` are aliases for these, respectively. + +`addr` - The IP address or DNS name of the proxy server. + +`port` - The port of the proxy server. Defaults to 1080 for socks and 8080 for http. + +`rdns` - This is a boolean flag that modifies the behavior regarding DNS resolving. +If it is set to True, DNS resolving will be performed remotely, on the server. +If it is set to False, DNS resolving will be performed locally. Please note that +setting this to True with SOCKS4 servers actually uses an extension to the protocol, +called SOCKS4a, which may not be supported on all servers (SOCKS5 and http servers +always support DNS).
The default is True. + +`username` - For SOCKS5 servers, this allows simple username / password authentication +with the server. For SOCKS4 servers, this parameter will be sent as the userid. +This parameter is ignored if an HTTP server is being used. If it is not provided, +authentication will not be used (servers may accept unauthenticated requests). + +`password` - This parameter is valid only for SOCKS5 servers and specifies the +respective password for the username provided. + +Example of usage: + + >>> s.set_proxy(socks.SOCKS5, "socks.example.com") # uses default port 1080 + >>> s.set_proxy(socks.SOCKS4, "socks.test.com", 1081) + +After the set_proxy method has been called, simply call the connect method with the +traditional parameters to establish a connection through the proxy: + + >>> s.connect(("www.sourceforge.net", 80)) + >>> + +Connection will take a bit longer to allow negotiation with the proxy server. +Please note that calling connect without calling `set_proxy` earlier will connect +without a proxy (just like a regular socket). + +Errors: Any errors in the connection process will trigger exceptions. The exception +may either be generated by the underlying socket layer or may be custom module +exceptions, whose details follow: + +class `ProxyError` - This is a base exception class. It is not raised directly but +rather all other exception classes raised by this module are derived from it. +This allows an easy way to catch all proxy-related errors. It descends from `IOError`. + +All `ProxyError` exceptions have an attribute `socket_err`, which will contain either a +caught `socket.error` exception, or `None` if there wasn't any. + +class `GeneralProxyError` - When thrown, it indicates a problem which does not fall +into another category. + +* `Sent invalid data` - This error means that unexpected data has been received from +the server. The most common reason is that the server specified as the proxy is +not really a SOCKS4/SOCKS5/HTTP proxy, or maybe the proxy type specified is wrong. + +* `Connection closed unexpectedly` - The proxy server unexpectedly closed the connection. +This may indicate that the proxy server is experiencing network or software problems. + +* `Bad proxy type` - This will be raised if the type of the proxy supplied to the +set_proxy function was not one of `SOCKS4`/`SOCKS5`/`HTTP`. + +* `Bad input` - This will be raised if the `connect()` method is called with bad input +parameters. + +class `SOCKS5AuthError` - This indicates that the connection through a SOCKS5 server +failed due to an authentication problem. + +* `Authentication is required` - This will happen if you use a SOCKS5 server which +requires authentication without providing a username / password at all. + +* `All offered authentication methods were rejected` - This will happen if the proxy +requires a special authentication method which is not supported by this module. + +* `Unknown username or invalid password` - Self descriptive. + +class `SOCKS5Error` - This will be raised for SOCKS5 errors which are not related to +authentication. +The parameter is a tuple containing a code, as given by the server, +and a description of the +error. The possible errors, according to the RFC, are: + +* `0x01` - General SOCKS server failure - If for any reason the proxy server is unable to +fulfill your request (internal server error). +* `0x02` - connection not allowed by ruleset - If the address you're trying to connect to +is blacklisted on the server or requires authentication. 
+* `0x03` - Network unreachable - The target could not be contacted. A router on the network +had replied with a destination net unreachable error. +* `0x04` - Host unreachable - The target could not be contacted. A router on the network +had replied with a destination host unreachable error. +* `0x05` - Connection refused - The target server has actively refused the connection +(the requested port is closed). +* `0x06` - TTL expired - The TTL value of the SYN packet from the proxy to the target server +has expired. This usually means that there are network problems causing the packet +to be caught in a router-to-router "ping-pong". +* `0x07` - Command not supported - For instance if the server does not support UDP. +* `0x08` - Address type not supported - The client has provided an invalid address type. +When using this module, this error should not occur. + +class `SOCKS4Error` - This will be raised for SOCKS4 errors. The parameter is a tuple +containing a code and a description of the error, as given by the server. The +possible errors, according to the specification, are: + +* `0x5B` - Request rejected or failed - Will be raised in the event of a failure for any +reason other than the two mentioned next. +* `0x5C` - request rejected because SOCKS server cannot connect to identd on the client - +The SOCKS server tried an ident lookup on your computer and failed. In this +case you should run an identd server and/or configure your firewall to allow incoming +connections to local port 113 from the remote server. +* `0x5D` - request rejected because the client program and identd report different user-ids - +The SOCKS server performed an ident lookup on your computer and received a +different userid than the one you provided. Change your userid (through the +username parameter of the set_proxy method) to match and try again. + +class `HTTPError` - This will be raised for HTTP errors. The message will contain +the HTTP status code and provided error message. + +After establishing the connection, the object behaves like a standard socket. + +Methods like `makefile()` and `settimeout()` should behave just as they do on regular sockets. +Call the `close()` method to close the connection. + +In addition to the `socksocket` class, an additional function worth mentioning is the +`set_default_proxy` function. The parameters are the same as the `set_proxy` method. +This function will set default proxy settings for newly created `socksocket` objects +for which the proxy settings haven't been changed via the `set_proxy` method. +This is quite useful if you wish to force 3rd party modules to use a SOCKS proxy, +by overriding the socket object.
+For example: + + >>> socks.set_default_proxy(socks.SOCKS5, "socks.example.com") + >>> socket.socket = socks.socksocket + >>> urllib.urlopen("http://www.sourceforge.net/") + + +PROBLEMS +--------- + +Please open a GitHub issue at https://github.com/Anorov/PySocks + + diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/PySocks-1.7.1.dist-info/RECORD b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/PySocks-1.7.1.dist-info/RECORD new file mode 100644 index 00000000..0c8247e0 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/PySocks-1.7.1.dist-info/RECORD @@ -0,0 +1,8 @@ +PySocks-1.7.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +PySocks-1.7.1.dist-info/LICENSE,sha256=cCfiFOAU63i3rcwc7aWspxOnn8T2oMUsnaWz5wfm_-k,1401 +PySocks-1.7.1.dist-info/METADATA,sha256=zbQMizjPOOP4DhEiEX24XXjNrYuIxF9UGUpN0uFDB6Y,13235 +PySocks-1.7.1.dist-info/RECORD,, +PySocks-1.7.1.dist-info/WHEEL,sha256=t_MpApv386-8PVts2R6wsTifdIn0vbUDTVv61IbqFC8,92 +PySocks-1.7.1.dist-info/top_level.txt,sha256=TKSOIfCFBoK9EY8FBYbYqC3PWd3--G15ph9n8-QHPDk,19 +socks.py,sha256=xOYn27t9IGrbTBzWsUUuPa0YBuplgiUykzkOB5V5iFY,31086 +sockshandler.py,sha256=2SYGj-pwt1kjgLoZAmyeaEXCeZDWRmfVS_QG6kErGtY,3966 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/PySocks-1.7.1.dist-info/WHEEL b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/PySocks-1.7.1.dist-info/WHEEL new file mode 100644 index 00000000..129a6735 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/PySocks-1.7.1.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.33.3) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/PySocks-1.7.1.dist-info/top_level.txt b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/PySocks-1.7.1.dist-info/top_level.txt new file mode 100644 index 00000000..9476163a --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/PySocks-1.7.1.dist-info/top_level.txt @@ -0,0 +1,2 @@ +socks +sockshandler diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/_distutils_hack/__init__.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/_distutils_hack/__init__.py new file mode 100644 index 00000000..605a6edc --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/_distutils_hack/__init__.py @@ -0,0 +1,187 @@ +# don't import any costly modules +import sys +import os + + +is_pypy = '__pypy__' in sys.builtin_module_names + + +def warn_distutils_present(): + if 'distutils' not in sys.modules: + return + if is_pypy and sys.version_info < (3, 7): + # PyPy for 3.6 unconditionally imports distutils, so bypass the warning + # https://foss.heptapod.net/pypy/pypy/-/blob/be829135bc0d758997b3566062999ee8b23872b4/lib-python/3/site.py#L250 + return + import warnings + warnings.warn( + "Distutils was imported before Setuptools, but importing Setuptools " + "also replaces the `distutils` module in `sys.modules`. This may lead " + "to undesirable behaviors or errors. To avoid these issues, avoid " + "using distutils directly, ensure that setuptools is installed in the " + "traditional way (e.g. 
not an editable install), and/or make sure " + "that setuptools is always imported before distutils.") + + +def clear_distutils(): + if 'distutils' not in sys.modules: + return + import warnings + warnings.warn("Setuptools is replacing distutils.") + mods = [ + name for name in sys.modules + if name == "distutils" or name.startswith("distutils.") + ] + for name in mods: + del sys.modules[name] + + +def enabled(): + """ + Allow selection of distutils by environment variable. + """ + which = os.environ.get('SETUPTOOLS_USE_DISTUTILS', 'local') + return which == 'local' + + +def ensure_local_distutils(): + import importlib + clear_distutils() + + # With the DistutilsMetaFinder in place, + # perform an import to cause distutils to be + # loaded from setuptools._distutils. Ref #2906. + with shim(): + importlib.import_module('distutils') + + # check that submodules load as expected + core = importlib.import_module('distutils.core') + assert '_distutils' in core.__file__, core.__file__ + assert 'setuptools._distutils.log' not in sys.modules + + +def do_override(): + """ + Ensure that the local copy of distutils is preferred over stdlib. + + See https://github.com/pypa/setuptools/issues/417#issuecomment-392298401 + for more motivation. + """ + if enabled(): + warn_distutils_present() + ensure_local_distutils() + + +class _TrivialRe: + def __init__(self, *patterns): + self._patterns = patterns + + def match(self, string): + return all(pat in string for pat in self._patterns) + + +class DistutilsMetaFinder: + def find_spec(self, fullname, path, target=None): + if path is not None: + return + + method_name = 'spec_for_{fullname}'.format(**locals()) + method = getattr(self, method_name, lambda: None) + return method() + + def spec_for_distutils(self): + if self.is_cpython(): + return + + import importlib + import importlib.abc + import importlib.util + + try: + mod = importlib.import_module('setuptools._distutils') + except Exception: + # There are a couple of cases where setuptools._distutils + # may not be present: + # - An older Setuptools without a local distutils is + # taking precedence. Ref #2957. + # - Path manipulation during sitecustomize removes + # setuptools from the path but only after the hook + # has been loaded. Ref #2980. + # In either case, fall back to stdlib behavior. + return + + class DistutilsLoader(importlib.abc.Loader): + + def create_module(self, spec): + mod.__name__ = 'distutils' + return mod + + def exec_module(self, module): + pass + + return importlib.util.spec_from_loader( + 'distutils', DistutilsLoader(), origin=mod.__file__ + ) + + @staticmethod + def is_cpython(): + """ + Suppress supplying distutils for CPython (build and tests). + Ref #2965 and #3007. + """ + return os.path.isfile('pybuilddir.txt') + + def spec_for_pip(self): + """ + Ensure stdlib distutils when running under pip. + See pypa/pip#8761 for rationale. + """ + if self.pip_imported_during_build(): + return + clear_distutils() + self.spec_for_distutils = lambda: None + + @classmethod + def pip_imported_during_build(cls): + """ + Detect if pip is being imported in a build script. Ref #2355. + """ + import traceback + return any( + cls.frame_file_is_setup(frame) + for frame, line in traceback.walk_stack(None) + ) + + @staticmethod + def frame_file_is_setup(frame): + """ + Return True if the indicated frame suggests a setup.py file. 
+ """ + # some frames may not have __file__ (#2940) + return frame.f_globals.get('__file__', '').endswith('setup.py') + + +DISTUTILS_FINDER = DistutilsMetaFinder() + + +def add_shim(): + DISTUTILS_FINDER in sys.meta_path or insert_shim() + + +class shim: + def __enter__(self): + insert_shim() + + def __exit__(self, exc, value, tb): + remove_shim() + + +def insert_shim(): + sys.meta_path.insert(0, DISTUTILS_FINDER) + + +def remove_shim(): + try: + sys.meta_path.remove(DISTUTILS_FINDER) + except ValueError: + pass diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/_distutils_hack/override.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/_distutils_hack/override.py new file mode 100644 index 00000000..2cc433a4 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/_distutils_hack/override.py @@ -0,0 +1 @@ +__import__('_distutils_hack').do_override() diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/_pyrsistent_version.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/_pyrsistent_version.py new file mode 100644 index 00000000..b6991384 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/_pyrsistent_version.py @@ -0,0 +1 @@ +__version__ = '0.19.3' diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attr/__init__.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attr/__init__.py new file mode 100644 index 00000000..04243782 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attr/__init__.py @@ -0,0 +1,93 @@ +# SPDX-License-Identifier: MIT + +import sys +import warnings + +from functools import partial + +from . import converters, exceptions, filters, setters, validators +from ._cmp import cmp_using +from ._config import get_run_validators, set_run_validators +from ._funcs import asdict, assoc, astuple, evolve, has, resolve_types +from ._make import ( + NOTHING, + Attribute, + Factory, + attrib, + attrs, + fields, + fields_dict, + make_class, + validate, +) +from ._next_gen import define, field, frozen, mutable +from ._version_info import VersionInfo + + +if sys.version_info < (3, 7): # pragma: no cover + warnings.warn( + "Running attrs on Python 3.6 is deprecated & we intend to drop " + "support soon. 
If that's a problem for you, please let us know why & " + "we MAY re-evaluate: ", + DeprecationWarning, + ) + +__version__ = "22.2.0" +__version_info__ = VersionInfo._from_version_string(__version__) + +__title__ = "attrs" +__description__ = "Classes Without Boilerplate" +__url__ = "https://www.attrs.org/" +__uri__ = __url__ +__doc__ = __description__ + " <" + __uri__ + ">" + +__author__ = "Hynek Schlawack" +__email__ = "hs@ox.cx" + +__license__ = "MIT" +__copyright__ = "Copyright (c) 2015 Hynek Schlawack" + + +s = attributes = attrs +ib = attr = attrib +dataclass = partial(attrs, auto_attribs=True) # happy Easter ;) + + +class AttrsInstance: + pass + + +__all__ = [ + "Attribute", + "AttrsInstance", + "Factory", + "NOTHING", + "asdict", + "assoc", + "astuple", + "attr", + "attrib", + "attributes", + "attrs", + "cmp_using", + "converters", + "define", + "evolve", + "exceptions", + "field", + "fields", + "fields_dict", + "filters", + "frozen", + "get_run_validators", + "has", + "ib", + "make_class", + "mutable", + "resolve_types", + "s", + "set_run_validators", + "setters", + "validate", + "validators", +] diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attr/__init__.pyi b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attr/__init__.pyi new file mode 100644 index 00000000..42a2ee2c --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attr/__init__.pyi @@ -0,0 +1,509 @@ +import enum +import sys + +from typing import ( + Any, + Callable, + Dict, + Generic, + List, + Mapping, + Optional, + Protocol, + Sequence, + Tuple, + Type, + TypeVar, + Union, + overload, +) + +# `import X as X` is required to make these public +from . import converters as converters +from . import exceptions as exceptions +from . import filters as filters +from . import setters as setters +from . import validators as validators +from ._cmp import cmp_using as cmp_using +from ._typing_compat import AttrsInstance_ +from ._version_info import VersionInfo + +if sys.version_info >= (3, 10): + from typing import TypeGuard +else: + from typing_extensions import TypeGuard + +__version__: str +__version_info__: VersionInfo +__title__: str +__description__: str +__url__: str +__uri__: str +__author__: str +__email__: str +__license__: str +__copyright__: str + +_T = TypeVar("_T") +_C = TypeVar("_C", bound=type) + +_EqOrderType = Union[bool, Callable[[Any], Any]] +_ValidatorType = Callable[[Any, "Attribute[_T]", _T], Any] +_ConverterType = Callable[[Any], Any] +_FilterType = Callable[["Attribute[_T]", _T], bool] +_ReprType = Callable[[Any], str] +_ReprArgType = Union[bool, _ReprType] +_OnSetAttrType = Callable[[Any, "Attribute[Any]", Any], Any] +_OnSetAttrArgType = Union[ + _OnSetAttrType, List[_OnSetAttrType], setters._NoOpType +] +_FieldTransformer = Callable[ + [type, List["Attribute[Any]"]], List["Attribute[Any]"] +] +# FIXME: in reality, if multiple validators are passed they must be in a list +# or tuple, but those are invariant and so would prevent subtypes of +# _ValidatorType from working when passed in a list or tuple. +_ValidatorArgType = Union[_ValidatorType[_T], Sequence[_ValidatorType[_T]]] + +# We subclass this here to keep the protocol's qualified name clean. +class AttrsInstance(AttrsInstance_, Protocol): + pass + +# _make -- + +class _Nothing(enum.Enum): + NOTHING = enum.auto() + +NOTHING = _Nothing.NOTHING + +# NOTE: Factory lies about its return type to make this possible: +# `x: List[int] # = Factory(list)` +# Work around mypy issue #4554 in the common case by using an overload. 
+if sys.version_info >= (3, 8): + from typing import Literal + @overload + def Factory(factory: Callable[[], _T]) -> _T: ... + @overload + def Factory( + factory: Callable[[Any], _T], + takes_self: Literal[True], + ) -> _T: ... + @overload + def Factory( + factory: Callable[[], _T], + takes_self: Literal[False], + ) -> _T: ... + +else: + @overload + def Factory(factory: Callable[[], _T]) -> _T: ... + @overload + def Factory( + factory: Union[Callable[[Any], _T], Callable[[], _T]], + takes_self: bool = ..., + ) -> _T: ... + +# Static type inference support via __dataclass_transform__ implemented as per: +# https://github.com/microsoft/pyright/blob/1.1.135/specs/dataclass_transforms.md +# This annotation must be applied to all overloads of "define" and "attrs" +# +# NOTE: This is a typing construct and does not exist at runtime. Extensions +# wrapping attrs decorators should declare a separate __dataclass_transform__ +# signature in the extension module using the specification linked above to +# provide pyright support. +def __dataclass_transform__( + *, + eq_default: bool = True, + order_default: bool = False, + kw_only_default: bool = False, + field_descriptors: Tuple[Union[type, Callable[..., Any]], ...] = (()), +) -> Callable[[_T], _T]: ... + +class Attribute(Generic[_T]): + name: str + default: Optional[_T] + validator: Optional[_ValidatorType[_T]] + repr: _ReprArgType + cmp: _EqOrderType + eq: _EqOrderType + order: _EqOrderType + hash: Optional[bool] + init: bool + converter: Optional[_ConverterType] + metadata: Dict[Any, Any] + type: Optional[Type[_T]] + kw_only: bool + on_setattr: _OnSetAttrType + alias: Optional[str] + + def evolve(self, **changes: Any) -> "Attribute[Any]": ... + +# NOTE: We had several choices for the annotation to use for type arg: +# 1) Type[_T] +# - Pros: Handles simple cases correctly +# - Cons: Might produce less informative errors in the case of conflicting +# TypeVars e.g. `attr.ib(default='bad', type=int)` +# 2) Callable[..., _T] +# - Pros: Better error messages than #1 for conflicting TypeVars +# - Cons: Terrible error messages for validator checks. +# e.g. attr.ib(type=int, validator=validate_str) +# -> error: Cannot infer function type argument +# 3) type (and do all of the work in the mypy plugin) +# - Pros: Simple here, and we could customize the plugin with our own errors. +# - Cons: Would need to write mypy plugin code to handle all the cases. +# We chose option #1. + +# `attr` lies about its return type to make the following possible: +# attr() -> Any +# attr(8) -> int +# attr(validator=) -> Whatever the callable expects. +# This makes this type of assignments possible: +# x: int = attr(8) +# +# This form catches explicit None or no default but with no other arguments +# returns Any. +@overload +def attrib( + default: None = ..., + validator: None = ..., + repr: _ReprArgType = ..., + cmp: Optional[_EqOrderType] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + type: None = ..., + converter: None = ..., + factory: None = ..., + kw_only: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + alias: Optional[str] = ..., +) -> Any: ... + +# This form catches an explicit None or no default and infers the type from the +# other arguments. 
+@overload +def attrib( + default: None = ..., + validator: Optional[_ValidatorArgType[_T]] = ..., + repr: _ReprArgType = ..., + cmp: Optional[_EqOrderType] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + type: Optional[Type[_T]] = ..., + converter: Optional[_ConverterType] = ..., + factory: Optional[Callable[[], _T]] = ..., + kw_only: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + alias: Optional[str] = ..., +) -> _T: ... + +# This form catches an explicit default argument. +@overload +def attrib( + default: _T, + validator: Optional[_ValidatorArgType[_T]] = ..., + repr: _ReprArgType = ..., + cmp: Optional[_EqOrderType] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + type: Optional[Type[_T]] = ..., + converter: Optional[_ConverterType] = ..., + factory: Optional[Callable[[], _T]] = ..., + kw_only: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + alias: Optional[str] = ..., +) -> _T: ... + +# This form covers type=non-Type: e.g. forward references (str), Any +@overload +def attrib( + default: Optional[_T] = ..., + validator: Optional[_ValidatorArgType[_T]] = ..., + repr: _ReprArgType = ..., + cmp: Optional[_EqOrderType] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + type: object = ..., + converter: Optional[_ConverterType] = ..., + factory: Optional[Callable[[], _T]] = ..., + kw_only: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + alias: Optional[str] = ..., +) -> Any: ... +@overload +def field( + *, + default: None = ..., + validator: None = ..., + repr: _ReprArgType = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + converter: None = ..., + factory: None = ..., + kw_only: bool = ..., + eq: Optional[bool] = ..., + order: Optional[bool] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + alias: Optional[str] = ..., +) -> Any: ... + +# This form catches an explicit None or no default and infers the type from the +# other arguments. +@overload +def field( + *, + default: None = ..., + validator: Optional[_ValidatorArgType[_T]] = ..., + repr: _ReprArgType = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + converter: Optional[_ConverterType] = ..., + factory: Optional[Callable[[], _T]] = ..., + kw_only: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + alias: Optional[str] = ..., +) -> _T: ... + +# This form catches an explicit default argument. +@overload +def field( + *, + default: _T, + validator: Optional[_ValidatorArgType[_T]] = ..., + repr: _ReprArgType = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + converter: Optional[_ConverterType] = ..., + factory: Optional[Callable[[], _T]] = ..., + kw_only: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + alias: Optional[str] = ..., +) -> _T: ... + +# This form covers type=non-Type: e.g. 
forward references (str), Any +@overload +def field( + *, + default: Optional[_T] = ..., + validator: Optional[_ValidatorArgType[_T]] = ..., + repr: _ReprArgType = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + converter: Optional[_ConverterType] = ..., + factory: Optional[Callable[[], _T]] = ..., + kw_only: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + alias: Optional[str] = ..., +) -> Any: ... +@overload +@__dataclass_transform__(order_default=True, field_descriptors=(attrib, field)) +def attrs( + maybe_cls: _C, + these: Optional[Dict[str, Any]] = ..., + repr_ns: Optional[str] = ..., + repr: bool = ..., + cmp: Optional[_EqOrderType] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., + auto_exc: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + auto_detect: bool = ..., + collect_by_mro: bool = ..., + getstate_setstate: Optional[bool] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + field_transformer: Optional[_FieldTransformer] = ..., + match_args: bool = ..., + unsafe_hash: Optional[bool] = ..., +) -> _C: ... +@overload +@__dataclass_transform__(order_default=True, field_descriptors=(attrib, field)) +def attrs( + maybe_cls: None = ..., + these: Optional[Dict[str, Any]] = ..., + repr_ns: Optional[str] = ..., + repr: bool = ..., + cmp: Optional[_EqOrderType] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., + auto_exc: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + auto_detect: bool = ..., + collect_by_mro: bool = ..., + getstate_setstate: Optional[bool] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + field_transformer: Optional[_FieldTransformer] = ..., + match_args: bool = ..., + unsafe_hash: Optional[bool] = ..., +) -> Callable[[_C], _C]: ... +@overload +@__dataclass_transform__(field_descriptors=(attrib, field)) +def define( + maybe_cls: _C, + *, + these: Optional[Dict[str, Any]] = ..., + repr: bool = ..., + unsafe_hash: Optional[bool] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., + auto_exc: bool = ..., + eq: Optional[bool] = ..., + order: Optional[bool] = ..., + auto_detect: bool = ..., + getstate_setstate: Optional[bool] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + field_transformer: Optional[_FieldTransformer] = ..., + match_args: bool = ..., +) -> _C: ... 
+@overload +@__dataclass_transform__(field_descriptors=(attrib, field)) +def define( + maybe_cls: None = ..., + *, + these: Optional[Dict[str, Any]] = ..., + repr: bool = ..., + unsafe_hash: Optional[bool] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., + auto_exc: bool = ..., + eq: Optional[bool] = ..., + order: Optional[bool] = ..., + auto_detect: bool = ..., + getstate_setstate: Optional[bool] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + field_transformer: Optional[_FieldTransformer] = ..., + match_args: bool = ..., +) -> Callable[[_C], _C]: ... + +mutable = define +frozen = define # they differ only in their defaults + +def fields(cls: Type[AttrsInstance]) -> Any: ... +def fields_dict(cls: Type[AttrsInstance]) -> Dict[str, Attribute[Any]]: ... +def validate(inst: AttrsInstance) -> None: ... +def resolve_types( + cls: _C, + globalns: Optional[Dict[str, Any]] = ..., + localns: Optional[Dict[str, Any]] = ..., + attribs: Optional[List[Attribute[Any]]] = ..., +) -> _C: ... + +# TODO: add support for returning a proper attrs class from the mypy plugin +# we use Any instead of _CountingAttr so that e.g. `make_class('Foo', +# [attr.ib()])` is valid +def make_class( + name: str, + attrs: Union[List[str], Tuple[str, ...], Dict[str, Any]], + bases: Tuple[type, ...] = ..., + repr_ns: Optional[str] = ..., + repr: bool = ..., + cmp: Optional[_EqOrderType] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., + auto_exc: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + collect_by_mro: bool = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + field_transformer: Optional[_FieldTransformer] = ..., +) -> type: ... + +# _funcs -- + +# TODO: add support for returning TypedDict from the mypy plugin +# FIXME: asdict/astuple do not honor their factory args. Waiting on one of +# these: +# https://github.com/python/mypy/issues/4236 +# https://github.com/python/typing/issues/253 +# XXX: remember to fix attrs.asdict/astuple too! +def asdict( + inst: AttrsInstance, + recurse: bool = ..., + filter: Optional[_FilterType[Any]] = ..., + dict_factory: Type[Mapping[Any, Any]] = ..., + retain_collection_types: bool = ..., + value_serializer: Optional[ + Callable[[type, Attribute[Any], Any], Any] + ] = ..., + tuple_keys: Optional[bool] = ..., +) -> Dict[str, Any]: ... + +# TODO: add support for returning NamedTuple from the mypy plugin +def astuple( + inst: AttrsInstance, + recurse: bool = ..., + filter: Optional[_FilterType[Any]] = ..., + tuple_factory: Type[Sequence[Any]] = ..., + retain_collection_types: bool = ..., +) -> Tuple[Any, ...]: ... +def has(cls: type) -> TypeGuard[Type[AttrsInstance]]: ... +def assoc(inst: _T, **changes: Any) -> _T: ... +def evolve(inst: _T, **changes: Any) -> _T: ... + +# _config -- + +def set_run_validators(run: bool) -> None: ... +def get_run_validators() -> bool: ... 
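# (Editor's sketch, not part of the upstream stub: the overloads above type
# both the classic and the modern attrs APIs, e.g.
#
#     import attr
#
#     @attr.define                 # modern API: slotted, type-annotated fields
#     class Proxy:
#         host: str
#         port: int = attr.field(default=1080)
#
#     @attr.s                      # classic API, aliased below
#     class LegacyProxy:
#         host = attr.ib()
#
# `define` and `field` are available from attrs 20.1.0 onward.)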
+ +# aliases -- + +s = attributes = attrs +ib = attr = attrib +dataclass = attrs # Technically, partial(attrs, auto_attribs=True) ;) diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attr/_cmp.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attr/_cmp.py new file mode 100644 index 00000000..ad1e18c7 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attr/_cmp.py @@ -0,0 +1,155 @@ +# SPDX-License-Identifier: MIT + + +import functools +import types + +from ._make import _make_ne + + +_operation_names = {"eq": "==", "lt": "<", "le": "<=", "gt": ">", "ge": ">="} + + +def cmp_using( + eq=None, + lt=None, + le=None, + gt=None, + ge=None, + require_same_type=True, + class_name="Comparable", +): + """ + Create a class that can be passed into `attr.ib`'s ``eq``, ``order``, and + ``cmp`` arguments to customize field comparison. + + The resulting class will have a full set of ordering methods if + at least one of ``{lt, le, gt, ge}`` and ``eq`` are provided. + + :param Optional[callable] eq: `callable` used to evaluate equality + of two objects. + :param Optional[callable] lt: `callable` used to evaluate whether + one object is less than another object. + :param Optional[callable] le: `callable` used to evaluate whether + one object is less than or equal to another object. + :param Optional[callable] gt: `callable` used to evaluate whether + one object is greater than another object. + :param Optional[callable] ge: `callable` used to evaluate whether + one object is greater than or equal to another object. + + :param bool require_same_type: When `True`, equality and ordering methods + will return `NotImplemented` if objects are not of the same type. + + :param Optional[str] class_name: Name of class. Defaults to 'Comparable'. + + See `comparison` for more details. + + .. versionadded:: 21.1.0 + """ + + body = { + "__slots__": ["value"], + "__init__": _make_init(), + "_requirements": [], + "_is_comparable_to": _is_comparable_to, + } + + # Add operations. + num_order_functions = 0 + has_eq_function = False + + if eq is not None: + has_eq_function = True + body["__eq__"] = _make_operator("eq", eq) + body["__ne__"] = _make_ne() + + if lt is not None: + num_order_functions += 1 + body["__lt__"] = _make_operator("lt", lt) + + if le is not None: + num_order_functions += 1 + body["__le__"] = _make_operator("le", le) + + if gt is not None: + num_order_functions += 1 + body["__gt__"] = _make_operator("gt", gt) + + if ge is not None: + num_order_functions += 1 + body["__ge__"] = _make_operator("ge", ge) + + type_ = types.new_class( + class_name, (object,), {}, lambda ns: ns.update(body) + ) + + # Add same type requirement. + if require_same_type: + type_._requirements.append(_check_same_type) + + # Add total ordering if at least one operation was defined. + if 0 < num_order_functions < 4: + if not has_eq_function: + # functools.total_ordering requires __eq__ to be defined, + # so raise an early error here to keep a nice stack. + raise ValueError( + "eq must be defined in order to complete ordering from " + "lt, le, gt, ge." + ) + type_ = functools.total_ordering(type_) + + return type_ + + +def _make_init(): + """ + Create __init__ method. + """ + + def __init__(self, value): + """ + Initialize object with *value*. + """ + self.value = value + + return __init__ + + +def _make_operator(name, func): + """ + Create operator method.
+ """ + + def method(self, other): + if not self._is_comparable_to(other): + return NotImplemented + + result = func(self.value, other.value) + if result is NotImplemented: + return NotImplemented + + return result + + method.__name__ = f"__{name}__" + method.__doc__ = ( + f"Return a {_operation_names[name]} b. Computed by attrs." + ) + + return method + + +def _is_comparable_to(self, other): + """ + Check whether `other` is comparable to `self`. + """ + for func in self._requirements: + if not func(self, other): + return False + return True + + +def _check_same_type(self, other): + """ + Return True if *self* and *other* are of the same type, False otherwise. + """ + return other.value.__class__ is self.value.__class__ diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attr/_cmp.pyi b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attr/_cmp.pyi new file mode 100644 index 00000000..f3dcdc1a --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attr/_cmp.pyi @@ -0,0 +1,13 @@ +from typing import Any, Callable, Optional, Type + +_CompareWithType = Callable[[Any, Any], bool] + +def cmp_using( + eq: Optional[_CompareWithType] = ..., + lt: Optional[_CompareWithType] = ..., + le: Optional[_CompareWithType] = ..., + gt: Optional[_CompareWithType] = ..., + ge: Optional[_CompareWithType] = ..., + require_same_type: bool = ..., + class_name: str = ..., +) -> Type: ... diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attr/_compat.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attr/_compat.py new file mode 100644 index 00000000..35a85a3f --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attr/_compat.py @@ -0,0 +1,176 @@ +# SPDX-License-Identifier: MIT + + +import inspect +import platform +import sys +import threading +import types +import warnings + +from collections.abc import Mapping, Sequence # noqa + + +PYPY = platform.python_implementation() == "PyPy" +PY310 = sys.version_info[:2] >= (3, 10) +PY_3_12_PLUS = sys.version_info[:2] >= (3, 12) + + +def just_warn(*args, **kw): + warnings.warn( + "Running interpreter doesn't sufficiently support code object " + "introspection. Some features like bare super() or accessing " + "__class__ will not work with slotted classes.", + RuntimeWarning, + stacklevel=2, + ) + + +class _AnnotationExtractor: + """ + Extract type annotations from a callable, returning None whenever there + is none. + """ + + __slots__ = ["sig"] + + def __init__(self, callable): + try: + self.sig = inspect.signature(callable) + except (ValueError, TypeError): # inspect failed + self.sig = None + + def get_first_param_type(self): + """ + Return the type annotation of the first argument if it's not empty. + """ + if not self.sig: + return None + + params = list(self.sig.parameters.values()) + if params and params[0].annotation is not inspect.Parameter.empty: + return params[0].annotation + + return None + + def get_return_type(self): + """ + Return the return type if it's not empty. + """ + if ( + self.sig + and self.sig.return_annotation is not inspect.Signature.empty + ): + return self.sig.return_annotation + + return None + + +def make_set_closure_cell(): + """Return a function of two arguments (cell, value) which sets + the value stored in the closure cell `cell` to `value`. + """ + # pypy makes this easy. (It also supports the logic below, but + # why not do the easy/fast thing?) 
+ if PYPY: + + def set_closure_cell(cell, value): + cell.__setstate__((value,)) + + return set_closure_cell + + # Otherwise gotta do it the hard way. + + # Create a function that will set its first cellvar to `value`. + def set_first_cellvar_to(value): + x = value + return + + # This function will be eliminated as dead code, but + # not before its reference to `x` forces `x` to be + # represented as a closure cell rather than a local. + def force_x_to_be_a_cell(): # pragma: no cover + return x + + try: + # Extract the code object and make sure our assumptions about + # the closure behavior are correct. + co = set_first_cellvar_to.__code__ + if co.co_cellvars != ("x",) or co.co_freevars != (): + raise AssertionError # pragma: no cover + + # Convert this code object to a code object that sets the + # function's first _freevar_ (not cellvar) to the argument. + if sys.version_info >= (3, 8): + + def set_closure_cell(cell, value): + cell.cell_contents = value + + else: + args = [co.co_argcount] + args.append(co.co_kwonlyargcount) + args.extend( + [ + co.co_nlocals, + co.co_stacksize, + co.co_flags, + co.co_code, + co.co_consts, + co.co_names, + co.co_varnames, + co.co_filename, + co.co_name, + co.co_firstlineno, + co.co_lnotab, + # These two arguments are reversed: + co.co_cellvars, + co.co_freevars, + ] + ) + set_first_freevar_code = types.CodeType(*args) + + def set_closure_cell(cell, value): + # Create a function using the set_first_freevar_code, + # whose first closure cell is `cell`. Calling it will + # change the value of that cell. + setter = types.FunctionType( + set_first_freevar_code, {}, "setter", (), (cell,) + ) + # And call it to set the cell. + setter(value) + + # Make sure it works on this interpreter: + def make_func_with_cell(): + x = None + + def func(): + return x # pragma: no cover + + return func + + cell = make_func_with_cell().__closure__[0] + set_closure_cell(cell, 100) + if cell.cell_contents != 100: + raise AssertionError # pragma: no cover + + except Exception: + return just_warn + else: + return set_closure_cell + + +set_closure_cell = make_set_closure_cell() + +# Thread-local global to track attrs instances which are already being repr'd. +# This is needed because there is no other (thread-safe) way to pass info +# about the instances that are already being repr'd through the call stack +# in order to ensure we don't perform infinite recursion. +# +# For instance, if an instance contains a dict which contains that instance, +# we need to know that we're already repr'ing the outside instance from within +# the dict's repr() call. +# +# This lives here rather than in _make.py so that the functions in _make.py +# don't have a direct reference to the thread-local in their globals dict. +# If they have such a reference, it breaks cloudpickle. +repr_context = threading.local() diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attr/_config.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attr/_config.py new file mode 100644 index 00000000..96d42007 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attr/_config.py @@ -0,0 +1,31 @@ +# SPDX-License-Identifier: MIT + + +__all__ = ["set_run_validators", "get_run_validators"] + +_run_validators = True + + +def set_run_validators(run): + """ + Set whether or not validators are run. By default, they are run. + + .. deprecated:: 21.3.0 It will not be removed, but it also will not be + moved to new ``attrs`` namespace. Use `attrs.validators.set_disabled()` + instead. 
+ """ + if not isinstance(run, bool): + raise TypeError("'run' must be bool.") + global _run_validators + _run_validators = run + + +def get_run_validators(): + """ + Return whether or not validators are run. + + .. deprecated:: 21.3.0 It will not be removed, but it also will not be + moved to new ``attrs`` namespace. Use `attrs.validators.get_disabled()` + instead. + """ + return _run_validators diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attr/_funcs.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attr/_funcs.py new file mode 100644 index 00000000..1f573c11 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attr/_funcs.py @@ -0,0 +1,418 @@ +# SPDX-License-Identifier: MIT + + +import copy + +from ._make import NOTHING, _obj_setattr, fields +from .exceptions import AttrsAttributeNotFoundError + + +def asdict( + inst, + recurse=True, + filter=None, + dict_factory=dict, + retain_collection_types=False, + value_serializer=None, +): + """ + Return the ``attrs`` attribute values of *inst* as a dict. + + Optionally recurse into other ``attrs``-decorated classes. + + :param inst: Instance of an ``attrs``-decorated class. + :param bool recurse: Recurse into classes that are also + ``attrs``-decorated. + :param callable filter: A callable whose return code determines whether an + attribute or element is included (``True``) or dropped (``False``). Is + called with the `attrs.Attribute` as the first argument and the + value as the second argument. + :param callable dict_factory: A callable to produce dictionaries from. For + example, to produce ordered dictionaries instead of normal Python + dictionaries, pass in ``collections.OrderedDict``. + :param bool retain_collection_types: Do not convert to ``list`` when + encountering an attribute whose type is ``tuple`` or ``set``. Only + meaningful if ``recurse`` is ``True``. + :param Optional[callable] value_serializer: A hook that is called for every + attribute or dict key/value. It receives the current instance, field + and value and must return the (updated) value. The hook is run *after* + the optional *filter* has been applied. + + :rtype: return type of *dict_factory* + + :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` + class. + + .. versionadded:: 16.0.0 *dict_factory* + .. versionadded:: 16.1.0 *retain_collection_types* + .. versionadded:: 20.3.0 *value_serializer* + .. versionadded:: 21.3.0 If a dict has a collection for a key, it is + serialized as a tuple. 
+ """ + attrs = fields(inst.__class__) + rv = dict_factory() + for a in attrs: + v = getattr(inst, a.name) + if filter is not None and not filter(a, v): + continue + + if value_serializer is not None: + v = value_serializer(inst, a, v) + + if recurse is True: + if has(v.__class__): + rv[a.name] = asdict( + v, + recurse=True, + filter=filter, + dict_factory=dict_factory, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ) + elif isinstance(v, (tuple, list, set, frozenset)): + cf = v.__class__ if retain_collection_types is True else list + rv[a.name] = cf( + [ + _asdict_anything( + i, + is_key=False, + filter=filter, + dict_factory=dict_factory, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ) + for i in v + ] + ) + elif isinstance(v, dict): + df = dict_factory + rv[a.name] = df( + ( + _asdict_anything( + kk, + is_key=True, + filter=filter, + dict_factory=df, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ), + _asdict_anything( + vv, + is_key=False, + filter=filter, + dict_factory=df, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ), + ) + for kk, vv in v.items() + ) + else: + rv[a.name] = v + else: + rv[a.name] = v + return rv + + +def _asdict_anything( + val, + is_key, + filter, + dict_factory, + retain_collection_types, + value_serializer, +): + """ + ``asdict`` only works on attrs instances, this works on anything. + """ + if getattr(val.__class__, "__attrs_attrs__", None) is not None: + # Attrs class. + rv = asdict( + val, + recurse=True, + filter=filter, + dict_factory=dict_factory, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ) + elif isinstance(val, (tuple, list, set, frozenset)): + if retain_collection_types is True: + cf = val.__class__ + elif is_key: + cf = tuple + else: + cf = list + + rv = cf( + [ + _asdict_anything( + i, + is_key=False, + filter=filter, + dict_factory=dict_factory, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ) + for i in val + ] + ) + elif isinstance(val, dict): + df = dict_factory + rv = df( + ( + _asdict_anything( + kk, + is_key=True, + filter=filter, + dict_factory=df, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ), + _asdict_anything( + vv, + is_key=False, + filter=filter, + dict_factory=df, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ), + ) + for kk, vv in val.items() + ) + else: + rv = val + if value_serializer is not None: + rv = value_serializer(None, None, rv) + + return rv + + +def astuple( + inst, + recurse=True, + filter=None, + tuple_factory=tuple, + retain_collection_types=False, +): + """ + Return the ``attrs`` attribute values of *inst* as a tuple. + + Optionally recurse into other ``attrs``-decorated classes. + + :param inst: Instance of an ``attrs``-decorated class. + :param bool recurse: Recurse into classes that are also + ``attrs``-decorated. + :param callable filter: A callable whose return code determines whether an + attribute or element is included (``True``) or dropped (``False``). Is + called with the `attrs.Attribute` as the first argument and the + value as the second argument. + :param callable tuple_factory: A callable to produce tuples from. For + example, to produce lists instead of tuples. 
+    :param bool retain_collection_types: Do not convert to ``list``
+        or ``dict`` when encountering an attribute whose type is
+        ``tuple``, ``dict`` or ``set``. Only meaningful if ``recurse`` is
+        ``True``.
+
+    :rtype: return type of *tuple_factory*
+
+    :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
+        class.
+
+    .. versionadded:: 16.2.0
+    """
+    attrs = fields(inst.__class__)
+    rv = []
+    retain = retain_collection_types  # Very long. :/
+    for a in attrs:
+        v = getattr(inst, a.name)
+        if filter is not None and not filter(a, v):
+            continue
+        if recurse is True:
+            if has(v.__class__):
+                rv.append(
+                    astuple(
+                        v,
+                        recurse=True,
+                        filter=filter,
+                        tuple_factory=tuple_factory,
+                        retain_collection_types=retain,
+                    )
+                )
+            elif isinstance(v, (tuple, list, set, frozenset)):
+                cf = v.__class__ if retain is True else list
+                rv.append(
+                    cf(
+                        [
+                            astuple(
+                                j,
+                                recurse=True,
+                                filter=filter,
+                                tuple_factory=tuple_factory,
+                                retain_collection_types=retain,
+                            )
+                            if has(j.__class__)
+                            else j
+                            for j in v
+                        ]
+                    )
+                )
+            elif isinstance(v, dict):
+                df = v.__class__ if retain is True else dict
+                rv.append(
+                    df(
+                        (
+                            astuple(
+                                kk,
+                                tuple_factory=tuple_factory,
+                                retain_collection_types=retain,
+                            )
+                            if has(kk.__class__)
+                            else kk,
+                            astuple(
+                                vv,
+                                tuple_factory=tuple_factory,
+                                retain_collection_types=retain,
+                            )
+                            if has(vv.__class__)
+                            else vv,
+                        )
+                        for kk, vv in v.items()
+                    )
+                )
+            else:
+                rv.append(v)
+        else:
+            rv.append(v)
+
+    return rv if tuple_factory is list else tuple_factory(rv)
+
+
+def has(cls):
+    """
+    Check whether *cls* is a class with ``attrs`` attributes.
+
+    :param type cls: Class to introspect.
+    :raise TypeError: If *cls* is not a class.
+
+    :rtype: bool
+    """
+    return getattr(cls, "__attrs_attrs__", None) is not None
+
+
+def assoc(inst, **changes):
+    """
+    Copy *inst* and apply *changes*.
+
+    :param inst: Instance of a class with ``attrs`` attributes.
+    :param changes: Keyword changes in the new copy.
+
+    :return: A copy of inst with *changes* incorporated.
+
+    :raise attr.exceptions.AttrsAttributeNotFoundError: If *attr_name* couldn't
+        be found on *cls*.
+    :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
+        class.
+
+    .. deprecated:: 17.1.0
+        Use `attrs.evolve` instead if you can.
+        This function will not be removed due to the slightly different
+        approach compared to `attrs.evolve`.
+    """
+    import warnings
+
+    warnings.warn(
+        "assoc is deprecated and will be removed after 2018/01.",
+        DeprecationWarning,
+        stacklevel=2,
+    )
+    new = copy.copy(inst)
+    attrs = fields(inst.__class__)
+    for k, v in changes.items():
+        a = getattr(attrs, k, NOTHING)
+        if a is NOTHING:
+            raise AttrsAttributeNotFoundError(
+                f"{k} is not an attrs attribute on {new.__class__}."
+            )
+        _obj_setattr(new, k, v)
+    return new
+
+
+def evolve(inst, **changes):
+    """
+    Create a new instance, based on *inst* with *changes* applied.
+
+    :param inst: Instance of a class with ``attrs`` attributes.
+    :param changes: Keyword changes in the new copy.
+
+    :return: A copy of inst with *changes* incorporated.
+
+    :raise TypeError: If *attr_name* couldn't be found in the class
+        ``__init__``.
+    :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
+        class.
+
+    .. versionadded:: 17.1.0
+    """
+    cls = inst.__class__
+    attrs = fields(cls)
+    for a in attrs:
+        if not a.init:
+            continue
+        attr_name = a.name  # To deal with private attributes.
+ init_name = a.alias + if init_name not in changes: + changes[init_name] = getattr(inst, attr_name) + + return cls(**changes) + + +def resolve_types(cls, globalns=None, localns=None, attribs=None): + """ + Resolve any strings and forward annotations in type annotations. + + This is only required if you need concrete types in `Attribute`'s *type* + field. In other words, you don't need to resolve your types if you only + use them for static type checking. + + With no arguments, names will be looked up in the module in which the class + was created. If this is not what you want, e.g. if the name only exists + inside a method, you may pass *globalns* or *localns* to specify other + dictionaries in which to look up these names. See the docs of + `typing.get_type_hints` for more details. + + :param type cls: Class to resolve. + :param Optional[dict] globalns: Dictionary containing global variables. + :param Optional[dict] localns: Dictionary containing local variables. + :param Optional[list] attribs: List of attribs for the given class. + This is necessary when calling from inside a ``field_transformer`` + since *cls* is not an ``attrs`` class yet. + + :raise TypeError: If *cls* is not a class. + :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` + class and you didn't pass any attribs. + :raise NameError: If types cannot be resolved because of missing variables. + + :returns: *cls* so you can use this function also as a class decorator. + Please note that you have to apply it **after** `attrs.define`. That + means the decorator has to come in the line **before** `attrs.define`. + + .. versionadded:: 20.1.0 + .. versionadded:: 21.1.0 *attribs* + + """ + # Since calling get_type_hints is expensive we cache whether we've + # done it already. + if getattr(cls, "__attrs_types_resolved__", None) != cls: + import typing + + hints = typing.get_type_hints(cls, globalns=globalns, localns=localns) + for field in fields(cls) if attribs is None else attribs: + if field.name in hints: + # Since fields have been frozen we must work around it. + _obj_setattr(field, "type", hints[field.name]) + # We store the class we resolved so that subclasses know they haven't + # been resolved. + cls.__attrs_types_resolved__ = cls + + # Return the class so you can use it as a decorator too. + return cls diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attr/_make.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attr/_make.py new file mode 100644 index 00000000..9ee22005 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attr/_make.py @@ -0,0 +1,2965 @@ +# SPDX-License-Identifier: MIT + +import copy +import enum +import linecache +import sys +import types +import typing + +from operator import itemgetter + +# We need to import _compat itself in addition to the _compat members to avoid +# having the thread-local in the globals here. +from . import _compat, _config, setters +from ._compat import PY310, PYPY, _AnnotationExtractor, set_closure_cell +from .exceptions import ( + DefaultAlreadySetError, + FrozenInstanceError, + NotAnAttrsClassError, + UnannotatedAttributeError, +) + + +# This is used at least twice, so cache it here. 
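Pausing just before the diff moves into the `_make.py` module constants: a short sketch of the `resolve_types` helper above resolving a string forward reference (the `Node` class is hypothetical):

```python
import typing
import attr

@attr.s(auto_attribs=True)
class Node:
    value: int
    next: "typing.Optional[Node]" = None   # forward reference as a string

# Before resolution the stored type is the raw annotation string.
assert attr.fields(Node).next.type == "typing.Optional[Node]"

attr.resolve_types(Node)
assert attr.fields(Node).next.type == typing.Optional[Node]
```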
+_obj_setattr = object.__setattr__ +_init_converter_pat = "__attr_converter_%s" +_init_factory_pat = "__attr_factory_%s" +_classvar_prefixes = ( + "typing.ClassVar", + "t.ClassVar", + "ClassVar", + "typing_extensions.ClassVar", +) +# we don't use a double-underscore prefix because that triggers +# name mangling when trying to create a slot for the field +# (when slots=True) +_hash_cache_field = "_attrs_cached_hash" + +_empty_metadata_singleton = types.MappingProxyType({}) + +# Unique object for unequivocal getattr() defaults. +_sentinel = object() + +_ng_default_on_setattr = setters.pipe(setters.convert, setters.validate) + + +class _Nothing(enum.Enum): + """ + Sentinel to indicate the lack of a value when ``None`` is ambiguous. + + If extending attrs, you can use ``typing.Literal[NOTHING]`` to show + that a value may be ``NOTHING``. + + .. versionchanged:: 21.1.0 ``bool(NOTHING)`` is now False. + .. versionchanged:: 22.2.0 ``NOTHING`` is now an ``enum.Enum`` variant. + """ + + NOTHING = enum.auto() + + def __repr__(self): + return "NOTHING" + + def __bool__(self): + return False + + +NOTHING = _Nothing.NOTHING +""" +Sentinel to indicate the lack of a value when ``None`` is ambiguous. +""" + + +class _CacheHashWrapper(int): + """ + An integer subclass that pickles / copies as None + + This is used for non-slots classes with ``cache_hash=True``, to avoid + serializing a potentially (even likely) invalid hash value. Since ``None`` + is the default value for uncalculated hashes, whenever this is copied, + the copy's value for the hash should automatically reset. + + See GH #613 for more details. + """ + + def __reduce__(self, _none_constructor=type(None), _args=()): + return _none_constructor, _args + + +def attrib( + default=NOTHING, + validator=None, + repr=True, + cmp=None, + hash=None, + init=True, + metadata=None, + type=None, + converter=None, + factory=None, + kw_only=False, + eq=None, + order=None, + on_setattr=None, + alias=None, +): + """ + Create a new attribute on a class. + + .. warning:: + + Does *not* do anything unless the class is also decorated with + `attr.s`! + + :param default: A value that is used if an ``attrs``-generated ``__init__`` + is used and no value is passed while instantiating or the attribute is + excluded using ``init=False``. + + If the value is an instance of `attrs.Factory`, its callable will be + used to construct a new value (useful for mutable data types like lists + or dicts). + + If a default is not set (or set manually to `attrs.NOTHING`), a value + *must* be supplied when instantiating; otherwise a `TypeError` + will be raised. + + The default can also be set using decorator notation as shown below. + + :type default: Any value + + :param callable factory: Syntactic sugar for + ``default=attr.Factory(factory)``. + + :param validator: `callable` that is called by ``attrs``-generated + ``__init__`` methods after the instance has been initialized. They + receive the initialized instance, the :func:`~attrs.Attribute`, and the + passed value. + + The return value is *not* inspected so the validator has to throw an + exception itself. + + If a `list` is passed, its items are treated as validators and must + all pass. + + Validators can be globally disabled and re-enabled using + `get_run_validators`. + + The validator can also be set using decorator notation as shown below. + + :type validator: `callable` or a `list` of `callable`\\ s. + + :param repr: Include this attribute in the generated ``__repr__`` + method. 
If ``True``, include the attribute; if ``False``, omit it. By
+        default, the built-in ``repr()`` function is used. To override how the
+        attribute value is formatted, pass a ``callable`` that takes a single
+        value and returns a string. Note that the resulting string is used
+        as-is, i.e. it will be used directly *instead* of calling ``repr()``
+        (the default).
+    :type repr: a `bool` or a `callable` to use a custom function.
+
+    :param eq: If ``True`` (default), include this attribute in the
+        generated ``__eq__`` and ``__ne__`` methods that check two instances
+        for equality. To override how the attribute value is compared,
+        pass a ``callable`` that takes a single value and returns the value
+        to be compared.
+    :type eq: a `bool` or a `callable`.
+
+    :param order: If ``True`` (default), include this attribute in the
+        generated ``__lt__``, ``__le__``, ``__gt__`` and ``__ge__`` methods.
+        To override how the attribute value is ordered,
+        pass a ``callable`` that takes a single value and returns the value
+        to be ordered.
+    :type order: a `bool` or a `callable`.
+
+    :param cmp: Setting *cmp* is equivalent to setting *eq* and *order* to the
+        same value. Must not be mixed with *eq* or *order*.
+    :type cmp: a `bool` or a `callable`.
+
+    :param Optional[bool] hash: Include this attribute in the generated
+        ``__hash__`` method. If ``None`` (default), mirror *eq*'s value. This
+        is the correct behavior according to the Python spec. Setting this
+        value to anything other than ``None`` is *discouraged*.
+    :param bool init: Include this attribute in the generated ``__init__``
+        method. It is possible to set this to ``False`` and set a default
+        value. In that case this attribute is unconditionally initialized
+        with the specified default value or factory.
+    :param callable converter: `callable` that is called by
+        ``attrs``-generated ``__init__`` methods to convert the attribute's
+        value to the desired format. It is given the passed-in value, and the
+        returned value will be used as the new value of the attribute. The
+        value is converted before being passed to the validator, if any.
+    :param metadata: An arbitrary mapping, to be used by third-party
+        components. See `extending-metadata`.
+
+    :param type: The type of the attribute. Nowadays, the preferred method to
+        specify the type is using a variable annotation (see :pep:`526`).
+        This argument is provided for backward compatibility.
+        Regardless of the approach used, the type will be stored on
+        ``Attribute.type``.
+
+        Please note that ``attrs`` doesn't do anything with this metadata by
+        itself. You can use it as part of your own code or for
+        `static type checking `.
+    :param kw_only: Make this attribute keyword-only in the generated
+        ``__init__`` (if ``init`` is ``False``, this parameter is ignored).
+    :param on_setattr: Allows overriding the *on_setattr* setting from
+        `attr.s`. If left `None`, the *on_setattr* value from `attr.s` is used.
+        Set to `attrs.setters.NO_OP` to run **no** `setattr` hooks for this
+        attribute -- regardless of the setting in `attr.s`.
+    :type on_setattr: `callable`, or a list of callables, or `None`, or
+        `attrs.setters.NO_OP`
+    :param Optional[str] alias: Override this attribute's parameter name in the
+        generated ``__init__`` method. If left `None`, it defaults to ``name``
+        stripped of leading underscores. See `private-attributes`.
+
+    .. versionadded:: 15.2.0 *convert*
+    .. versionadded:: 16.3.0 *metadata*
+    .. versionchanged:: 17.1.0 *validator* can be a ``list`` now.
+    ..
versionchanged:: 17.1.0 + *hash* is ``None`` and therefore mirrors *eq* by default. + .. versionadded:: 17.3.0 *type* + .. deprecated:: 17.4.0 *convert* + .. versionadded:: 17.4.0 *converter* as a replacement for the deprecated + *convert* to achieve consistency with other noun-based arguments. + .. versionadded:: 18.1.0 + ``factory=f`` is syntactic sugar for ``default=attr.Factory(f)``. + .. versionadded:: 18.2.0 *kw_only* + .. versionchanged:: 19.2.0 *convert* keyword argument removed. + .. versionchanged:: 19.2.0 *repr* also accepts a custom callable. + .. deprecated:: 19.2.0 *cmp* Removal on or after 2021-06-01. + .. versionadded:: 19.2.0 *eq* and *order* + .. versionadded:: 20.1.0 *on_setattr* + .. versionchanged:: 20.3.0 *kw_only* backported to Python 2 + .. versionchanged:: 21.1.0 + *eq*, *order*, and *cmp* also accept a custom callable + .. versionchanged:: 21.1.0 *cmp* undeprecated + .. versionadded:: 22.2.0 *alias* + """ + eq, eq_key, order, order_key = _determine_attrib_eq_order( + cmp, eq, order, True + ) + + if hash is not None and hash is not True and hash is not False: + raise TypeError( + "Invalid value for hash. Must be True, False, or None." + ) + + if factory is not None: + if default is not NOTHING: + raise ValueError( + "The `default` and `factory` arguments are mutually " + "exclusive." + ) + if not callable(factory): + raise ValueError("The `factory` argument must be a callable.") + default = Factory(factory) + + if metadata is None: + metadata = {} + + # Apply syntactic sugar by auto-wrapping. + if isinstance(on_setattr, (list, tuple)): + on_setattr = setters.pipe(*on_setattr) + + if validator and isinstance(validator, (list, tuple)): + validator = and_(*validator) + + if converter and isinstance(converter, (list, tuple)): + converter = pipe(*converter) + + return _CountingAttr( + default=default, + validator=validator, + repr=repr, + cmp=None, + hash=hash, + init=init, + converter=converter, + metadata=metadata, + type=type, + kw_only=kw_only, + eq=eq, + eq_key=eq_key, + order=order, + order_key=order_key, + on_setattr=on_setattr, + alias=alias, + ) + + +def _compile_and_eval(script, globs, locs=None, filename=""): + """ + "Exec" the script with the given global (globs) and local (locs) variables. + """ + bytecode = compile(script, filename, "exec") + eval(bytecode, globs, locs) + + +def _make_method(name, script, filename, globs): + """ + Create the method with the script given and return the method object. + """ + locs = {} + + # In order of debuggers like PDB being able to step through the code, + # we add a fake linecache entry. + count = 1 + base_filename = filename + while True: + linecache_tuple = ( + len(script), + None, + script.splitlines(True), + filename, + ) + old_val = linecache.cache.setdefault(filename, linecache_tuple) + if old_val == linecache_tuple: + break + else: + filename = f"{base_filename[:-1]}-{count}>" + count += 1 + + _compile_and_eval(script, globs, locs, filename) + + return locs[name] + + +def _make_attr_tuple_class(cls_name, attr_names): + """ + Create a tuple subclass to hold `Attribute`s for an `attrs` class. + + The subclass is a bare tuple with properties for names. 
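An aside, briefly interrupting the `_make_attr_tuple_class` docstring: the `attrib()` API documented above supports defaults, factories, decorator-notation validators, converters that run before validation, and the 22.2.0 `alias` override. A hedged sketch (the `Server` class and its fields are invented for the example):

```python
import attr

@attr.s
class Server:
    host = attr.ib()
    port = attr.ib(default=8080, converter=int)
    tags = attr.ib(factory=list)                   # sugar for Factory(list)
    _token = attr.ib(default=None, alias="token")  # private name, clean kwarg

    @port.validator
    def _check_port(self, attribute, value):
        # Runs after conversion, so `value` is already an int.
        if not 0 < value < 65536:
            raise ValueError(f"invalid port: {value}")

s = Server("example.com", port="443", token="abc")
assert s.port == 443       # converter ran before the validator
assert s._token == "abc"   # init argument was `token`, per alias
```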
+ + class MyClassAttributes(tuple): + __slots__ = () + x = property(itemgetter(0)) + """ + attr_class_name = f"{cls_name}Attributes" + attr_class_template = [ + f"class {attr_class_name}(tuple):", + " __slots__ = ()", + ] + if attr_names: + for i, attr_name in enumerate(attr_names): + attr_class_template.append( + f" {attr_name} = _attrs_property(_attrs_itemgetter({i}))" + ) + else: + attr_class_template.append(" pass") + globs = {"_attrs_itemgetter": itemgetter, "_attrs_property": property} + _compile_and_eval("\n".join(attr_class_template), globs) + return globs[attr_class_name] + + +# Tuple class for extracted attributes from a class definition. +# `base_attrs` is a subset of `attrs`. +_Attributes = _make_attr_tuple_class( + "_Attributes", + [ + # all attributes to build dunder methods for + "attrs", + # attributes that have been inherited + "base_attrs", + # map inherited attributes to their originating classes + "base_attrs_map", + ], +) + + +def _is_class_var(annot): + """ + Check whether *annot* is a typing.ClassVar. + + The string comparison hack is used to avoid evaluating all string + annotations which would put attrs-based classes at a performance + disadvantage compared to plain old classes. + """ + annot = str(annot) + + # Annotation can be quoted. + if annot.startswith(("'", '"')) and annot.endswith(("'", '"')): + annot = annot[1:-1] + + return annot.startswith(_classvar_prefixes) + + +def _has_own_attribute(cls, attrib_name): + """ + Check whether *cls* defines *attrib_name* (and doesn't just inherit it). + """ + attr = getattr(cls, attrib_name, _sentinel) + if attr is _sentinel: + return False + + for base_cls in cls.__mro__[1:]: + a = getattr(base_cls, attrib_name, None) + if attr is a: + return False + + return True + + +def _get_annotations(cls): + """ + Get annotations for *cls*. + """ + if _has_own_attribute(cls, "__annotations__"): + return cls.__annotations__ + + return {} + + +def _collect_base_attrs(cls, taken_attr_names): + """ + Collect attr.ibs from base classes of *cls*, except *taken_attr_names*. + """ + base_attrs = [] + base_attr_map = {} # A dictionary of base attrs to their classes. + + # Traverse the MRO and collect attributes. + for base_cls in reversed(cls.__mro__[1:-1]): + for a in getattr(base_cls, "__attrs_attrs__", []): + if a.inherited or a.name in taken_attr_names: + continue + + a = a.evolve(inherited=True) + base_attrs.append(a) + base_attr_map[a.name] = base_cls + + # For each name, only keep the freshest definition i.e. the furthest at the + # back. base_attr_map is fine because it gets overwritten with every new + # instance. + filtered = [] + seen = set() + for a in reversed(base_attrs): + if a.name in seen: + continue + filtered.insert(0, a) + seen.add(a.name) + + return filtered, base_attr_map + + +def _collect_base_attrs_broken(cls, taken_attr_names): + """ + Collect attr.ibs from base classes of *cls*, except *taken_attr_names*. + + N.B. *taken_attr_names* will be mutated. + + Adhere to the old incorrect behavior. + + Notably it collects from the front and considers inherited attributes which + leads to the buggy behavior reported in #428. + """ + base_attrs = [] + base_attr_map = {} # A dictionary of base attrs to their classes. + + # Traverse the MRO and collect attributes. 
+ for base_cls in cls.__mro__[1:-1]: + for a in getattr(base_cls, "__attrs_attrs__", []): + if a.name in taken_attr_names: + continue + + a = a.evolve(inherited=True) + taken_attr_names.add(a.name) + base_attrs.append(a) + base_attr_map[a.name] = base_cls + + return base_attrs, base_attr_map + + +def _transform_attrs( + cls, these, auto_attribs, kw_only, collect_by_mro, field_transformer +): + """ + Transform all `_CountingAttr`s on a class into `Attribute`s. + + If *these* is passed, use that and don't look for them on the class. + + *collect_by_mro* is True, collect them in the correct MRO order, otherwise + use the old -- incorrect -- order. See #428. + + Return an `_Attributes`. + """ + cd = cls.__dict__ + anns = _get_annotations(cls) + + if these is not None: + ca_list = [(name, ca) for name, ca in these.items()] + elif auto_attribs is True: + ca_names = { + name + for name, attr in cd.items() + if isinstance(attr, _CountingAttr) + } + ca_list = [] + annot_names = set() + for attr_name, type in anns.items(): + if _is_class_var(type): + continue + annot_names.add(attr_name) + a = cd.get(attr_name, NOTHING) + + if not isinstance(a, _CountingAttr): + if a is NOTHING: + a = attrib() + else: + a = attrib(default=a) + ca_list.append((attr_name, a)) + + unannotated = ca_names - annot_names + if len(unannotated) > 0: + raise UnannotatedAttributeError( + "The following `attr.ib`s lack a type annotation: " + + ", ".join( + sorted(unannotated, key=lambda n: cd.get(n).counter) + ) + + "." + ) + else: + ca_list = sorted( + ( + (name, attr) + for name, attr in cd.items() + if isinstance(attr, _CountingAttr) + ), + key=lambda e: e[1].counter, + ) + + own_attrs = [ + Attribute.from_counting_attr( + name=attr_name, ca=ca, type=anns.get(attr_name) + ) + for attr_name, ca in ca_list + ] + + if collect_by_mro: + base_attrs, base_attr_map = _collect_base_attrs( + cls, {a.name for a in own_attrs} + ) + else: + base_attrs, base_attr_map = _collect_base_attrs_broken( + cls, {a.name for a in own_attrs} + ) + + if kw_only: + own_attrs = [a.evolve(kw_only=True) for a in own_attrs] + base_attrs = [a.evolve(kw_only=True) for a in base_attrs] + + attrs = base_attrs + own_attrs + + # Mandatory vs non-mandatory attr order only matters when they are part of + # the __init__ signature and when they aren't kw_only (which are moved to + # the end and can be mandatory or non-mandatory in any order, as they will + # be specified as keyword args anyway). Check the order of those attrs: + had_default = False + for a in (a for a in attrs if a.init is not False and a.kw_only is False): + if had_default is True and a.default is NOTHING: + raise ValueError( + "No mandatory attributes allowed after an attribute with a " + f"default value or factory. Attribute in question: {a!r}" + ) + + if had_default is False and a.default is not NOTHING: + had_default = True + + if field_transformer is not None: + attrs = field_transformer(cls, attrs) + + # Resolve default field alias after executing field_transformer. + # This allows field_transformer to differentiate between explicit vs + # default aliases and supply their own defaults. + attrs = [ + a.evolve(alias=_default_init_alias_for(a.name)) if not a.alias else a + for a in attrs + ] + + # Create AttrsClass *after* applying the field_transformer since it may + # add or remove attributes! 
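An aside on `_transform_attrs` above, which is where the mandatory-after-default rule is enforced. From user code that looks like this (hypothetical classes):

```python
import attr

# Fine: kw_only attributes are moved to the end of the signature, so
# their position relative to defaulted attributes doesn't matter.
@attr.s
class Ok:
    a = attr.ib(default=1)
    b = attr.ib(kw_only=True)

try:
    @attr.s
    class Broken:
        a = attr.ib(default=1)
        b = attr.ib()          # mandatory attribute after a default
except ValueError as e:
    assert "No mandatory attributes allowed" in str(e)
```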
+ attr_names = [a.name for a in attrs] + AttrsClass = _make_attr_tuple_class(cls.__name__, attr_names) + + return _Attributes((AttrsClass(attrs), base_attrs, base_attr_map)) + + +if PYPY: + + def _frozen_setattrs(self, name, value): + """ + Attached to frozen classes as __setattr__. + """ + if isinstance(self, BaseException) and name in ( + "__cause__", + "__context__", + ): + BaseException.__setattr__(self, name, value) + return + + raise FrozenInstanceError() + +else: + + def _frozen_setattrs(self, name, value): + """ + Attached to frozen classes as __setattr__. + """ + raise FrozenInstanceError() + + +def _frozen_delattrs(self, name): + """ + Attached to frozen classes as __delattr__. + """ + raise FrozenInstanceError() + + +class _ClassBuilder: + """ + Iteratively build *one* class. + """ + + __slots__ = ( + "_attr_names", + "_attrs", + "_base_attr_map", + "_base_names", + "_cache_hash", + "_cls", + "_cls_dict", + "_delete_attribs", + "_frozen", + "_has_pre_init", + "_has_post_init", + "_is_exc", + "_on_setattr", + "_slots", + "_weakref_slot", + "_wrote_own_setattr", + "_has_custom_setattr", + ) + + def __init__( + self, + cls, + these, + slots, + frozen, + weakref_slot, + getstate_setstate, + auto_attribs, + kw_only, + cache_hash, + is_exc, + collect_by_mro, + on_setattr, + has_custom_setattr, + field_transformer, + ): + attrs, base_attrs, base_map = _transform_attrs( + cls, + these, + auto_attribs, + kw_only, + collect_by_mro, + field_transformer, + ) + + self._cls = cls + self._cls_dict = dict(cls.__dict__) if slots else {} + self._attrs = attrs + self._base_names = {a.name for a in base_attrs} + self._base_attr_map = base_map + self._attr_names = tuple(a.name for a in attrs) + self._slots = slots + self._frozen = frozen + self._weakref_slot = weakref_slot + self._cache_hash = cache_hash + self._has_pre_init = bool(getattr(cls, "__attrs_pre_init__", False)) + self._has_post_init = bool(getattr(cls, "__attrs_post_init__", False)) + self._delete_attribs = not bool(these) + self._is_exc = is_exc + self._on_setattr = on_setattr + + self._has_custom_setattr = has_custom_setattr + self._wrote_own_setattr = False + + self._cls_dict["__attrs_attrs__"] = self._attrs + + if frozen: + self._cls_dict["__setattr__"] = _frozen_setattrs + self._cls_dict["__delattr__"] = _frozen_delattrs + + self._wrote_own_setattr = True + elif on_setattr in ( + _ng_default_on_setattr, + setters.validate, + setters.convert, + ): + has_validator = has_converter = False + for a in attrs: + if a.validator is not None: + has_validator = True + if a.converter is not None: + has_converter = True + + if has_validator and has_converter: + break + if ( + ( + on_setattr == _ng_default_on_setattr + and not (has_validator or has_converter) + ) + or (on_setattr == setters.validate and not has_validator) + or (on_setattr == setters.convert and not has_converter) + ): + # If class-level on_setattr is set to convert + validate, but + # there's no field to convert or validate, pretend like there's + # no on_setattr. + self._on_setattr = None + + if getstate_setstate: + ( + self._cls_dict["__getstate__"], + self._cls_dict["__setstate__"], + ) = self._make_getstate_setstate() + + def __repr__(self): + return f"<_ClassBuilder(cls={self._cls.__name__})>" + + if PY310: + import abc + + def build_class(self): + """ + Finalize class based on the accumulated configuration. + + Builder cannot be used after calling this method. 
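A brief illustration of the `_frozen_setattrs`/`_frozen_delattrs` hooks defined above, as seen from user code (the `Config` class is hypothetical):

```python
import attr

@attr.s(frozen=True)
class Config:
    level = attr.ib(default="INFO")

c = Config()
try:
    c.level = "DEBUG"
except attr.exceptions.FrozenInstanceError:
    pass  # _frozen_setattrs rejected the write

# Escape hatch, which is also how attrs itself initializes frozen
# instances: bypass the class-level __setattr__.
object.__setattr__(c, "level", "DEBUG")
assert c.level == "DEBUG"
```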
+ """ + if self._slots is True: + return self._create_slots_class() + + return self.abc.update_abstractmethods( + self._patch_original_class() + ) + + else: + + def build_class(self): + """ + Finalize class based on the accumulated configuration. + + Builder cannot be used after calling this method. + """ + if self._slots is True: + return self._create_slots_class() + + return self._patch_original_class() + + def _patch_original_class(self): + """ + Apply accumulated methods and return the class. + """ + cls = self._cls + base_names = self._base_names + + # Clean class of attribute definitions (`attr.ib()`s). + if self._delete_attribs: + for name in self._attr_names: + if ( + name not in base_names + and getattr(cls, name, _sentinel) is not _sentinel + ): + try: + delattr(cls, name) + except AttributeError: + # This can happen if a base class defines a class + # variable and we want to set an attribute with the + # same name by using only a type annotation. + pass + + # Attach our dunder methods. + for name, value in self._cls_dict.items(): + setattr(cls, name, value) + + # If we've inherited an attrs __setattr__ and don't write our own, + # reset it to object's. + if not self._wrote_own_setattr and getattr( + cls, "__attrs_own_setattr__", False + ): + cls.__attrs_own_setattr__ = False + + if not self._has_custom_setattr: + cls.__setattr__ = _obj_setattr + + return cls + + def _create_slots_class(self): + """ + Build and return a new class with a `__slots__` attribute. + """ + cd = { + k: v + for k, v in self._cls_dict.items() + if k not in tuple(self._attr_names) + ("__dict__", "__weakref__") + } + + # If our class doesn't have its own implementation of __setattr__ + # (either from the user or by us), check the bases, if one of them has + # an attrs-made __setattr__, that needs to be reset. We don't walk the + # MRO because we only care about our immediate base classes. + # XXX: This can be confused by subclassing a slotted attrs class with + # XXX: a non-attrs class and subclass the resulting class with an attrs + # XXX: class. See `test_slotted_confused` for details. For now that's + # XXX: OK with us. + if not self._wrote_own_setattr: + cd["__attrs_own_setattr__"] = False + + if not self._has_custom_setattr: + for base_cls in self._cls.__bases__: + if base_cls.__dict__.get("__attrs_own_setattr__", False): + cd["__setattr__"] = _obj_setattr + break + + # Traverse the MRO to collect existing slots + # and check for an existing __weakref__. + existing_slots = dict() + weakref_inherited = False + for base_cls in self._cls.__mro__[1:-1]: + if base_cls.__dict__.get("__weakref__", None) is not None: + weakref_inherited = True + existing_slots.update( + { + name: getattr(base_cls, name) + for name in getattr(base_cls, "__slots__", []) + } + ) + + base_names = set(self._base_names) + + names = self._attr_names + if ( + self._weakref_slot + and "__weakref__" not in getattr(self._cls, "__slots__", ()) + and "__weakref__" not in names + and not weakref_inherited + ): + names += ("__weakref__",) + + # We only add the names of attributes that aren't inherited. + # Setting __slots__ to inherited attributes wastes memory. + slot_names = [name for name in names if name not in base_names] + # There are slots for attributes from current class + # that are defined in parent classes. 
+ # As their descriptors may be overridden by a child class, + # we collect them here and update the class dict + reused_slots = { + slot: slot_descriptor + for slot, slot_descriptor in existing_slots.items() + if slot in slot_names + } + slot_names = [name for name in slot_names if name not in reused_slots] + cd.update(reused_slots) + if self._cache_hash: + slot_names.append(_hash_cache_field) + cd["__slots__"] = tuple(slot_names) + + cd["__qualname__"] = self._cls.__qualname__ + + # Create new class based on old class and our methods. + cls = type(self._cls)(self._cls.__name__, self._cls.__bases__, cd) + + # The following is a fix for + # . + # If a method mentions `__class__` or uses the no-arg super(), the + # compiler will bake a reference to the class in the method itself + # as `method.__closure__`. Since we replace the class with a + # clone, we rewrite these references so it keeps working. + for item in cls.__dict__.values(): + if isinstance(item, (classmethod, staticmethod)): + # Class- and staticmethods hide their functions inside. + # These might need to be rewritten as well. + closure_cells = getattr(item.__func__, "__closure__", None) + elif isinstance(item, property): + # Workaround for property `super()` shortcut (PY3-only). + # There is no universal way for other descriptors. + closure_cells = getattr(item.fget, "__closure__", None) + else: + closure_cells = getattr(item, "__closure__", None) + + if not closure_cells: # Catch None or the empty list. + continue + for cell in closure_cells: + try: + match = cell.cell_contents is self._cls + except ValueError: # ValueError: Cell is empty + pass + else: + if match: + set_closure_cell(cell, cls) + + return cls + + def add_repr(self, ns): + self._cls_dict["__repr__"] = self._add_method_dunders( + _make_repr(self._attrs, ns, self._cls) + ) + return self + + def add_str(self): + repr = self._cls_dict.get("__repr__") + if repr is None: + raise ValueError( + "__str__ can only be generated if a __repr__ exists." + ) + + def __str__(self): + return self.__repr__() + + self._cls_dict["__str__"] = self._add_method_dunders(__str__) + return self + + def _make_getstate_setstate(self): + """ + Create custom __setstate__ and __getstate__ methods. + """ + # __weakref__ is not writable. + state_attr_names = tuple( + an for an in self._attr_names if an != "__weakref__" + ) + + def slots_getstate(self): + """ + Automatically created by attrs. + """ + return {name: getattr(self, name) for name in state_attr_names} + + hash_caching_enabled = self._cache_hash + + def slots_setstate(self, state): + """ + Automatically created by attrs. + """ + __bound_setattr = _obj_setattr.__get__(self) + for name in state_attr_names: + if name in state: + __bound_setattr(name, state[name]) + + # The hash code cache is not included when the object is + # serialized, but it still needs to be initialized to None to + # indicate that the first call to __hash__ should be a cache + # miss. 
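Stepping out of `_create_slots_class` for a moment: the `__getstate__`/`__setstate__` pair generated nearby is what makes slotted attrs classes picklable, since they have no `__dict__` to serialize. A minimal sketch, assuming a module-level hypothetical class `P`:

```python
import pickle
import attr

@attr.s(slots=True)
class P:
    x = attr.ib()
    y = attr.ib()

p = P(1, 2)
# The generated methods shuttle the slot values through a plain dict.
assert p.__getstate__() == {"x": 1, "y": 2}
assert pickle.loads(pickle.dumps(p)) == p
```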
+ if hash_caching_enabled: + __bound_setattr(_hash_cache_field, None) + + return slots_getstate, slots_setstate + + def make_unhashable(self): + self._cls_dict["__hash__"] = None + return self + + def add_hash(self): + self._cls_dict["__hash__"] = self._add_method_dunders( + _make_hash( + self._cls, + self._attrs, + frozen=self._frozen, + cache_hash=self._cache_hash, + ) + ) + + return self + + def add_init(self): + self._cls_dict["__init__"] = self._add_method_dunders( + _make_init( + self._cls, + self._attrs, + self._has_pre_init, + self._has_post_init, + self._frozen, + self._slots, + self._cache_hash, + self._base_attr_map, + self._is_exc, + self._on_setattr, + attrs_init=False, + ) + ) + + return self + + def add_match_args(self): + self._cls_dict["__match_args__"] = tuple( + field.name + for field in self._attrs + if field.init and not field.kw_only + ) + + def add_attrs_init(self): + self._cls_dict["__attrs_init__"] = self._add_method_dunders( + _make_init( + self._cls, + self._attrs, + self._has_pre_init, + self._has_post_init, + self._frozen, + self._slots, + self._cache_hash, + self._base_attr_map, + self._is_exc, + self._on_setattr, + attrs_init=True, + ) + ) + + return self + + def add_eq(self): + cd = self._cls_dict + + cd["__eq__"] = self._add_method_dunders( + _make_eq(self._cls, self._attrs) + ) + cd["__ne__"] = self._add_method_dunders(_make_ne()) + + return self + + def add_order(self): + cd = self._cls_dict + + cd["__lt__"], cd["__le__"], cd["__gt__"], cd["__ge__"] = ( + self._add_method_dunders(meth) + for meth in _make_order(self._cls, self._attrs) + ) + + return self + + def add_setattr(self): + if self._frozen: + return self + + sa_attrs = {} + for a in self._attrs: + on_setattr = a.on_setattr or self._on_setattr + if on_setattr and on_setattr is not setters.NO_OP: + sa_attrs[a.name] = a, on_setattr + + if not sa_attrs: + return self + + if self._has_custom_setattr: + # We need to write a __setattr__ but there already is one! + raise ValueError( + "Can't combine custom __setattr__ with on_setattr hooks." + ) + + # docstring comes from _add_method_dunders + def __setattr__(self, name, val): + try: + a, hook = sa_attrs[name] + except KeyError: + nval = val + else: + nval = hook(self, a, val) + + _obj_setattr(self, name, nval) + + self._cls_dict["__attrs_own_setattr__"] = True + self._cls_dict["__setattr__"] = self._add_method_dunders(__setattr__) + self._wrote_own_setattr = True + + return self + + def _add_method_dunders(self, method): + """ + Add __module__ and __qualname__ to a *method* if possible. + """ + try: + method.__module__ = self._cls.__module__ + except AttributeError: + pass + + try: + method.__qualname__ = ".".join( + (self._cls.__qualname__, method.__name__) + ) + except AttributeError: + pass + + try: + method.__doc__ = ( + "Method generated by attrs for class " + f"{self._cls.__qualname__}." + ) + except AttributeError: + pass + + return method + + +def _determine_attrs_eq_order(cmp, eq, order, default_eq): + """ + Validate the combination of *cmp*, *eq*, and *order*. Derive the effective + values of eq and order. If *eq* is None, set it to *default_eq*. + """ + if cmp is not None and any((eq is not None, order is not None)): + raise ValueError("Don't mix `cmp` with `eq' and `order`.") + + # cmp takes precedence due to bw-compatibility. + if cmp is not None: + return cmp, cmp + + # If left None, equality is set to the specified default and ordering + # mirrors equality. 
+ if eq is None: + eq = default_eq + + if order is None: + order = eq + + if eq is False and order is True: + raise ValueError("`order` can only be True if `eq` is True too.") + + return eq, order + + +def _determine_attrib_eq_order(cmp, eq, order, default_eq): + """ + Validate the combination of *cmp*, *eq*, and *order*. Derive the effective + values of eq and order. If *eq* is None, set it to *default_eq*. + """ + if cmp is not None and any((eq is not None, order is not None)): + raise ValueError("Don't mix `cmp` with `eq' and `order`.") + + def decide_callable_or_boolean(value): + """ + Decide whether a key function is used. + """ + if callable(value): + value, key = True, value + else: + key = None + return value, key + + # cmp takes precedence due to bw-compatibility. + if cmp is not None: + cmp, cmp_key = decide_callable_or_boolean(cmp) + return cmp, cmp_key, cmp, cmp_key + + # If left None, equality is set to the specified default and ordering + # mirrors equality. + if eq is None: + eq, eq_key = default_eq, None + else: + eq, eq_key = decide_callable_or_boolean(eq) + + if order is None: + order, order_key = eq, eq_key + else: + order, order_key = decide_callable_or_boolean(order) + + if eq is False and order is True: + raise ValueError("`order` can only be True if `eq` is True too.") + + return eq, eq_key, order, order_key + + +def _determine_whether_to_implement( + cls, flag, auto_detect, dunders, default=True +): + """ + Check whether we should implement a set of methods for *cls*. + + *flag* is the argument passed into @attr.s like 'init', *auto_detect* the + same as passed into @attr.s and *dunders* is a tuple of attribute names + whose presence signal that the user has implemented it themselves. + + Return *default* if no reason for either for or against is found. + """ + if flag is True or flag is False: + return flag + + if flag is None and auto_detect is False: + return default + + # Logically, flag is None and auto_detect is True here. + for dunder in dunders: + if _has_own_attribute(cls, dunder): + return False + + return default + + +def attrs( + maybe_cls=None, + these=None, + repr_ns=None, + repr=None, + cmp=None, + hash=None, + init=None, + slots=False, + frozen=False, + weakref_slot=True, + str=False, + auto_attribs=False, + kw_only=False, + cache_hash=False, + auto_exc=False, + eq=None, + order=None, + auto_detect=False, + collect_by_mro=False, + getstate_setstate=None, + on_setattr=None, + field_transformer=None, + match_args=True, + unsafe_hash=None, +): + r""" + A class decorator that adds :term:`dunder methods` according to the + specified attributes using `attr.ib` or the *these* argument. + + :param these: A dictionary of name to `attr.ib` mappings. This is + useful to avoid the definition of your attributes within the class body + because you can't (e.g. if you want to add ``__repr__`` methods to + Django models) or don't want to. + + If *these* is not ``None``, ``attrs`` will *not* search the class body + for attributes and will *not* remove any attributes from it. + + The order is deduced from the order of the attributes inside *these*. + + :type these: `dict` of `str` to `attr.ib` + + :param str repr_ns: When using nested classes, there's no way in Python 2 + to automatically detect that. Therefore it's possible to set the + namespace explicitly for a more meaningful ``repr`` output. 
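The `decide_callable_or_boolean` helper above is what lets *eq* and *order* accept a comparison key in place of a bool; a one-field sketch (the `User` class is invented for the example):

```python
import attr

@attr.s
class User:
    email = attr.ib(eq=str.lower)   # compare case-insensitively

# The generated __eq__ applies str.lower to both sides before comparing.
assert User("Alice@Example.COM") == User("alice@example.com")
```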
+    :param bool auto_detect: Instead of setting the *init*, *repr*, *eq*,
+        *order*, and *hash* arguments explicitly, assume they are set to
+        ``True`` **unless any** of the involved methods for one of the
+        arguments is implemented in the *current* class (i.e. it is *not*
+        inherited from some base class).
+
+        So for example by implementing ``__eq__`` on a class yourself,
+        ``attrs`` will deduce ``eq=False`` and will create *neither*
+        ``__eq__`` *nor* ``__ne__`` (but Python classes come with a sensible
+        ``__ne__`` by default, so it *should* be enough to only implement
+        ``__eq__`` in most cases).
+
+        .. warning::
+
+           If you prevent ``attrs`` from creating the ordering methods for you
+           (``order=False``, e.g. by implementing ``__le__``), it becomes
+           *your* responsibility to make sure its ordering is sound. The best
+           way is to use the `functools.total_ordering` decorator.
+
+
+        Passing ``True`` or ``False`` to *init*, *repr*, *eq*, *order*,
+        *cmp*, or *hash* overrides whatever *auto_detect* would determine.
+
+    :param bool repr: Create a ``__repr__`` method with a human readable
+        representation of ``attrs`` attributes.
+    :param bool str: Create a ``__str__`` method that is identical to
+        ``__repr__``. This is usually not necessary except for
+        `Exception`\ s.
+    :param Optional[bool] eq: If ``True`` or ``None`` (default), add ``__eq__``
+        and ``__ne__`` methods that check two instances for equality.
+
+        They compare the instances as if they were tuples of their ``attrs``
+        attributes if and only if the types of both classes are *identical*!
+    :param Optional[bool] order: If ``True``, add ``__lt__``, ``__le__``,
+        ``__gt__``, and ``__ge__`` methods that behave like *eq* above and
+        allow instances to be ordered. If ``None`` (default), mirror the value
+        of *eq*.
+    :param Optional[bool] cmp: Setting *cmp* is equivalent to setting *eq*
+        and *order* to the same value. Must not be mixed with *eq* or *order*.
+    :param Optional[bool] unsafe_hash: If ``None`` (default), the ``__hash__``
+        method is generated according to how *eq* and *frozen* are set.
+
+        1. If *both* are True, ``attrs`` will generate a ``__hash__`` for you.
+        2. If *eq* is True and *frozen* is False, ``__hash__`` will be set to
+           None, marking it unhashable (which it is).
+        3. If *eq* is False, ``__hash__`` will be left untouched meaning the
+           ``__hash__`` method of the base class will be used (if base class is
+           ``object``, this means it will fall back to id-based hashing).
+
+        Although not recommended, you can decide for yourself and force
+        ``attrs`` to create one (e.g. if the class is immutable even though you
+        didn't freeze it programmatically) by passing ``True`` or not. Both of
+        these cases are rather special and should be used carefully.
+
+        See our documentation on `hashing`, Python's documentation on
+        `object.__hash__`, and the `GitHub issue that led to the default \
+        behavior `_ for more
+        details.
+    :param Optional[bool] hash: Alias for *unsafe_hash*. *unsafe_hash* takes
+        precedence.
+    :param bool init: Create an ``__init__`` method that initializes the
+        ``attrs`` attributes. Leading underscores are stripped for the argument
+        name. If a ``__attrs_pre_init__`` method exists on the class, it will
+        be called before the class is initialized. If a ``__attrs_post_init__``
+        method exists on the class, it will be called after the class is fully
+        initialized.
+
+        If ``init`` is ``False``, an ``__attrs_init__`` method will be
+        injected instead.
This allows you to define a custom ``__init__``
+        method that can do pre-init work such as ``super().__init__()``,
+        and then call ``__attrs_init__()`` and ``__attrs_post_init__()``.
+    :param bool slots: Create a :term:`slotted class ` that's
+        more memory-efficient. Slotted classes are generally superior to the
+        default dict classes, but have some gotchas you should know about, so
+        we encourage you to read the :term:`glossary entry `.
+    :param bool frozen: Make instances immutable after initialization. If
+        someone attempts to modify a frozen instance,
+        `attr.exceptions.FrozenInstanceError` is raised.
+
+        .. note::
+
+            1. This is achieved by installing a custom ``__setattr__`` method
+               on your class, so you can't implement your own.
+
+            2. True immutability is impossible in Python.
+
+            3. This *does* have a minor runtime performance `impact
+               ` when initializing new instances. In other words:
+               ``__init__`` is slightly slower with ``frozen=True``.
+
+            4. If a class is frozen, you cannot modify ``self`` in
+               ``__attrs_post_init__`` or a self-written ``__init__``. You can
+               circumvent that limitation by using
+               ``object.__setattr__(self, "attribute_name", value)``.
+
+            5. Subclasses of a frozen class are frozen too.
+
+    :param bool weakref_slot: Make instances weak-referenceable. This has no
+        effect unless ``slots`` is also enabled.
+    :param bool auto_attribs: If ``True``, collect :pep:`526`-annotated
+        attributes from the class body.
+
+        In this case, you **must** annotate every field. If ``attrs``
+        encounters a field that is set to an `attr.ib` but lacks a type
+        annotation, an `attr.exceptions.UnannotatedAttributeError` is
+        raised. Use ``field_name: typing.Any = attr.ib(...)`` if you don't
+        want to set a type.
+
+        If you assign a value to those attributes (e.g. ``x: int = 42``), that
+        value becomes the default value as if it were passed using
+        ``attr.ib(default=42)``. Passing an instance of `attrs.Factory` also
+        works as expected in most cases (see warning below).
+
+        Attributes annotated as `typing.ClassVar`, and attributes that are
+        neither annotated nor set to an `attr.ib` are **ignored**.
+
+        .. warning::
+           For features that use the attribute name to create decorators (e.g.
+           `validators `), you still *must* assign `attr.ib` to
+           them. Otherwise Python will either not find the name or try to use
+           the default value to call e.g. ``validator`` on it.
+
+           These errors can be quite confusing and are probably the most common
+           bug reports on our bug tracker.
+
+    :param bool kw_only: Make all attributes keyword-only
+        in the generated ``__init__`` (if ``init`` is ``False``, this
+        parameter is ignored).
+    :param bool cache_hash: Ensure that the object's hash code is computed
+        only once and stored on the object. If this is set to ``True``,
+        hashing must be either explicitly or implicitly enabled for this
+        class. If the hash code is cached, avoid any reassignments of
+        fields involved in hash code computation or mutations of the objects
+        those fields point to after object creation. If such changes occur,
+        the behavior of the object's hash code is undefined.
+    :param bool auto_exc: If the class subclasses `BaseException`
+        (which implicitly includes any subclass of any exception), the
+        following happens to make it behave like a well-behaved Python
+        exception class:
+
+        - the values for *eq*, *order*, and *hash* are ignored and the
+          instances compare and hash by the instance's ids (N.B. ``attrs`` will
+          *not* remove existing implementations of ``__hash__`` or the equality
+          methods.
It just won't add own ones.),
+        - all attributes that are either passed into ``__init__`` or have a
+          default value are additionally available as a tuple in the ``args``
+          attribute,
+        - the value of *str* is ignored, leaving ``__str__`` to base classes.
+    :param bool collect_by_mro: Setting this to `True` fixes the way ``attrs``
+        collects attributes from base classes. The default behavior is
+        incorrect in certain cases of multiple inheritance. It should be on by
+        default but is kept off for backward-compatibility.
+
+        See issue `#428 `_ for
+        more details.
+
+    :param Optional[bool] getstate_setstate:
+        .. note::
+            This is usually only interesting for slotted classes and you should
+            probably just set *auto_detect* to `True`.
+
+        If `True`, ``__getstate__`` and
+        ``__setstate__`` are generated and attached to the class. This is
+        necessary for slotted classes to be pickleable. If left `None`, it's
+        `True` by default for slotted classes and ``False`` for dict classes.
+
+        If *auto_detect* is `True`, and *getstate_setstate* is left `None`,
+        and **either** ``__getstate__`` or ``__setstate__`` is detected directly
+        on the class (i.e. not inherited), it is set to `False` (this is usually
+        what you want).
+
+    :param on_setattr: A callable that is run whenever the user attempts to set
+        an attribute (either by assignment like ``i.x = 42`` or by using
+        `setattr` like ``setattr(i, "x", 42)``). It receives the same arguments
+        as validators: the instance, the attribute that is being modified, and
+        the new value.
+
+        If no exception is raised, the attribute is set to the return value of
+        the callable.
+
+        If a list of callables is passed, they're automatically wrapped in an
+        `attrs.setters.pipe`.
+    :type on_setattr: `callable`, or a list of callables, or `None`, or
+        `attrs.setters.NO_OP`
+
+    :param Optional[callable] field_transformer:
+        A function that is called with the original class object and all
+        fields right before ``attrs`` finalizes the class. You can use
+        this, e.g., to automatically add converters or validators to
+        fields based on their types. See `transform-fields` for more details.
+
+    :param bool match_args:
+        If `True` (default), set ``__match_args__`` on the class to support
+        :pep:`634` (Structural Pattern Matching). It is a tuple of all
+        non-keyword-only ``__init__`` parameter names on Python 3.10 and later.
+        Ignored on older Python versions.
+
+    .. versionadded:: 16.0.0 *slots*
+    .. versionadded:: 16.1.0 *frozen*
+    .. versionadded:: 16.3.0 *str*
+    .. versionadded:: 16.3.0 Support for ``__attrs_post_init__``.
+    .. versionchanged:: 17.1.0
+       *hash* supports ``None`` as value which is also the default now.
+    .. versionadded:: 17.3.0 *auto_attribs*
+    .. versionchanged:: 18.1.0
+       If *these* is passed, no attributes are deleted from the class body.
+    .. versionchanged:: 18.1.0 If *these* is ordered, the order is retained.
+    .. versionadded:: 18.2.0 *weakref_slot*
+    .. deprecated:: 18.2.0
+       ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` now raise a
+       `DeprecationWarning` if the classes compared are subclasses of
+       each other. ``__eq__`` and ``__ne__`` never tried to compare subclasses
+       to each other.
+    .. versionchanged:: 19.2.0
+       ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` now do not consider
+       subclasses comparable anymore.
+    .. versionadded:: 18.2.0 *kw_only*
+    .. versionadded:: 18.2.0 *cache_hash*
+    .. versionadded:: 19.1.0 *auto_exc*
+    .. deprecated:: 19.2.0 *cmp* Removal on or after 2021-06-01.
+    .. versionadded:: 19.2.0 *eq* and *order*
+    ..
versionadded:: 20.1.0 *auto_detect* + .. versionadded:: 20.1.0 *collect_by_mro* + .. versionadded:: 20.1.0 *getstate_setstate* + .. versionadded:: 20.1.0 *on_setattr* + .. versionadded:: 20.3.0 *field_transformer* + .. versionchanged:: 21.1.0 + ``init=False`` injects ``__attrs_init__`` + .. versionchanged:: 21.1.0 Support for ``__attrs_pre_init__`` + .. versionchanged:: 21.1.0 *cmp* undeprecated + .. versionadded:: 21.3.0 *match_args* + .. versionadded:: 22.2.0 + *unsafe_hash* as an alias for *hash* (for :pep:`681` compliance). + """ + eq_, order_ = _determine_attrs_eq_order(cmp, eq, order, None) + + # unsafe_hash takes precedence due to PEP 681. + if unsafe_hash is not None: + hash = unsafe_hash + + if isinstance(on_setattr, (list, tuple)): + on_setattr = setters.pipe(*on_setattr) + + def wrap(cls): + is_frozen = frozen or _has_frozen_base_class(cls) + is_exc = auto_exc is True and issubclass(cls, BaseException) + has_own_setattr = auto_detect and _has_own_attribute( + cls, "__setattr__" + ) + + if has_own_setattr and is_frozen: + raise ValueError("Can't freeze a class with a custom __setattr__.") + + builder = _ClassBuilder( + cls, + these, + slots, + is_frozen, + weakref_slot, + _determine_whether_to_implement( + cls, + getstate_setstate, + auto_detect, + ("__getstate__", "__setstate__"), + default=slots, + ), + auto_attribs, + kw_only, + cache_hash, + is_exc, + collect_by_mro, + on_setattr, + has_own_setattr, + field_transformer, + ) + if _determine_whether_to_implement( + cls, repr, auto_detect, ("__repr__",) + ): + builder.add_repr(repr_ns) + if str is True: + builder.add_str() + + eq = _determine_whether_to_implement( + cls, eq_, auto_detect, ("__eq__", "__ne__") + ) + if not is_exc and eq is True: + builder.add_eq() + if not is_exc and _determine_whether_to_implement( + cls, order_, auto_detect, ("__lt__", "__le__", "__gt__", "__ge__") + ): + builder.add_order() + + builder.add_setattr() + + nonlocal hash + if ( + hash is None + and auto_detect is True + and _has_own_attribute(cls, "__hash__") + ): + hash = False + + if hash is not True and hash is not False and hash is not None: + # Can't use `hash in` because 1 == True for example. + raise TypeError( + "Invalid value for hash. Must be True, False, or None." + ) + elif hash is False or (hash is None and eq is False) or is_exc: + # Don't do anything. Should fall back to __object__'s __hash__ + # which is by id. + if cache_hash: + raise TypeError( + "Invalid value for cache_hash. To use hash caching," + " hashing must be either explicitly or implicitly " + "enabled." + ) + elif hash is True or ( + hash is None and eq is True and is_frozen is True + ): + # Build a __hash__ if told so, or if it's safe. + builder.add_hash() + else: + # Raise TypeError on attempts to hash. + if cache_hash: + raise TypeError( + "Invalid value for cache_hash. To use hash caching," + " hashing must be either explicitly or implicitly " + "enabled." + ) + builder.make_unhashable() + + if _determine_whether_to_implement( + cls, init, auto_detect, ("__init__",) + ): + builder.add_init() + else: + builder.add_attrs_init() + if cache_hash: + raise TypeError( + "Invalid value for cache_hash. To use hash caching," + " init must be True." + ) + + if ( + PY310 + and match_args + and not _has_own_attribute(cls, "__match_args__") + ): + builder.add_match_args() + + return builder.build_class() + + # maybe_cls's type depends on the usage of the decorator. It's a class + # if it's used as `@attrs` but ``None`` if used as `@attrs()`. 
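An aside on the `wrap`/`maybe_cls` pattern and the hash decision tree in the body above (hypothetical classes; the behavior matches the docstring's hashing rules):

```python
import attr

@attr.s                  # bare decorator: maybe_cls is the class itself
class A:
    x = attr.ib()

@attr.s(frozen=True)     # called form: maybe_cls is None, wrap() is returned
class B:
    x = attr.ib()

# eq=True with frozen=True is the safe case where a __hash__ is generated.
assert hash(B(1)) == hash(B(1))

# eq=True without frozen marks the class unhashable (__hash__ set to None).
assert A(1).__hash__ is None
```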
+    if maybe_cls is None:
+        return wrap
+    else:
+        return wrap(maybe_cls)
+
+
+_attrs = attrs
+"""
+Internal alias so we can use it in functions that take an argument called
+*attrs*.
+"""
+
+
+def _has_frozen_base_class(cls):
+    """
+    Check whether *cls* has a frozen ancestor by looking at its
+    __setattr__.
+    """
+    return cls.__setattr__ is _frozen_setattrs
+
+
+def _generate_unique_filename(cls, func_name):
+    """
+    Create a "filename" suitable for a function being generated.
+    """
+    return (
+        f"<attrs generated {func_name} {cls.__module__}."
+        f"{getattr(cls, '__qualname__', cls.__name__)}>"
+    )
+
+
+def _make_hash(cls, attrs, frozen, cache_hash):
+    attrs = tuple(
+        a for a in attrs if a.hash is True or (a.hash is None and a.eq is True)
+    )
+
+    tab = "        "
+
+    unique_filename = _generate_unique_filename(cls, "hash")
+    type_hash = hash(unique_filename)
+    # If eq is custom generated, we need to include the functions in globs
+    globs = {}
+
+    hash_def = "def __hash__(self"
+    hash_func = "hash(("
+    closing_braces = "))"
+    if not cache_hash:
+        hash_def += "):"
+    else:
+        hash_def += ", *"
+
+        hash_def += (
+            ", _cache_wrapper="
+            + "__import__('attr._make')._make._CacheHashWrapper):"
+        )
+        hash_func = "_cache_wrapper(" + hash_func
+        closing_braces += ")"
+
+    method_lines = [hash_def]
+
+    def append_hash_computation_lines(prefix, indent):
+        """
+        Generate the code for actually computing the hash code.
+        Below this will either be returned directly or used to compute
+        a value which is then cached, depending on the value of cache_hash
+        """
+
+        method_lines.extend(
+            [
+                indent + prefix + hash_func,
+                indent + f"        {type_hash},",
+            ]
+        )
+
+        for a in attrs:
+            if a.eq_key:
+                cmp_name = f"_{a.name}_key"
+                globs[cmp_name] = a.eq_key
+                method_lines.append(
+                    indent + f"        {cmp_name}(self.{a.name}),"
+                )
+            else:
+                method_lines.append(indent + f"        self.{a.name},")
+
+        method_lines.append(indent + "    " + closing_braces)
+
+    if cache_hash:
+        method_lines.append(tab + f"if self.{_hash_cache_field} is None:")
+        if frozen:
+            append_hash_computation_lines(
+                f"object.__setattr__(self, '{_hash_cache_field}', ", tab * 2
+            )
+            method_lines.append(tab * 2 + ")")  # close __setattr__
+        else:
+            append_hash_computation_lines(
+                f"self.{_hash_cache_field} = ", tab * 2
+            )
+        method_lines.append(tab + f"return self.{_hash_cache_field}")
+    else:
+        append_hash_computation_lines("return ", tab)
+
+    script = "\n".join(method_lines)
+    return _make_method("__hash__", script, unique_filename, globs)
+
+
+def _add_hash(cls, attrs):
+    """
+    Add a hash method to *cls*.
+    """
+    cls.__hash__ = _make_hash(cls, attrs, frozen=False, cache_hash=False)
+    return cls
+
+
+def _make_ne():
+    """
+    Create __ne__ method.
+    """
+
+    def __ne__(self, other):
+        """
+        Check equality and either forward a NotImplemented or
+        return the result negated.
+        """
+        result = self.__eq__(other)
+        if result is NotImplemented:
+            return NotImplemented
+
+        return not result
+
+    return __ne__
+
+
+def _make_eq(cls, attrs):
+    """
+    Create __eq__ method for *cls* with *attrs*.
+    """
+    attrs = [a for a in attrs if a.eq]
+
+    unique_filename = _generate_unique_filename(cls, "eq")
+    lines = [
+        "def __eq__(self, other):",
+        "    if other.__class__ is not self.__class__:",
+        "        return NotImplemented",
+    ]
+
+    # We can't just do a big self.x == other.x and ... clause due to
+    # irregularities like nan == nan is false but (nan,) == (nan,) is true.
+    globs = {}
+    if attrs:
+        lines.append("    return (")
+        others = ["    ) == ("]
+        for a in attrs:
+            if a.eq_key:
+                cmp_name = f"_{a.name}_key"
+                # Add the key function to the global namespace
+                # of the evaluated function.
+ globs[cmp_name] = a.eq_key + lines.append(f" {cmp_name}(self.{a.name}),") + others.append(f" {cmp_name}(other.{a.name}),") + else: + lines.append(f" self.{a.name},") + others.append(f" other.{a.name},") + + lines += others + [" )"] + else: + lines.append(" return True") + + script = "\n".join(lines) + + return _make_method("__eq__", script, unique_filename, globs) + + +def _make_order(cls, attrs): + """ + Create ordering methods for *cls* with *attrs*. + """ + attrs = [a for a in attrs if a.order] + + def attrs_to_tuple(obj): + """ + Save us some typing. + """ + return tuple( + key(value) if key else value + for value, key in ( + (getattr(obj, a.name), a.order_key) for a in attrs + ) + ) + + def __lt__(self, other): + """ + Automatically created by attrs. + """ + if other.__class__ is self.__class__: + return attrs_to_tuple(self) < attrs_to_tuple(other) + + return NotImplemented + + def __le__(self, other): + """ + Automatically created by attrs. + """ + if other.__class__ is self.__class__: + return attrs_to_tuple(self) <= attrs_to_tuple(other) + + return NotImplemented + + def __gt__(self, other): + """ + Automatically created by attrs. + """ + if other.__class__ is self.__class__: + return attrs_to_tuple(self) > attrs_to_tuple(other) + + return NotImplemented + + def __ge__(self, other): + """ + Automatically created by attrs. + """ + if other.__class__ is self.__class__: + return attrs_to_tuple(self) >= attrs_to_tuple(other) + + return NotImplemented + + return __lt__, __le__, __gt__, __ge__ + + +def _add_eq(cls, attrs=None): + """ + Add equality methods to *cls* with *attrs*. + """ + if attrs is None: + attrs = cls.__attrs_attrs__ + + cls.__eq__ = _make_eq(cls, attrs) + cls.__ne__ = _make_ne() + + return cls + + +def _make_repr(attrs, ns, cls): + unique_filename = _generate_unique_filename(cls, "repr") + # Figure out which attributes to include, and which function to use to + # format them. The a.repr value can be either bool or a custom + # callable. + attr_names_with_reprs = tuple( + (a.name, (repr if a.repr is True else a.repr), a.init) + for a in attrs + if a.repr is not False + ) + globs = { + name + "_repr": r for name, r, _ in attr_names_with_reprs if r != repr + } + globs["_compat"] = _compat + globs["AttributeError"] = AttributeError + globs["NOTHING"] = NOTHING + attribute_fragments = [] + for name, r, i in attr_names_with_reprs: + accessor = ( + "self." + name if i else 'getattr(self, "' + name + '", NOTHING)' + ) + fragment = ( + "%s={%s!r}" % (name, accessor) + if r == repr + else "%s={%s_repr(%s)}" % (name, name, accessor) + ) + attribute_fragments.append(fragment) + repr_fragment = ", ".join(attribute_fragments) + + if ns is None: + cls_name_fragment = '{self.__class__.__qualname__.rsplit(">.", 1)[-1]}' + else: + cls_name_fragment = ns + ".{self.__class__.__name__}" + + lines = [ + "def __repr__(self):", + " try:", + " already_repring = _compat.repr_context.already_repring", + " except AttributeError:", + " already_repring = {id(self),}", + " _compat.repr_context.already_repring = already_repring", + " else:", + " if id(self) in already_repring:", + " return '...'", + " else:", + " already_repring.add(id(self))", + " try:", + f" return f'{cls_name_fragment}({repr_fragment})'", + " finally:", + " already_repring.remove(id(self))", + ] + + return _make_method( + "__repr__", "\n".join(lines), unique_filename, globs=globs + ) + + +def _add_repr(cls, ns=None, attrs=None): + """ + Add a repr method to *cls*. 
+ """ + if attrs is None: + attrs = cls.__attrs_attrs__ + + cls.__repr__ = _make_repr(attrs, ns, cls) + return cls + + +def fields(cls): + """ + Return the tuple of ``attrs`` attributes for a class. + + The tuple also allows accessing the fields by their names (see below for + examples). + + :param type cls: Class to introspect. + + :raise TypeError: If *cls* is not a class. + :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` + class. + + :rtype: tuple (with name accessors) of `attrs.Attribute` + + .. versionchanged:: 16.2.0 Returned tuple allows accessing the fields + by name. + """ + if not isinstance(cls, type): + raise TypeError("Passed object must be a class.") + attrs = getattr(cls, "__attrs_attrs__", None) + if attrs is None: + raise NotAnAttrsClassError(f"{cls!r} is not an attrs-decorated class.") + return attrs + + +def fields_dict(cls): + """ + Return an ordered dictionary of ``attrs`` attributes for a class, whose + keys are the attribute names. + + :param type cls: Class to introspect. + + :raise TypeError: If *cls* is not a class. + :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` + class. + + :rtype: dict + + .. versionadded:: 18.1.0 + """ + if not isinstance(cls, type): + raise TypeError("Passed object must be a class.") + attrs = getattr(cls, "__attrs_attrs__", None) + if attrs is None: + raise NotAnAttrsClassError(f"{cls!r} is not an attrs-decorated class.") + return {a.name: a for a in attrs} + + +def validate(inst): + """ + Validate all attributes on *inst* that have a validator. + + Leaves all exceptions through. + + :param inst: Instance of a class with ``attrs`` attributes. + """ + if _config._run_validators is False: + return + + for a in fields(inst.__class__): + v = a.validator + if v is not None: + v(inst, a, getattr(inst, a.name)) + + +def _is_slot_cls(cls): + return "__slots__" in cls.__dict__ + + +def _is_slot_attr(a_name, base_attr_map): + """ + Check if the attribute name comes from a slot class. + """ + return a_name in base_attr_map and _is_slot_cls(base_attr_map[a_name]) + + +def _make_init( + cls, + attrs, + pre_init, + post_init, + frozen, + slots, + cache_hash, + base_attr_map, + is_exc, + cls_on_setattr, + attrs_init, +): + has_cls_on_setattr = ( + cls_on_setattr is not None and cls_on_setattr is not setters.NO_OP + ) + + if frozen and has_cls_on_setattr: + raise ValueError("Frozen classes can't use on_setattr.") + + needs_cached_setattr = cache_hash or frozen + filtered_attrs = [] + attr_dict = {} + for a in attrs: + if not a.init and a.default is NOTHING: + continue + + filtered_attrs.append(a) + attr_dict[a.name] = a + + if a.on_setattr is not None: + if frozen is True: + raise ValueError("Frozen classes can't use on_setattr.") + + needs_cached_setattr = True + elif has_cls_on_setattr and a.on_setattr is not setters.NO_OP: + needs_cached_setattr = True + + unique_filename = _generate_unique_filename(cls, "init") + + script, globs, annotations = _attrs_to_init_script( + filtered_attrs, + frozen, + slots, + pre_init, + post_init, + cache_hash, + base_attr_map, + is_exc, + needs_cached_setattr, + has_cls_on_setattr, + attrs_init, + ) + if cls.__module__ in sys.modules: + # This makes typing.get_type_hints(CLS.__init__) resolve string types. + globs.update(sys.modules[cls.__module__].__dict__) + + globs.update({"NOTHING": NOTHING, "attr_dict": attr_dict}) + + if needs_cached_setattr: + # Save the lookup overhead in __init__ if we need to circumvent + # setattr hooks. 
+ globs["_cached_setattr_get"] = _obj_setattr.__get__ + + init = _make_method( + "__attrs_init__" if attrs_init else "__init__", + script, + unique_filename, + globs, + ) + init.__annotations__ = annotations + + return init + + +def _setattr(attr_name, value_var, has_on_setattr): + """ + Use the cached object.setattr to set *attr_name* to *value_var*. + """ + return f"_setattr('{attr_name}', {value_var})" + + +def _setattr_with_converter(attr_name, value_var, has_on_setattr): + """ + Use the cached object.setattr to set *attr_name* to *value_var*, but run + its converter first. + """ + return "_setattr('%s', %s(%s))" % ( + attr_name, + _init_converter_pat % (attr_name,), + value_var, + ) + + +def _assign(attr_name, value, has_on_setattr): + """ + Unless *attr_name* has an on_setattr hook, use normal assignment. Otherwise + relegate to _setattr. + """ + if has_on_setattr: + return _setattr(attr_name, value, True) + + return f"self.{attr_name} = {value}" + + +def _assign_with_converter(attr_name, value_var, has_on_setattr): + """ + Unless *attr_name* has an on_setattr hook, use normal assignment after + conversion. Otherwise relegate to _setattr_with_converter. + """ + if has_on_setattr: + return _setattr_with_converter(attr_name, value_var, True) + + return "self.%s = %s(%s)" % ( + attr_name, + _init_converter_pat % (attr_name,), + value_var, + ) + + +def _attrs_to_init_script( + attrs, + frozen, + slots, + pre_init, + post_init, + cache_hash, + base_attr_map, + is_exc, + needs_cached_setattr, + has_cls_on_setattr, + attrs_init, +): + """ + Return a script of an initializer for *attrs* and a dict of globals. + + The globals are expected by the generated script. + + If *frozen* is True, we cannot set the attributes directly so we use + a cached ``object.__setattr__``. + """ + lines = [] + if pre_init: + lines.append("self.__attrs_pre_init__()") + + if needs_cached_setattr: + lines.append( + # Circumvent the __setattr__ descriptor to save one lookup per + # assignment. + # Note _setattr will be used again below if cache_hash is True + "_setattr = _cached_setattr_get(self)" + ) + + if frozen is True: + if slots is True: + fmt_setter = _setattr + fmt_setter_with_converter = _setattr_with_converter + else: + # Dict frozen classes assign directly to __dict__. + # But only if the attribute doesn't come from an ancestor slot + # class. + # Note _inst_dict will be used again below if cache_hash is True + lines.append("_inst_dict = self.__dict__") + + def fmt_setter(attr_name, value_var, has_on_setattr): + if _is_slot_attr(attr_name, base_attr_map): + return _setattr(attr_name, value_var, has_on_setattr) + + return f"_inst_dict['{attr_name}'] = {value_var}" + + def fmt_setter_with_converter( + attr_name, value_var, has_on_setattr + ): + if has_on_setattr or _is_slot_attr(attr_name, base_attr_map): + return _setattr_with_converter( + attr_name, value_var, has_on_setattr + ) + + return "_inst_dict['%s'] = %s(%s)" % ( + attr_name, + _init_converter_pat % (attr_name,), + value_var, + ) + + else: + # Not frozen. + fmt_setter = _assign + fmt_setter_with_converter = _assign_with_converter + + args = [] + kw_only_args = [] + attrs_to_validate = [] + + # This is a dictionary of names to validator and converter callables. + # Injecting this into __init__ globals lets us avoid lookups. 
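As a sketch (illustrative; assuming a field named x and the __attr_converter_%s / __attr_factory_%s name patterns used further down), the dict ends up holding entries such as:

    {"__attr_converter_x": <converter>, "__attr_factory_x": <factory>,
     "__attr_validator_x": <validator>, "__attr_x": <Attribute>}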
+ names_for_globals = {} + annotations = {"return": None} + + for a in attrs: + if a.validator: + attrs_to_validate.append(a) + + attr_name = a.name + has_on_setattr = a.on_setattr is not None or ( + a.on_setattr is not setters.NO_OP and has_cls_on_setattr + ) + # a.alias is set to maybe-mangled attr_name in _ClassBuilder if not + # explicitly provided + arg_name = a.alias + + has_factory = isinstance(a.default, Factory) + if has_factory and a.default.takes_self: + maybe_self = "self" + else: + maybe_self = "" + + if a.init is False: + if has_factory: + init_factory_name = _init_factory_pat % (a.name,) + if a.converter is not None: + lines.append( + fmt_setter_with_converter( + attr_name, + init_factory_name + f"({maybe_self})", + has_on_setattr, + ) + ) + conv_name = _init_converter_pat % (a.name,) + names_for_globals[conv_name] = a.converter + else: + lines.append( + fmt_setter( + attr_name, + init_factory_name + f"({maybe_self})", + has_on_setattr, + ) + ) + names_for_globals[init_factory_name] = a.default.factory + else: + if a.converter is not None: + lines.append( + fmt_setter_with_converter( + attr_name, + f"attr_dict['{attr_name}'].default", + has_on_setattr, + ) + ) + conv_name = _init_converter_pat % (a.name,) + names_for_globals[conv_name] = a.converter + else: + lines.append( + fmt_setter( + attr_name, + f"attr_dict['{attr_name}'].default", + has_on_setattr, + ) + ) + elif a.default is not NOTHING and not has_factory: + arg = f"{arg_name}=attr_dict['{attr_name}'].default" + if a.kw_only: + kw_only_args.append(arg) + else: + args.append(arg) + + if a.converter is not None: + lines.append( + fmt_setter_with_converter( + attr_name, arg_name, has_on_setattr + ) + ) + names_for_globals[ + _init_converter_pat % (a.name,) + ] = a.converter + else: + lines.append(fmt_setter(attr_name, arg_name, has_on_setattr)) + + elif has_factory: + arg = f"{arg_name}=NOTHING" + if a.kw_only: + kw_only_args.append(arg) + else: + args.append(arg) + lines.append(f"if {arg_name} is not NOTHING:") + + init_factory_name = _init_factory_pat % (a.name,) + if a.converter is not None: + lines.append( + " " + + fmt_setter_with_converter( + attr_name, arg_name, has_on_setattr + ) + ) + lines.append("else:") + lines.append( + " " + + fmt_setter_with_converter( + attr_name, + init_factory_name + "(" + maybe_self + ")", + has_on_setattr, + ) + ) + names_for_globals[ + _init_converter_pat % (a.name,) + ] = a.converter + else: + lines.append( + " " + fmt_setter(attr_name, arg_name, has_on_setattr) + ) + lines.append("else:") + lines.append( + " " + + fmt_setter( + attr_name, + init_factory_name + "(" + maybe_self + ")", + has_on_setattr, + ) + ) + names_for_globals[init_factory_name] = a.default.factory + else: + if a.kw_only: + kw_only_args.append(arg_name) + else: + args.append(arg_name) + + if a.converter is not None: + lines.append( + fmt_setter_with_converter( + attr_name, arg_name, has_on_setattr + ) + ) + names_for_globals[ + _init_converter_pat % (a.name,) + ] = a.converter + else: + lines.append(fmt_setter(attr_name, arg_name, has_on_setattr)) + + if a.init is True: + if a.type is not None and a.converter is None: + annotations[arg_name] = a.type + elif a.converter is not None: + # Try to get the type from the converter. + t = _AnnotationExtractor(a.converter).get_first_param_type() + if t: + annotations[arg_name] = t + + if attrs_to_validate: # we can skip this if there are no validators. 
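A sketch of the block this branch generates, for a single validated field named x (illustrative):

    if _config._run_validators is True:
        __attr_validator_x(self, __attr_x, self.x)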
+        names_for_globals["_config"] = _config
+        lines.append("if _config._run_validators is True:")
+        for a in attrs_to_validate:
+            val_name = "__attr_validator_" + a.name
+            attr_name = "__attr_" + a.name
+            lines.append(f"    {val_name}(self, {attr_name}, self.{a.name})")
+            names_for_globals[val_name] = a.validator
+            names_for_globals[attr_name] = a
+
+    if post_init:
+        lines.append("self.__attrs_post_init__()")
+
+    # because this is set only after __attrs_post_init__ is called, a crash
+    # will result if post-init tries to access the hash code.  This seemed
+    # preferable to setting this beforehand, in which case alteration to
+    # field values during post-init combined with post-init accessing the
+    # hash code would result in silent bugs.
+    if cache_hash:
+        if frozen:
+            if slots:
+                # if frozen and slots, then _setattr defined above
+                init_hash_cache = "_setattr('%s', %s)"
+            else:
+                # if frozen and not slots, then _inst_dict defined above
+                init_hash_cache = "_inst_dict['%s'] = %s"
+        else:
+            init_hash_cache = "self.%s = %s"
+        lines.append(init_hash_cache % (_hash_cache_field, "None"))
+
+    # For exceptions we rely on BaseException.__init__ for proper
+    # initialization.
+    if is_exc:
+        vals = ",".join(f"self.{a.name}" for a in attrs if a.init)
+
+        lines.append(f"BaseException.__init__(self, {vals})")
+
+    args = ", ".join(args)
+    if kw_only_args:
+        args += "%s*, %s" % (
+            ", " if args else "",  # leading comma
+            ", ".join(kw_only_args),  # kw_only args
+        )
+
+    return (
+        "def %s(self, %s):\n    %s\n"
+        % (
+            ("__attrs_init__" if attrs_init else "__init__"),
+            args,
+            "\n    ".join(lines) if lines else "pass",
+        ),
+        names_for_globals,
+        annotations,
+    )
+
+
+def _default_init_alias_for(name: str) -> str:
+    """
+    The default __init__ parameter name for a field.
+
+    This performs private-name adjustment via leading-underscore stripping,
+    and is the default value of Attribute.alias if not provided.
+    """
+
+    return name.lstrip("_")
+
+
+class Attribute:
+    """
+    *Read-only* representation of an attribute.
+
+    The class has *all* arguments of `attr.ib` (except for ``factory`` which
+    is only syntactic sugar for ``default=Factory(...)``) plus the following:
+
+    - ``name`` (`str`): The name of the attribute.
+    - ``alias`` (`str`): The __init__ parameter name of the attribute, after
+      any explicit overrides and default private-attribute-name handling.
+    - ``inherited`` (`bool`): Whether or not that attribute has been inherited
+      from a base class.
+    - ``eq_key`` and ``order_key`` (`typing.Callable` or `None`): The
+      callables that are used for comparing and ordering objects by this
+      attribute, respectively. These are set by passing a callable to
+      `attr.ib`'s ``eq``, ``order``, or ``cmp`` arguments. See also
+      :ref:`comparison customization <custom-comparison>`.
+
+    Instances of this class are frequently used for introspection purposes
+    like:
+
+    - `fields` returns a tuple of them.
+    - Validators get them passed as the first argument.
+    - The :ref:`field transformer <transform-fields>` hook receives a list of
+      them.
+    - The ``alias`` property exposes the __init__ parameter name of the field,
+      with any overrides and default private-attribute handling applied.
+
+
+    .. versionadded:: 20.1.0 *inherited*
+    .. versionadded:: 20.1.0 *on_setattr*
+    .. versionchanged:: 20.2.0 *inherited* is not taken into account for
+        equality checks and hashing anymore.
+    .. versionadded:: 21.1.0 *eq_key* and *order_key*
+    .. versionadded:: 22.2.0 *alias*
+
+    For the full version history of the fields, see `attr.ib`.
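A quick introspection sketch (illustrative)::

    >>> import attr
    >>> @attr.s
    ... class C:
    ...     x = attr.ib()
    >>> attr.fields(C).x.name
    'x'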
+ """ + + __slots__ = ( + "name", + "default", + "validator", + "repr", + "eq", + "eq_key", + "order", + "order_key", + "hash", + "init", + "metadata", + "type", + "converter", + "kw_only", + "inherited", + "on_setattr", + "alias", + ) + + def __init__( + self, + name, + default, + validator, + repr, + cmp, # XXX: unused, remove along with other cmp code. + hash, + init, + inherited, + metadata=None, + type=None, + converter=None, + kw_only=False, + eq=None, + eq_key=None, + order=None, + order_key=None, + on_setattr=None, + alias=None, + ): + eq, eq_key, order, order_key = _determine_attrib_eq_order( + cmp, eq_key or eq, order_key or order, True + ) + + # Cache this descriptor here to speed things up later. + bound_setattr = _obj_setattr.__get__(self) + + # Despite the big red warning, people *do* instantiate `Attribute` + # themselves. + bound_setattr("name", name) + bound_setattr("default", default) + bound_setattr("validator", validator) + bound_setattr("repr", repr) + bound_setattr("eq", eq) + bound_setattr("eq_key", eq_key) + bound_setattr("order", order) + bound_setattr("order_key", order_key) + bound_setattr("hash", hash) + bound_setattr("init", init) + bound_setattr("converter", converter) + bound_setattr( + "metadata", + ( + types.MappingProxyType(dict(metadata)) # Shallow copy + if metadata + else _empty_metadata_singleton + ), + ) + bound_setattr("type", type) + bound_setattr("kw_only", kw_only) + bound_setattr("inherited", inherited) + bound_setattr("on_setattr", on_setattr) + bound_setattr("alias", alias) + + def __setattr__(self, name, value): + raise FrozenInstanceError() + + @classmethod + def from_counting_attr(cls, name, ca, type=None): + # type holds the annotated value. deal with conflicts: + if type is None: + type = ca.type + elif ca.type is not None: + raise ValueError( + "Type annotation and type argument cannot both be present" + ) + inst_dict = { + k: getattr(ca, k) + for k in Attribute.__slots__ + if k + not in ( + "name", + "validator", + "default", + "type", + "inherited", + ) # exclude methods and deprecated alias + } + return cls( + name=name, + validator=ca._validator, + default=ca._default, + type=type, + cmp=None, + inherited=False, + **inst_dict, + ) + + # Don't use attr.evolve since fields(Attribute) doesn't work + def evolve(self, **changes): + """ + Copy *self* and apply *changes*. + + This works similarly to `attr.evolve` but that function does not work + with ``Attribute``. + + It is mainly meant to be used for `transform-fields`. + + .. versionadded:: 20.3.0 + """ + new = copy.copy(self) + + new._setattrs(changes.items()) + + return new + + # Don't use _add_pickle since fields(Attribute) doesn't work + def __getstate__(self): + """ + Play nice with pickle. + """ + return tuple( + getattr(self, name) if name != "metadata" else dict(self.metadata) + for name in self.__slots__ + ) + + def __setstate__(self, state): + """ + Play nice with pickle. 
+ """ + self._setattrs(zip(self.__slots__, state)) + + def _setattrs(self, name_values_pairs): + bound_setattr = _obj_setattr.__get__(self) + for name, value in name_values_pairs: + if name != "metadata": + bound_setattr(name, value) + else: + bound_setattr( + name, + types.MappingProxyType(dict(value)) + if value + else _empty_metadata_singleton, + ) + + +_a = [ + Attribute( + name=name, + default=NOTHING, + validator=None, + repr=True, + cmp=None, + eq=True, + order=False, + hash=(name != "metadata"), + init=True, + inherited=False, + alias=_default_init_alias_for(name), + ) + for name in Attribute.__slots__ +] + +Attribute = _add_hash( + _add_eq( + _add_repr(Attribute, attrs=_a), + attrs=[a for a in _a if a.name != "inherited"], + ), + attrs=[a for a in _a if a.hash and a.name != "inherited"], +) + + +class _CountingAttr: + """ + Intermediate representation of attributes that uses a counter to preserve + the order in which the attributes have been defined. + + *Internal* data structure of the attrs library. Running into is most + likely the result of a bug like a forgotten `@attr.s` decorator. + """ + + __slots__ = ( + "counter", + "_default", + "repr", + "eq", + "eq_key", + "order", + "order_key", + "hash", + "init", + "metadata", + "_validator", + "converter", + "type", + "kw_only", + "on_setattr", + "alias", + ) + __attrs_attrs__ = tuple( + Attribute( + name=name, + alias=_default_init_alias_for(name), + default=NOTHING, + validator=None, + repr=True, + cmp=None, + hash=True, + init=True, + kw_only=False, + eq=True, + eq_key=None, + order=False, + order_key=None, + inherited=False, + on_setattr=None, + ) + for name in ( + "counter", + "_default", + "repr", + "eq", + "order", + "hash", + "init", + "on_setattr", + "alias", + ) + ) + ( + Attribute( + name="metadata", + alias="metadata", + default=None, + validator=None, + repr=True, + cmp=None, + hash=False, + init=True, + kw_only=False, + eq=True, + eq_key=None, + order=False, + order_key=None, + inherited=False, + on_setattr=None, + ), + ) + cls_counter = 0 + + def __init__( + self, + default, + validator, + repr, + cmp, + hash, + init, + converter, + metadata, + type, + kw_only, + eq, + eq_key, + order, + order_key, + on_setattr, + alias, + ): + _CountingAttr.cls_counter += 1 + self.counter = _CountingAttr.cls_counter + self._default = default + self._validator = validator + self.converter = converter + self.repr = repr + self.eq = eq + self.eq_key = eq_key + self.order = order + self.order_key = order_key + self.hash = hash + self.init = init + self.metadata = metadata + self.type = type + self.kw_only = kw_only + self.on_setattr = on_setattr + self.alias = alias + + def validator(self, meth): + """ + Decorator that adds *meth* to the list of validators. + + Returns *meth* unchanged. + + .. versionadded:: 17.1.0 + """ + if self._validator is None: + self._validator = meth + else: + self._validator = and_(self._validator, meth) + return meth + + def default(self, meth): + """ + Decorator that allows to set the default for an attribute. + + Returns *meth* unchanged. + + :raises DefaultAlreadySetError: If default has been set before. + + .. versionadded:: 17.1.0 + """ + if self._default is not NOTHING: + raise DefaultAlreadySetError() + + self._default = Factory(meth, takes_self=True) + + return meth + + +_CountingAttr = _add_eq(_add_repr(_CountingAttr)) + + +class Factory: + """ + Stores a factory callable. + + If passed as the default value to `attrs.field`, the factory is used to + generate a new value. 
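A minimal sketch of that behavior (illustrative)::

    >>> import attr
    >>> @attr.s
    ... class C:
    ...     x = attr.ib(default=attr.Factory(list))
    >>> C().x
    []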
+ + :param callable factory: A callable that takes either none or exactly one + mandatory positional argument depending on *takes_self*. + :param bool takes_self: Pass the partially initialized instance that is + being initialized as a positional argument. + + .. versionadded:: 17.1.0 *takes_self* + """ + + __slots__ = ("factory", "takes_self") + + def __init__(self, factory, takes_self=False): + """ + `Factory` is part of the default machinery so if we want a default + value here, we have to implement it ourselves. + """ + self.factory = factory + self.takes_self = takes_self + + def __getstate__(self): + """ + Play nice with pickle. + """ + return tuple(getattr(self, name) for name in self.__slots__) + + def __setstate__(self, state): + """ + Play nice with pickle. + """ + for name, value in zip(self.__slots__, state): + setattr(self, name, value) + + +_f = [ + Attribute( + name=name, + default=NOTHING, + validator=None, + repr=True, + cmp=None, + eq=True, + order=False, + hash=True, + init=True, + inherited=False, + ) + for name in Factory.__slots__ +] + +Factory = _add_hash(_add_eq(_add_repr(Factory, attrs=_f), attrs=_f), attrs=_f) + + +def make_class(name, attrs, bases=(object,), **attributes_arguments): + """ + A quick way to create a new class called *name* with *attrs*. + + :param str name: The name for the new class. + + :param attrs: A list of names or a dictionary of mappings of names to + attributes. + + The order is deduced from the order of the names or attributes inside + *attrs*. Otherwise the order of the definition of the attributes is + used. + :type attrs: `list` or `dict` + + :param tuple bases: Classes that the new class will subclass. + + :param attributes_arguments: Passed unmodified to `attr.s`. + + :return: A new class with *attrs*. + :rtype: type + + .. versionadded:: 17.1.0 *bases* + .. versionchanged:: 18.1.0 If *attrs* is ordered, the order is retained. + """ + if isinstance(attrs, dict): + cls_dict = attrs + elif isinstance(attrs, (list, tuple)): + cls_dict = {a: attrib() for a in attrs} + else: + raise TypeError("attrs argument must be a dict or a list.") + + pre_init = cls_dict.pop("__attrs_pre_init__", None) + post_init = cls_dict.pop("__attrs_post_init__", None) + user_init = cls_dict.pop("__init__", None) + + body = {} + if pre_init is not None: + body["__attrs_pre_init__"] = pre_init + if post_init is not None: + body["__attrs_post_init__"] = post_init + if user_init is not None: + body["__init__"] = user_init + + type_ = types.new_class(name, bases, {}, lambda ns: ns.update(body)) + + # For pickling to work, the __module__ variable needs to be set to the + # frame where the class is created. Bypass this step in environments where + # sys._getframe is not defined (Jython for example) or sys._getframe is not + # defined for arguments greater than 0 (IronPython). + try: + type_.__module__ = sys._getframe(1).f_globals.get( + "__name__", "__main__" + ) + except (AttributeError, ValueError): + pass + + # We do it here for proper warnings with meaningful stacklevel. + cmp = attributes_arguments.pop("cmp", None) + ( + attributes_arguments["eq"], + attributes_arguments["order"], + ) = _determine_attrs_eq_order( + cmp, + attributes_arguments.get("eq"), + attributes_arguments.get("order"), + True, + ) + + return _attrs(these=cls_dict, **attributes_arguments)(type_) + + +# These are required by within this module so we define them here and merely +# import into .validators / .converters. 
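A minimal sketch of the make_class API defined above (illustrative)::

    >>> C = make_class("C", ["x", "y"])
    >>> C(1, 2)
    C(x=1, y=2)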
+ + +@attrs(slots=True, hash=True) +class _AndValidator: + """ + Compose many validators to a single one. + """ + + _validators = attrib() + + def __call__(self, inst, attr, value): + for v in self._validators: + v(inst, attr, value) + + +def and_(*validators): + """ + A validator that composes multiple validators into one. + + When called on a value, it runs all wrapped validators. + + :param callables validators: Arbitrary number of validators. + + .. versionadded:: 17.1.0 + """ + vals = [] + for validator in validators: + vals.extend( + validator._validators + if isinstance(validator, _AndValidator) + else [validator] + ) + + return _AndValidator(tuple(vals)) + + +def pipe(*converters): + """ + A converter that composes multiple converters into one. + + When called on a value, it runs all wrapped converters, returning the + *last* value. + + Type annotations will be inferred from the wrapped converters', if + they have any. + + :param callables converters: Arbitrary number of converters. + + .. versionadded:: 20.1.0 + """ + + def pipe_converter(val): + for converter in converters: + val = converter(val) + + return val + + if not converters: + # If the converter list is empty, pipe_converter is the identity. + A = typing.TypeVar("A") + pipe_converter.__annotations__ = {"val": A, "return": A} + else: + # Get parameter type from first converter. + t = _AnnotationExtractor(converters[0]).get_first_param_type() + if t: + pipe_converter.__annotations__["val"] = t + + # Get return type from last converter. + rt = _AnnotationExtractor(converters[-1]).get_return_type() + if rt: + pipe_converter.__annotations__["return"] = rt + + return pipe_converter diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attr/_next_gen.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attr/_next_gen.py new file mode 100644 index 00000000..c59d8486 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attr/_next_gen.py @@ -0,0 +1,226 @@ +# SPDX-License-Identifier: MIT + +""" +These are keyword-only APIs that call `attr.s` and `attr.ib` with different +default values. +""" + + +from functools import partial + +from . import setters +from ._funcs import asdict as _asdict +from ._funcs import astuple as _astuple +from ._make import ( + NOTHING, + _frozen_setattrs, + _ng_default_on_setattr, + attrib, + attrs, +) +from .exceptions import UnannotatedAttributeError + + +def define( + maybe_cls=None, + *, + these=None, + repr=None, + unsafe_hash=None, + hash=None, + init=None, + slots=True, + frozen=False, + weakref_slot=True, + str=False, + auto_attribs=None, + kw_only=False, + cache_hash=False, + auto_exc=True, + eq=None, + order=False, + auto_detect=True, + getstate_setstate=None, + on_setattr=None, + field_transformer=None, + match_args=True, +): + r""" + Define an ``attrs`` class. + + Differences to the classic `attr.s` that it uses underneath: + + - Automatically detect whether or not *auto_attribs* should be `True` (c.f. + *auto_attribs* parameter). + - If *frozen* is `False`, run converters and validators when setting an + attribute by default. + - *slots=True* + + .. caution:: + + Usually this has only upsides and few visible effects in everyday + programming. But it *can* lead to some suprising behaviors, so please + make sure to read :term:`slotted classes`. + - *auto_exc=True* + - *auto_detect=True* + - *order=False* + - Some options that were only relevant on Python 2 or were kept around for + backwards-compatibility have been removed. 
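A minimal sketch of these defaults in action (illustrative)::

    >>> import attr
    >>> @attr.define
    ... class Point:
    ...     x: int
    ...     y: int = 0
    >>> Point(1)
    Point(x=1, y=0)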
+ + Please note that these are all defaults and you can change them as you + wish. + + :param Optional[bool] auto_attribs: If set to `True` or `False`, it behaves + exactly like `attr.s`. If left `None`, `attr.s` will try to guess: + + 1. If any attributes are annotated and no unannotated `attrs.fields`\ s + are found, it assumes *auto_attribs=True*. + 2. Otherwise it assumes *auto_attribs=False* and tries to collect + `attrs.fields`\ s. + + For now, please refer to `attr.s` for the rest of the parameters. + + .. versionadded:: 20.1.0 + .. versionchanged:: 21.3.0 Converters are also run ``on_setattr``. + .. versionadded:: 22.2.0 + *unsafe_hash* as an alias for *hash* (for :pep:`681` compliance). + """ + + def do_it(cls, auto_attribs): + return attrs( + maybe_cls=cls, + these=these, + repr=repr, + hash=hash, + unsafe_hash=unsafe_hash, + init=init, + slots=slots, + frozen=frozen, + weakref_slot=weakref_slot, + str=str, + auto_attribs=auto_attribs, + kw_only=kw_only, + cache_hash=cache_hash, + auto_exc=auto_exc, + eq=eq, + order=order, + auto_detect=auto_detect, + collect_by_mro=True, + getstate_setstate=getstate_setstate, + on_setattr=on_setattr, + field_transformer=field_transformer, + match_args=match_args, + ) + + def wrap(cls): + """ + Making this a wrapper ensures this code runs during class creation. + + We also ensure that frozen-ness of classes is inherited. + """ + nonlocal frozen, on_setattr + + had_on_setattr = on_setattr not in (None, setters.NO_OP) + + # By default, mutable classes convert & validate on setattr. + if frozen is False and on_setattr is None: + on_setattr = _ng_default_on_setattr + + # However, if we subclass a frozen class, we inherit the immutability + # and disable on_setattr. + for base_cls in cls.__bases__: + if base_cls.__setattr__ is _frozen_setattrs: + if had_on_setattr: + raise ValueError( + "Frozen classes can't use on_setattr " + "(frozen-ness was inherited)." + ) + + on_setattr = setters.NO_OP + break + + if auto_attribs is not None: + return do_it(cls, auto_attribs) + + try: + return do_it(cls, True) + except UnannotatedAttributeError: + return do_it(cls, False) + + # maybe_cls's type depends on the usage of the decorator. It's a class + # if it's used as `@attrs` but ``None`` if used as `@attrs()`. + if maybe_cls is None: + return wrap + else: + return wrap(maybe_cls) + + +mutable = define +frozen = partial(define, frozen=True, on_setattr=None) + + +def field( + *, + default=NOTHING, + validator=None, + repr=True, + hash=None, + init=True, + metadata=None, + converter=None, + factory=None, + kw_only=False, + eq=None, + order=None, + on_setattr=None, + alias=None, +): + """ + Identical to `attr.ib`, except keyword-only and with some arguments + removed. + + .. versionadded:: 20.1.0 + """ + return attrib( + default=default, + validator=validator, + repr=repr, + hash=hash, + init=init, + metadata=metadata, + converter=converter, + factory=factory, + kw_only=kw_only, + eq=eq, + order=order, + on_setattr=on_setattr, + alias=alias, + ) + + +def asdict(inst, *, recurse=True, filter=None, value_serializer=None): + """ + Same as `attr.asdict`, except that collections types are always retained + and dict is always used as *dict_factory*. + + .. 
versionadded:: 21.3.0 + """ + return _asdict( + inst=inst, + recurse=recurse, + filter=filter, + value_serializer=value_serializer, + retain_collection_types=True, + ) + + +def astuple(inst, *, recurse=True, filter=None): + """ + Same as `attr.astuple`, except that collections types are always retained + and `tuple` is always used as the *tuple_factory*. + + .. versionadded:: 21.3.0 + """ + return _astuple( + inst=inst, recurse=recurse, filter=filter, retain_collection_types=True + ) diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attr/_typing_compat.pyi b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attr/_typing_compat.pyi new file mode 100644 index 00000000..ca7b71e9 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attr/_typing_compat.pyi @@ -0,0 +1,15 @@ +from typing import Any, ClassVar, Protocol + +# MYPY is a special constant in mypy which works the same way as `TYPE_CHECKING`. +MYPY = False + +if MYPY: + # A protocol to be able to statically accept an attrs class. + class AttrsInstance_(Protocol): + __attrs_attrs__: ClassVar[Any] + +else: + # For type checkers without plug-in support use an empty protocol that + # will (hopefully) be combined into a union. + class AttrsInstance_(Protocol): + pass diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attr/_version_info.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attr/_version_info.py new file mode 100644 index 00000000..51a1312f --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attr/_version_info.py @@ -0,0 +1,86 @@ +# SPDX-License-Identifier: MIT + + +from functools import total_ordering + +from ._funcs import astuple +from ._make import attrib, attrs + + +@total_ordering +@attrs(eq=False, order=False, slots=True, frozen=True) +class VersionInfo: + """ + A version object that can be compared to tuple of length 1--4: + + >>> attr.VersionInfo(19, 1, 0, "final") <= (19, 2) + True + >>> attr.VersionInfo(19, 1, 0, "final") < (19, 1, 1) + True + >>> vi = attr.VersionInfo(19, 2, 0, "final") + >>> vi < (19, 1, 1) + False + >>> vi < (19,) + False + >>> vi == (19, 2,) + True + >>> vi == (19, 2, 1) + False + + .. versionadded:: 19.2 + """ + + year = attrib(type=int) + minor = attrib(type=int) + micro = attrib(type=int) + releaselevel = attrib(type=str) + + @classmethod + def _from_version_string(cls, s): + """ + Parse *s* and return a _VersionInfo. + """ + v = s.split(".") + if len(v) == 3: + v.append("final") + + return cls( + year=int(v[0]), minor=int(v[1]), micro=int(v[2]), releaselevel=v[3] + ) + + def _ensure_tuple(self, other): + """ + Ensure *other* is a tuple of a valid length. + + Returns a possibly transformed *other* and ourselves as a tuple of + the same length as *other*. + """ + + if self.__class__ is other.__class__: + other = astuple(other) + + if not isinstance(other, tuple): + raise NotImplementedError + + if not (1 <= len(other) <= 4): + raise NotImplementedError + + return astuple(self)[: len(other)], other + + def __eq__(self, other): + try: + us, them = self._ensure_tuple(other) + except NotImplementedError: + return NotImplemented + + return us == them + + def __lt__(self, other): + try: + us, them = self._ensure_tuple(other) + except NotImplementedError: + return NotImplemented + + # Since alphabetically "dev0" < "final" < "post1" < "post2", we don't + # have to do anything special with releaselevel for now. 
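A concrete check of that ordering claim (illustrative)::

    >>> (19, 2, 0, "dev0") < (19, 2, 0, "final") < (19, 2, 0, "post1")
    True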
+ return us < them diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attr/_version_info.pyi b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attr/_version_info.pyi new file mode 100644 index 00000000..45ced086 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attr/_version_info.pyi @@ -0,0 +1,9 @@ +class VersionInfo: + @property + def year(self) -> int: ... + @property + def minor(self) -> int: ... + @property + def micro(self) -> int: ... + @property + def releaselevel(self) -> str: ... diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attr/converters.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attr/converters.py new file mode 100644 index 00000000..4cada106 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attr/converters.py @@ -0,0 +1,144 @@ +# SPDX-License-Identifier: MIT + +""" +Commonly useful converters. +""" + + +import typing + +from ._compat import _AnnotationExtractor +from ._make import NOTHING, Factory, pipe + + +__all__ = [ + "default_if_none", + "optional", + "pipe", + "to_bool", +] + + +def optional(converter): + """ + A converter that allows an attribute to be optional. An optional attribute + is one which can be set to ``None``. + + Type annotations will be inferred from the wrapped converter's, if it + has any. + + :param callable converter: the converter that is used for non-``None`` + values. + + .. versionadded:: 17.1.0 + """ + + def optional_converter(val): + if val is None: + return None + return converter(val) + + xtr = _AnnotationExtractor(converter) + + t = xtr.get_first_param_type() + if t: + optional_converter.__annotations__["val"] = typing.Optional[t] + + rt = xtr.get_return_type() + if rt: + optional_converter.__annotations__["return"] = typing.Optional[rt] + + return optional_converter + + +def default_if_none(default=NOTHING, factory=None): + """ + A converter that allows to replace ``None`` values by *default* or the + result of *factory*. + + :param default: Value to be used if ``None`` is passed. Passing an instance + of `attrs.Factory` is supported, however the ``takes_self`` option + is *not*. + :param callable factory: A callable that takes no parameters whose result + is used if ``None`` is passed. + + :raises TypeError: If **neither** *default* or *factory* is passed. + :raises TypeError: If **both** *default* and *factory* are passed. + :raises ValueError: If an instance of `attrs.Factory` is passed with + ``takes_self=True``. + + .. versionadded:: 18.2.0 + """ + if default is NOTHING and factory is None: + raise TypeError("Must pass either `default` or `factory`.") + + if default is not NOTHING and factory is not None: + raise TypeError( + "Must pass either `default` or `factory` but not both." + ) + + if factory is not None: + default = Factory(factory) + + if isinstance(default, Factory): + if default.takes_self: + raise ValueError( + "`takes_self` is not supported by default_if_none." + ) + + def default_if_none_converter(val): + if val is not None: + return val + + return default.factory() + + else: + + def default_if_none_converter(val): + if val is not None: + return val + + return default + + return default_if_none_converter + + +def to_bool(val): + """ + Convert "boolean" strings (e.g., from env. vars.) to real booleans. 
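For example (illustrative; the full value mapping follows)::

    >>> to_bool("yes")
    True
    >>> to_bool("0")
    False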
+ + Values mapping to :code:`True`: + + - :code:`True` + - :code:`"true"` / :code:`"t"` + - :code:`"yes"` / :code:`"y"` + - :code:`"on"` + - :code:`"1"` + - :code:`1` + + Values mapping to :code:`False`: + + - :code:`False` + - :code:`"false"` / :code:`"f"` + - :code:`"no"` / :code:`"n"` + - :code:`"off"` + - :code:`"0"` + - :code:`0` + + :raises ValueError: for any other value. + + .. versionadded:: 21.3.0 + """ + if isinstance(val, str): + val = val.lower() + truthy = {True, "true", "t", "yes", "y", "on", "1", 1} + falsy = {False, "false", "f", "no", "n", "off", "0", 0} + try: + if val in truthy: + return True + if val in falsy: + return False + except TypeError: + # Raised when "val" is not hashable (e.g., lists) + pass + raise ValueError(f"Cannot convert value to bool: {val}") diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attr/converters.pyi b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attr/converters.pyi new file mode 100644 index 00000000..5abb49f6 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attr/converters.pyi @@ -0,0 +1,13 @@ +from typing import Callable, TypeVar, overload + +from . import _ConverterType + +_T = TypeVar("_T") + +def pipe(*validators: _ConverterType) -> _ConverterType: ... +def optional(converter: _ConverterType) -> _ConverterType: ... +@overload +def default_if_none(default: _T) -> _ConverterType: ... +@overload +def default_if_none(*, factory: Callable[[], _T]) -> _ConverterType: ... +def to_bool(val: str) -> bool: ... diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attr/exceptions.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attr/exceptions.py new file mode 100644 index 00000000..5dc51e0a --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attr/exceptions.py @@ -0,0 +1,92 @@ +# SPDX-License-Identifier: MIT + + +class FrozenError(AttributeError): + """ + A frozen/immutable instance or attribute have been attempted to be + modified. + + It mirrors the behavior of ``namedtuples`` by using the same error message + and subclassing `AttributeError`. + + .. versionadded:: 20.1.0 + """ + + msg = "can't set attribute" + args = [msg] + + +class FrozenInstanceError(FrozenError): + """ + A frozen instance has been attempted to be modified. + + .. versionadded:: 16.1.0 + """ + + +class FrozenAttributeError(FrozenError): + """ + A frozen attribute has been attempted to be modified. + + .. versionadded:: 20.1.0 + """ + + +class AttrsAttributeNotFoundError(ValueError): + """ + An ``attrs`` function couldn't find an attribute that the user asked for. + + .. versionadded:: 16.2.0 + """ + + +class NotAnAttrsClassError(ValueError): + """ + A non-``attrs`` class has been passed into an ``attrs`` function. + + .. versionadded:: 16.2.0 + """ + + +class DefaultAlreadySetError(RuntimeError): + """ + A default has been set using ``attr.ib()`` and is attempted to be reset + using the decorator. + + .. versionadded:: 17.1.0 + """ + + +class UnannotatedAttributeError(RuntimeError): + """ + A class with ``auto_attribs=True`` has an ``attr.ib()`` without a type + annotation. + + .. versionadded:: 17.3.0 + """ + + +class PythonTooOldError(RuntimeError): + """ + It was attempted to use an ``attrs`` feature that requires a newer Python + version. + + .. versionadded:: 18.2.0 + """ + + +class NotCallableError(TypeError): + """ + A ``attr.ib()`` requiring a callable has been set with a value + that is not callable. + + .. 
versionadded:: 19.2.0 + """ + + def __init__(self, msg, value): + super(TypeError, self).__init__(msg, value) + self.msg = msg + self.value = value + + def __str__(self): + return str(self.msg) diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attr/exceptions.pyi b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attr/exceptions.pyi new file mode 100644 index 00000000..f2680118 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attr/exceptions.pyi @@ -0,0 +1,17 @@ +from typing import Any + +class FrozenError(AttributeError): + msg: str = ... + +class FrozenInstanceError(FrozenError): ... +class FrozenAttributeError(FrozenError): ... +class AttrsAttributeNotFoundError(ValueError): ... +class NotAnAttrsClassError(ValueError): ... +class DefaultAlreadySetError(RuntimeError): ... +class UnannotatedAttributeError(RuntimeError): ... +class PythonTooOldError(RuntimeError): ... + +class NotCallableError(TypeError): + msg: str = ... + value: Any = ... + def __init__(self, msg: str, value: Any) -> None: ... diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attr/filters.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attr/filters.py new file mode 100644 index 00000000..baa25e94 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attr/filters.py @@ -0,0 +1,51 @@ +# SPDX-License-Identifier: MIT + +""" +Commonly useful filters for `attr.asdict`. +""" + +from ._make import Attribute + + +def _split_what(what): + """ + Returns a tuple of `frozenset`s of classes and attributes. + """ + return ( + frozenset(cls for cls in what if isinstance(cls, type)), + frozenset(cls for cls in what if isinstance(cls, Attribute)), + ) + + +def include(*what): + """ + Include *what*. + + :param what: What to include. + :type what: `list` of `type` or `attrs.Attribute`\\ s + + :rtype: `callable` + """ + cls, attrs = _split_what(what) + + def include_(attribute, value): + return value.__class__ in cls or attribute in attrs + + return include_ + + +def exclude(*what): + """ + Exclude *what*. + + :param what: What to exclude. + :type what: `list` of classes or `attrs.Attribute`\\ s. + + :rtype: `callable` + """ + cls, attrs = _split_what(what) + + def exclude_(attribute, value): + return value.__class__ not in cls and attribute not in attrs + + return exclude_ diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attr/filters.pyi b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attr/filters.pyi new file mode 100644 index 00000000..99386686 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attr/filters.pyi @@ -0,0 +1,6 @@ +from typing import Any, Union + +from . import Attribute, _FilterType + +def include(*what: Union[type, Attribute[Any]]) -> _FilterType[Any]: ... +def exclude(*what: Union[type, Attribute[Any]]) -> _FilterType[Any]: ... diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attr/py.typed b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attr/py.typed new file mode 100644 index 00000000..e69de29b diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attr/setters.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attr/setters.py new file mode 100644 index 00000000..12ed6750 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attr/setters.py @@ -0,0 +1,73 @@ +# SPDX-License-Identifier: MIT + +""" +Commonly used hooks for on_setattr. +""" + + +from . 
import _config +from .exceptions import FrozenAttributeError + + +def pipe(*setters): + """ + Run all *setters* and return the return value of the last one. + + .. versionadded:: 20.1.0 + """ + + def wrapped_pipe(instance, attrib, new_value): + rv = new_value + + for setter in setters: + rv = setter(instance, attrib, rv) + + return rv + + return wrapped_pipe + + +def frozen(_, __, ___): + """ + Prevent an attribute to be modified. + + .. versionadded:: 20.1.0 + """ + raise FrozenAttributeError() + + +def validate(instance, attrib, new_value): + """ + Run *attrib*'s validator on *new_value* if it has one. + + .. versionadded:: 20.1.0 + """ + if _config._run_validators is False: + return new_value + + v = attrib.validator + if not v: + return new_value + + v(instance, attrib, new_value) + + return new_value + + +def convert(instance, attrib, new_value): + """ + Run *attrib*'s converter -- if it has one -- on *new_value* and return the + result. + + .. versionadded:: 20.1.0 + """ + c = attrib.converter + if c: + return c(new_value) + + return new_value + + +# Sentinel for disabling class-wide *on_setattr* hooks for certain attributes. +# autodata stopped working, so the docstring is inlined in the API docs. +NO_OP = object() diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attr/setters.pyi b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attr/setters.pyi new file mode 100644 index 00000000..72f7ce47 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attr/setters.pyi @@ -0,0 +1,19 @@ +from typing import Any, NewType, NoReturn, TypeVar + +from . import Attribute, _OnSetAttrType + +_T = TypeVar("_T") + +def frozen( + instance: Any, attribute: Attribute[Any], new_value: Any +) -> NoReturn: ... +def pipe(*setters: _OnSetAttrType) -> _OnSetAttrType: ... +def validate(instance: Any, attribute: Attribute[_T], new_value: _T) -> _T: ... + +# convert is allowed to return Any, because they can be chained using pipe. +def convert( + instance: Any, attribute: Attribute[Any], new_value: Any +) -> Any: ... + +_NoOpType = NewType("_NoOpType", object) +NO_OP: _NoOpType diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attr/validators.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attr/validators.py new file mode 100644 index 00000000..852ae965 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attr/validators.py @@ -0,0 +1,714 @@ +# SPDX-License-Identifier: MIT + +""" +Commonly useful validators. +""" + + +import operator +import re + +from contextlib import contextmanager + +from ._config import get_run_validators, set_run_validators +from ._make import _AndValidator, and_, attrib, attrs +from .converters import default_if_none +from .exceptions import NotCallableError + + +try: + Pattern = re.Pattern +except AttributeError: # Python <3.7 lacks a Pattern type. + Pattern = type(re.compile("")) + + +__all__ = [ + "and_", + "deep_iterable", + "deep_mapping", + "disabled", + "ge", + "get_disabled", + "gt", + "in_", + "instance_of", + "is_callable", + "le", + "lt", + "matches_re", + "max_len", + "min_len", + "not_", + "optional", + "provides", + "set_disabled", +] + + +def set_disabled(disabled): + """ + Globally disable or enable running validators. + + By default, they are run. + + :param disabled: If ``True``, disable running all validators. + :type disabled: bool + + .. warning:: + + This function is not thread-safe! + + .. 
versionadded:: 21.3.0
+    """
+    set_run_validators(not disabled)
+
+
+def get_disabled():
+    """
+    Return a bool indicating whether validators are currently disabled or not.
+
+    :return: ``True`` if validators are currently disabled.
+    :rtype: bool
+
+    .. versionadded:: 21.3.0
+    """
+    return not get_run_validators()
+
+
+@contextmanager
+def disabled():
+    """
+    Context manager that disables running validators within its context.
+
+    .. warning::
+
+        This context manager is not thread-safe!
+
+    .. versionadded:: 21.3.0
+    """
+    set_run_validators(False)
+    try:
+        yield
+    finally:
+        set_run_validators(True)
+
+
+@attrs(repr=False, slots=True, hash=True)
+class _InstanceOfValidator:
+    type = attrib()
+
+    def __call__(self, inst, attr, value):
+        """
+        We use a callable class to be able to change the ``__repr__``.
+        """
+        if not isinstance(value, self.type):
+            raise TypeError(
+                "'{name}' must be {type!r} (got {value!r} that is a "
+                "{actual!r}).".format(
+                    name=attr.name,
+                    type=self.type,
+                    actual=value.__class__,
+                    value=value,
+                ),
+                attr,
+                self.type,
+                value,
+            )
+
+    def __repr__(self):
+        return "<instance_of validator for type {type!r}>".format(
+            type=self.type
+        )
+
+
+def instance_of(type):
+    """
+    A validator that raises a `TypeError` if the initializer is called
+    with a wrong type for this particular attribute (checks are performed
+    using `isinstance`, therefore it's also valid to pass a tuple of types).
+
+    :param type: The type to check for.
+    :type type: type or tuple of type
+
+    :raises TypeError: With a human readable error message, the attribute
+        (of type `attrs.Attribute`), the expected type, and the value it
+        got.
+    """
+    return _InstanceOfValidator(type)
+
+
+@attrs(repr=False, frozen=True, slots=True)
+class _MatchesReValidator:
+    pattern = attrib()
+    match_func = attrib()
+
+    def __call__(self, inst, attr, value):
+        """
+        We use a callable class to be able to change the ``__repr__``.
+        """
+        if not self.match_func(value):
+            raise ValueError(
+                "'{name}' must match regex {pattern!r}"
+                " ({value!r} doesn't)".format(
+                    name=attr.name, pattern=self.pattern.pattern, value=value
+                ),
+                attr,
+                self.pattern,
+                value,
+            )
+
+    def __repr__(self):
+        return "<matches_re validator for pattern {pattern!r}>".format(
+            pattern=self.pattern
+        )
+
+
+def matches_re(regex, flags=0, func=None):
+    r"""
+    A validator that raises `ValueError` if the initializer is called
+    with a string that doesn't match *regex*.
+
+    :param regex: a regex string or precompiled pattern to match against
+    :param int flags: flags that will be passed to the underlying re function
+        (default 0)
+    :param callable func: which underlying `re` function to call. Valid
+        options are `re.fullmatch`, `re.search`, and `re.match`; the default
+        ``None`` means `re.fullmatch`. For performance reasons, the pattern is
+        always precompiled using `re.compile`.
+
+    .. versionadded:: 19.2.0
+    .. versionchanged:: 21.3.0 *regex* can be a pre-compiled pattern.
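A minimal usage sketch (illustrative; the field and pattern are made up)::

    >>> import attr
    >>> @attr.s
    ... class User:
    ...     email = attr.ib(validator=attr.validators.matches_re(r".+@.+"))
    >>> User("hi@example.com")
    User(email='hi@example.com')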
+ """ + valid_funcs = (re.fullmatch, None, re.search, re.match) + if func not in valid_funcs: + raise ValueError( + "'func' must be one of {}.".format( + ", ".join( + sorted( + e and e.__name__ or "None" for e in set(valid_funcs) + ) + ) + ) + ) + + if isinstance(regex, Pattern): + if flags: + raise TypeError( + "'flags' can only be used with a string pattern; " + "pass flags to re.compile() instead" + ) + pattern = regex + else: + pattern = re.compile(regex, flags) + + if func is re.match: + match_func = pattern.match + elif func is re.search: + match_func = pattern.search + else: + match_func = pattern.fullmatch + + return _MatchesReValidator(pattern, match_func) + + +@attrs(repr=False, slots=True, hash=True) +class _ProvidesValidator: + interface = attrib() + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if not self.interface.providedBy(value): + raise TypeError( + "'{name}' must provide {interface!r} which {value!r} " + "doesn't.".format( + name=attr.name, interface=self.interface, value=value + ), + attr, + self.interface, + value, + ) + + def __repr__(self): + return "".format( + interface=self.interface + ) + + +def provides(interface): + """ + A validator that raises a `TypeError` if the initializer is called + with an object that does not provide the requested *interface* (checks are + performed using ``interface.providedBy(value)`` (see `zope.interface + `_). + + :param interface: The interface to check for. + :type interface: ``zope.interface.Interface`` + + :raises TypeError: With a human readable error message, the attribute + (of type `attrs.Attribute`), the expected interface, and the + value it got. + """ + return _ProvidesValidator(interface) + + +@attrs(repr=False, slots=True, hash=True) +class _OptionalValidator: + validator = attrib() + + def __call__(self, inst, attr, value): + if value is None: + return + + self.validator(inst, attr, value) + + def __repr__(self): + return "".format( + what=repr(self.validator) + ) + + +def optional(validator): + """ + A validator that makes an attribute optional. An optional attribute is one + which can be set to ``None`` in addition to satisfying the requirements of + the sub-validator. + + :param validator: A validator (or a list of validators) that is used for + non-``None`` values. + :type validator: callable or `list` of callables. + + .. versionadded:: 15.1.0 + .. versionchanged:: 17.1.0 *validator* can be a list of validators. + """ + if isinstance(validator, list): + return _OptionalValidator(_AndValidator(validator)) + return _OptionalValidator(validator) + + +@attrs(repr=False, slots=True, hash=True) +class _InValidator: + options = attrib() + + def __call__(self, inst, attr, value): + try: + in_options = value in self.options + except TypeError: # e.g. `1 in "abc"` + in_options = False + + if not in_options: + raise ValueError( + "'{name}' must be in {options!r} (got {value!r})".format( + name=attr.name, options=self.options, value=value + ), + attr, + self.options, + value, + ) + + def __repr__(self): + return "".format( + options=self.options + ) + + +def in_(options): + """ + A validator that raises a `ValueError` if the initializer is called + with a value that does not belong in the options provided. The check is + performed using ``value in options``. + + :param options: Allowed options. + :type options: list, tuple, `enum.Enum`, ... 
+
+
+@attrs(repr=False, slots=True, hash=True)
+class _InValidator:
+    options = attrib()
+
+    def __call__(self, inst, attr, value):
+        try:
+            in_options = value in self.options
+        except TypeError:  # e.g. `1 in "abc"`
+            in_options = False
+
+        if not in_options:
+            raise ValueError(
+                "'{name}' must be in {options!r} (got {value!r})".format(
+                    name=attr.name, options=self.options, value=value
+                ),
+                attr,
+                self.options,
+                value,
+            )
+
+    def __repr__(self):
+        return "<in_ validator with options {options!r}>".format(
+            options=self.options
+        )
+
+
+def in_(options):
+    """
+    A validator that raises a `ValueError` if the initializer is called
+    with a value that does not belong in the options provided. The check is
+    performed using ``value in options``.
+
+    :param options: Allowed options.
+    :type options: list, tuple, `enum.Enum`, ...
+
+    :raises ValueError: With a human readable error message, the attribute (of
+        type `attrs.Attribute`), the expected options, and the value it
+        got.
+
+    .. versionadded:: 17.1.0
+    .. versionchanged:: 22.1.0
+       The ValueError was incomplete until now and only contained the human
+       readable error message. Now it contains all the information that has
+       been promised since 17.1.0.
+    """
+    return _InValidator(options)
+
+
+@attrs(repr=False, slots=False, hash=True)
+class _IsCallableValidator:
+    def __call__(self, inst, attr, value):
+        """
+        We use a callable class to be able to change the ``__repr__``.
+        """
+        if not callable(value):
+            message = (
+                "'{name}' must be callable "
+                "(got {value!r} that is a {actual!r})."
+            )
+            raise NotCallableError(
+                msg=message.format(
+                    name=attr.name, value=value, actual=value.__class__
+                ),
+                value=value,
+            )
+
+    def __repr__(self):
+        return "<is_callable validator>"
+
+
+def is_callable():
+    """
+    A validator that raises a `attr.exceptions.NotCallableError` if the
+    initializer is called with a value for this particular attribute
+    that is not callable.
+
+    .. versionadded:: 19.1.0
+
+    :raises `attr.exceptions.NotCallableError`: With a human readable error
+        message containing the attribute (`attrs.Attribute`) name,
+        and the value it got.
+    """
+    return _IsCallableValidator()
+
+
+@attrs(repr=False, slots=True, hash=True)
+class _DeepIterable:
+    member_validator = attrib(validator=is_callable())
+    iterable_validator = attrib(
+        default=None, validator=optional(is_callable())
+    )
+
+    def __call__(self, inst, attr, value):
+        """
+        We use a callable class to be able to change the ``__repr__``.
+        """
+        if self.iterable_validator is not None:
+            self.iterable_validator(inst, attr, value)
+
+        for member in value:
+            self.member_validator(inst, attr, member)
+
+    def __repr__(self):
+        iterable_identifier = (
+            ""
+            if self.iterable_validator is None
+            else f" {self.iterable_validator!r}"
+        )
+        return (
+            "<deep_iterable validator for{iterable_identifier}"
+            " iterables of {member!r}>"
+        ).format(
+            iterable_identifier=iterable_identifier,
+            member=self.member_validator,
+        )
+
+
+def deep_iterable(member_validator, iterable_validator=None):
+    """
+    A validator that performs deep validation of an iterable.
+
+    :param member_validator: Validator(s) to apply to iterable members
+    :param iterable_validator: Validator to apply to iterable itself
+        (optional)
+
+    .. versionadded:: 19.1.0
+
+    :raises TypeError: if any sub-validators fail
+    """
+    if isinstance(member_validator, (list, tuple)):
+        member_validator = and_(*member_validator)
+    return _DeepIterable(member_validator, iterable_validator)
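Editor's note: a small sketch combining `in_()` with `deep_iterable()`, which validates both the container and each member (the `Build` class is hypothetical):

```python
# Illustrative only: in_() and deep_iterable() composing member validators.
from attr import define, field, validators

@define
class Build:
    level: str = field(validator=validators.in_(("debug", "release")))
    tags: list = field(
        factory=list,
        validator=validators.deep_iterable(
            member_validator=validators.instance_of(str),
            iterable_validator=validators.instance_of(list),
        ),
    )

Build("debug", ["ci", "nightly"])     # ok
try:
    Build("fast", [])
except ValueError as e:
    print(e.args[0])                  # 'level' must be in ('debug', 'release') ...
```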
+
+
+@attrs(repr=False, slots=True, hash=True)
+class _DeepMapping:
+    key_validator = attrib(validator=is_callable())
+    value_validator = attrib(validator=is_callable())
+    mapping_validator = attrib(default=None, validator=optional(is_callable()))
+
+    def __call__(self, inst, attr, value):
+        """
+        We use a callable class to be able to change the ``__repr__``.
+        """
+        if self.mapping_validator is not None:
+            self.mapping_validator(inst, attr, value)
+
+        for key in value:
+            self.key_validator(inst, attr, key)
+            self.value_validator(inst, attr, value[key])
+
+    def __repr__(self):
+        return (
+            "<deep_mapping validator for objects mapping {key!r} to {value!r}>"
+        ).format(key=self.key_validator, value=self.value_validator)
+
+
+def deep_mapping(key_validator, value_validator, mapping_validator=None):
+    """
+    A validator that performs deep validation of a dictionary.
+
+    :param key_validator: Validator to apply to dictionary keys
+    :param value_validator: Validator to apply to dictionary values
+    :param mapping_validator: Validator to apply to top-level mapping
+        attribute (optional)
+
+    .. versionadded:: 19.1.0
+
+    :raises TypeError: if any sub-validators fail
+    """
+    return _DeepMapping(key_validator, value_validator, mapping_validator)
+
+
+@attrs(repr=False, frozen=True, slots=True)
+class _NumberValidator:
+    bound = attrib()
+    compare_op = attrib()
+    compare_func = attrib()
+
+    def __call__(self, inst, attr, value):
+        """
+        We use a callable class to be able to change the ``__repr__``.
+        """
+        if not self.compare_func(value, self.bound):
+            raise ValueError(
+                "'{name}' must be {op} {bound}: {value}".format(
+                    name=attr.name,
+                    op=self.compare_op,
+                    bound=self.bound,
+                    value=value,
+                )
+            )
+
+    def __repr__(self):
+        return "<Validator for x {op} {bound}>".format(
+            op=self.compare_op, bound=self.bound
+        )
+
+
+def lt(val):
+    """
+    A validator that raises `ValueError` if the initializer is called
+    with a number larger or equal to *val*.
+
+    :param val: Exclusive upper bound for values
+
+    .. versionadded:: 21.3.0
+    """
+    return _NumberValidator(val, "<", operator.lt)
+
+
+def le(val):
+    """
+    A validator that raises `ValueError` if the initializer is called
+    with a number greater than *val*.
+
+    :param val: Inclusive upper bound for values
+
+    .. versionadded:: 21.3.0
+    """
+    return _NumberValidator(val, "<=", operator.le)
+
+
+def ge(val):
+    """
+    A validator that raises `ValueError` if the initializer is called
+    with a number smaller than *val*.
+
+    :param val: Inclusive lower bound for values
+
+    .. versionadded:: 21.3.0
+    """
+    return _NumberValidator(val, ">=", operator.ge)
+
+
+def gt(val):
+    """
+    A validator that raises `ValueError` if the initializer is called
+    with a number smaller or equal to *val*.
+
+    :param val: Exclusive lower bound for values
+
+    .. versionadded:: 21.3.0
+    """
+    return _NumberValidator(val, ">", operator.gt)
+
+
+@attrs(repr=False, frozen=True, slots=True)
+class _MaxLengthValidator:
+    max_length = attrib()
+
+    def __call__(self, inst, attr, value):
+        """
+        We use a callable class to be able to change the ``__repr__``.
+        """
+        if len(value) > self.max_length:
+            raise ValueError(
+                "Length of '{name}' must be <= {max}: {len}".format(
+                    name=attr.name, max=self.max_length, len=len(value)
+                )
+            )
+
+    def __repr__(self):
+        return f"<max_len validator for {self.max_length}>"
+
+
+def max_len(length):
+    """
+    A validator that raises `ValueError` if the initializer is called
+    with a string or iterable that is longer than *length*.
+
+    :param int length: Maximum length of the string or iterable
+
+    .. versionadded:: 21.3.0
+    """
+    return _MaxLengthValidator(length)
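Editor's note: the numeric and length validators compose naturally, and passing a list of validators to `field()` combines them with `and_()`. A hedged sketch (the `Upload` class and bounds are hypothetical):

```python
# Illustrative only: numeric bounds and max_len(); a list of validators is and_-ed.
from attr import define, field, validators

@define
class Upload:
    chunk_kb: int = field(validator=[validators.gt(0), validators.le(1024)])
    name: str = field(validator=validators.max_len(64))

Upload(256, "report.csv")             # ok
try:
    Upload(0, "report.csv")
except ValueError as e:
    print(e.args[0])                  # 'chunk_kb' must be > 0: 0
```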
+
+
+@attrs(repr=False, frozen=True, slots=True)
+class _MinLengthValidator:
+    min_length = attrib()
+
+    def __call__(self, inst, attr, value):
+        """
+        We use a callable class to be able to change the ``__repr__``.
+        """
+        if len(value) < self.min_length:
+            raise ValueError(
+                "Length of '{name}' must be >= {min}: {len}".format(
+                    name=attr.name, min=self.min_length, len=len(value)
+                )
+            )
+
+    def __repr__(self):
+        return f"<min_len validator for {self.min_length}>"
+
+
+def min_len(length):
+    """
+    A validator that raises `ValueError` if the initializer is called
+    with a string or iterable that is shorter than *length*.
+
+    :param int length: Minimum length of the string or iterable
+
+    .. versionadded:: 22.1.0
+    """
+    return _MinLengthValidator(length)
+
+
+@attrs(repr=False, slots=True, hash=True)
+class _SubclassOfValidator:
+    type = attrib()
+
+    def __call__(self, inst, attr, value):
+        """
+        We use a callable class to be able to change the ``__repr__``.
+        """
+        if not issubclass(value, self.type):
+            raise TypeError(
+                "'{name}' must be a subclass of {type!r} "
+                "(got {value!r}).".format(
+                    name=attr.name,
+                    type=self.type,
+                    value=value,
+                ),
+                attr,
+                self.type,
+                value,
+            )
+
+    def __repr__(self):
+        return "<subclass_of validator for type {type!r}>".format(
+            type=self.type
+        )
+
+
+def _subclass_of(type):
+    """
+    A validator that raises a `TypeError` if the initializer is called
+    with a wrong type for this particular attribute (checks are performed using
+    `issubclass` therefore it's also valid to pass a tuple of types).
+
+    :param type: The type to check for.
+    :type type: type or tuple of types
+
+    :raises TypeError: With a human readable error message, the attribute
+        (of type `attrs.Attribute`), the expected type, and the value it
+        got.
+    """
+    return _SubclassOfValidator(type)
+
+
+@attrs(repr=False, slots=True, hash=True)
+class _NotValidator:
+    validator = attrib()
+    msg = attrib(
+        converter=default_if_none(
+            "not_ validator child '{validator!r}' "
+            "did not raise a captured error"
+        )
+    )
+    exc_types = attrib(
+        validator=deep_iterable(
+            member_validator=_subclass_of(Exception),
+            iterable_validator=instance_of(tuple),
+        ),
+    )
+
+    def __call__(self, inst, attr, value):
+        try:
+            self.validator(inst, attr, value)
+        except self.exc_types:
+            pass  # suppress error to invert validity
+        else:
+            raise ValueError(
+                self.msg.format(
+                    validator=self.validator,
+                    exc_types=self.exc_types,
+                ),
+                attr,
+                self.validator,
+                value,
+                self.exc_types,
+            )
+
+    def __repr__(self):
+        return (
+            "<not_ validator wrapping {what!r}, capturing {exc_types!r}>"
+        ).format(
+            what=self.validator,
+            exc_types=self.exc_types,
+        )
+
+
+def not_(validator, *, msg=None, exc_types=(ValueError, TypeError)):
+    """
+    A validator that wraps and logically 'inverts' the validator passed to it.
+    It will raise a `ValueError` if the provided validator *doesn't* raise a
+    `ValueError` or `TypeError` (by default), and will suppress the exception
+    if the provided validator *does*.
+
+    Intended to be used with existing validators to compose logic without
+    needing to create inverted variants, for example, ``not_(in_(...))``.
+
+    :param validator: A validator to be logically inverted.
+    :param msg: Message to raise if validator fails.
+        Formatted with keys ``exc_types`` and ``validator``.
+    :type msg: str
+    :param exc_types: Exception type(s) to capture.
+        Other types raised by child validators will not be intercepted and
+        pass through.
+
+    :raises ValueError: With a human readable error message,
+        the attribute (of type `attrs.Attribute`),
+        the validator that failed to raise an exception,
+        the value it got,
+        and the expected exception types.
+
+    .. versionadded:: 22.2.0
+    """
+    try:
+        exc_types = tuple(exc_types)
+    except TypeError:
+        exc_types = (exc_types,)
+    return _NotValidator(validator, msg, exc_types)
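Editor's note: `not_()` is the 22.2.0-era addition mentioned in the changelog later in this diff. A hedged sketch of the inversion pattern the docstring describes (the `Account` class and `RESERVED` tuple are hypothetical):

```python
# Illustrative only: not_() inverts a wrapped validator.
from attr import define, field, validators

RESERVED = ("admin", "root")

@define
class Account:
    user: str = field(validator=validators.not_(validators.in_(RESERVED)))

Account("alice")          # in_() raises ValueError, so not_() accepts
try:
    Account("root")       # in_() passes, so not_() raises
except ValueError as e:
    print(e.args[0])
```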
diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attr/validators.pyi b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attr/validators.pyi
new file mode 100644
index 00000000..fd9206de
--- /dev/null
+++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attr/validators.pyi
@@ -0,0 +1,86 @@
+from typing import (
+    Any,
+    AnyStr,
+    Callable,
+    Container,
+    ContextManager,
+    Iterable,
+    List,
+    Mapping,
+    Match,
+    Optional,
+    Pattern,
+    Tuple,
+    Type,
+    TypeVar,
+    Union,
+    overload,
+)
+
+from . import _ValidatorType
+from . import _ValidatorArgType
+
+_T = TypeVar("_T")
+_T1 = TypeVar("_T1")
+_T2 = TypeVar("_T2")
+_T3 = TypeVar("_T3")
+_I = TypeVar("_I", bound=Iterable)
+_K = TypeVar("_K")
+_V = TypeVar("_V")
+_M = TypeVar("_M", bound=Mapping)
+
+def set_disabled(run: bool) -> None: ...
+def get_disabled() -> bool: ...
+def disabled() -> ContextManager[None]: ...
+
+# To be more precise on instance_of, use some overloads.
+# If there are more than 3 items in the tuple, we fall back to Any.
+@overload
+def instance_of(type: Type[_T]) -> _ValidatorType[_T]: ...
+@overload
+def instance_of(type: Tuple[Type[_T]]) -> _ValidatorType[_T]: ...
+@overload
+def instance_of(
+    type: Tuple[Type[_T1], Type[_T2]]
+) -> _ValidatorType[Union[_T1, _T2]]: ...
+@overload
+def instance_of(
+    type: Tuple[Type[_T1], Type[_T2], Type[_T3]]
+) -> _ValidatorType[Union[_T1, _T2, _T3]]: ...
+@overload
+def instance_of(type: Tuple[type, ...]) -> _ValidatorType[Any]: ...
+def provides(interface: Any) -> _ValidatorType[Any]: ...
+def optional(
+    validator: Union[_ValidatorType[_T], List[_ValidatorType[_T]]]
+) -> _ValidatorType[Optional[_T]]: ...
+def in_(options: Container[_T]) -> _ValidatorType[_T]: ...
+def and_(*validators: _ValidatorType[_T]) -> _ValidatorType[_T]: ...
+def matches_re(
+    regex: Union[Pattern[AnyStr], AnyStr],
+    flags: int = ...,
+    func: Optional[
+        Callable[[AnyStr, AnyStr, int], Optional[Match[AnyStr]]]
+    ] = ...,
+) -> _ValidatorType[AnyStr]: ...
+def deep_iterable(
+    member_validator: _ValidatorArgType[_T],
+    iterable_validator: Optional[_ValidatorType[_I]] = ...,
+) -> _ValidatorType[_I]: ...
+def deep_mapping(
+    key_validator: _ValidatorType[_K],
+    value_validator: _ValidatorType[_V],
+    mapping_validator: Optional[_ValidatorType[_M]] = ...,
+) -> _ValidatorType[_M]: ...
+def is_callable() -> _ValidatorType[_T]: ...
+def lt(val: _T) -> _ValidatorType[_T]: ...
+def le(val: _T) -> _ValidatorType[_T]: ...
+def ge(val: _T) -> _ValidatorType[_T]: ...
+def gt(val: _T) -> _ValidatorType[_T]: ...
+def max_len(length: int) -> _ValidatorType[_T]: ...
+def min_len(length: int) -> _ValidatorType[_T]: ...
+def not_(
+    validator: _ValidatorType[_T],
+    *,
+    msg: Optional[str] = None,
+    exc_types: Union[Type[Exception], Iterable[Type[Exception]]] = ...
+) -> _ValidatorType[_T]: ...
diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attrs-21.4.0.dist-info/AUTHORS.rst b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attrs-21.4.0.dist-info/AUTHORS.rst
new file mode 100644
index 00000000..f14ef6c6
--- /dev/null
+++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attrs-21.4.0.dist-info/AUTHORS.rst
@@ -0,0 +1,11 @@
+Credits
+=======
+
+``attrs`` is written and maintained by `Hynek Schlawack <https://hynek.me/about/>`_.
+
+The development is kindly supported by `Variomedia AG <https://www.variomedia.de/>`_.
+
+A full list of contributors can be found in `GitHub's overview <https://github.com/python-attrs/attrs/graphs/contributors>`_.
+
+It’s the spiritual successor of `characteristic <https://github.com/hynek/characteristic>`_ and aspires to fix some of its clunkiness and unfortunate decisions.
+Both were inspired by Twisted’s `FancyEqMixin <https://twistedmatrix.com/documents/current/api/twisted.python.util.FancyEqMixin.html>`_ but both are implemented using class decorators because `subclassing is bad for you <https://www.youtube.com/watch?v=3MNVP9-hglc>`_, m’kay?
diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attrs-21.4.0.dist-info/INSTALLER b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attrs-21.4.0.dist-info/INSTALLER
new file mode 100644
index 00000000..a1b589e3
--- /dev/null
+++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attrs-21.4.0.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attrs-21.4.0.dist-info/LICENSE b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attrs-21.4.0.dist-info/LICENSE
new file mode 100644
index 00000000..7ae3df93
--- /dev/null
+++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attrs-21.4.0.dist-info/LICENSE
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2015 Hynek Schlawack
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attrs-21.4.0.dist-info/METADATA b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attrs-21.4.0.dist-info/METADATA new file mode 100644 index 00000000..aa327d5e --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attrs-21.4.0.dist-info/METADATA @@ -0,0 +1,232 @@ +Metadata-Version: 2.1 +Name: attrs +Version: 21.4.0 +Summary: Classes Without Boilerplate +Home-page: https://www.attrs.org/ +Author: Hynek Schlawack +Author-email: hs@ox.cx +Maintainer: Hynek Schlawack +Maintainer-email: hs@ox.cx +License: MIT +Project-URL: Documentation, https://www.attrs.org/ +Project-URL: Changelog, https://www.attrs.org/en/stable/changelog.html +Project-URL: Bug Tracker, https://github.com/python-attrs/attrs/issues +Project-URL: Source Code, https://github.com/python-attrs/attrs +Project-URL: Funding, https://github.com/sponsors/hynek +Project-URL: Tidelift, https://tidelift.com/subscription/pkg/pypi-attrs?utm_source=pypi-attrs&utm_medium=pypi +Project-URL: Ko-fi, https://ko-fi.com/the_hynek +Keywords: class,attribute,boilerplate +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Natural Language :: English +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.* +Description-Content-Type: text/x-rst +License-File: LICENSE +License-File: AUTHORS.rst +Provides-Extra: dev +Requires-Dist: coverage[toml] (>=5.0.2) ; extra == 'dev' +Requires-Dist: hypothesis ; extra == 'dev' +Requires-Dist: pympler ; extra == 'dev' +Requires-Dist: pytest (>=4.3.0) ; extra == 'dev' +Requires-Dist: six ; extra == 'dev' +Requires-Dist: mypy ; extra == 'dev' +Requires-Dist: pytest-mypy-plugins ; extra == 'dev' +Requires-Dist: zope.interface ; extra == 'dev' +Requires-Dist: furo ; extra == 'dev' +Requires-Dist: sphinx ; extra == 'dev' +Requires-Dist: sphinx-notfound-page ; extra == 'dev' +Requires-Dist: pre-commit ; extra == 'dev' +Requires-Dist: cloudpickle ; (platform_python_implementation == "CPython") and extra == 'dev' +Provides-Extra: docs +Requires-Dist: furo ; extra == 'docs' +Requires-Dist: sphinx ; extra == 'docs' +Requires-Dist: zope.interface ; extra == 'docs' +Requires-Dist: sphinx-notfound-page ; extra == 'docs' +Provides-Extra: tests +Requires-Dist: coverage[toml] (>=5.0.2) ; extra == 'tests' +Requires-Dist: hypothesis ; extra == 'tests' +Requires-Dist: pympler ; extra == 'tests' +Requires-Dist: pytest (>=4.3.0) ; extra == 'tests' +Requires-Dist: six ; extra == 'tests' +Requires-Dist: mypy ; extra == 'tests' +Requires-Dist: pytest-mypy-plugins ; extra == 'tests' +Requires-Dist: zope.interface ; extra == 'tests' +Requires-Dist: cloudpickle ; 
(platform_python_implementation == "CPython") and extra == 'tests' +Provides-Extra: tests_no_zope +Requires-Dist: coverage[toml] (>=5.0.2) ; extra == 'tests_no_zope' +Requires-Dist: hypothesis ; extra == 'tests_no_zope' +Requires-Dist: pympler ; extra == 'tests_no_zope' +Requires-Dist: pytest (>=4.3.0) ; extra == 'tests_no_zope' +Requires-Dist: six ; extra == 'tests_no_zope' +Requires-Dist: mypy ; extra == 'tests_no_zope' +Requires-Dist: pytest-mypy-plugins ; extra == 'tests_no_zope' +Requires-Dist: cloudpickle ; (platform_python_implementation == "CPython") and extra == 'tests_no_zope' + + +.. image:: https://www.attrs.org/en/stable/_static/attrs_logo.png + :alt: attrs logo + :align: center + + +``attrs`` is the Python package that will bring back the **joy** of **writing classes** by relieving you from the drudgery of implementing object protocols (aka `dunder methods `_). +`Trusted by NASA `_ for Mars missions since 2020! + +Its main goal is to help you to write **concise** and **correct** software without slowing down your code. + +.. teaser-end + +For that, it gives you a class decorator and a way to declaratively define the attributes on that class: + +.. -code-begin- + +.. code-block:: pycon + + >>> from attrs import asdict, define, make_class, Factory + + >>> @define + ... class SomeClass: + ... a_number: int = 42 + ... list_of_numbers: list[int] = Factory(list) + ... + ... def hard_math(self, another_number): + ... return self.a_number + sum(self.list_of_numbers) * another_number + + + >>> sc = SomeClass(1, [1, 2, 3]) + >>> sc + SomeClass(a_number=1, list_of_numbers=[1, 2, 3]) + + >>> sc.hard_math(3) + 19 + >>> sc == SomeClass(1, [1, 2, 3]) + True + >>> sc != SomeClass(2, [3, 2, 1]) + True + + >>> asdict(sc) + {'a_number': 1, 'list_of_numbers': [1, 2, 3]} + + >>> SomeClass() + SomeClass(a_number=42, list_of_numbers=[]) + + >>> C = make_class("C", ["a", "b"]) + >>> C("foo", "bar") + C(a='foo', b='bar') + + +After *declaring* your attributes ``attrs`` gives you: + +- a concise and explicit overview of the class's attributes, +- a nice human-readable ``__repr__``, +- a equality-checking methods, +- an initializer, +- and much more, + +*without* writing dull boilerplate code again and again and *without* runtime performance penalties. + +**Hate type annotations**!? +No problem! +Types are entirely **optional** with ``attrs``. +Simply assign ``attrs.field()`` to the attributes instead of annotating them with types. + +---- + +This example uses ``attrs``'s modern APIs that have been introduced in version 20.1.0, and the ``attrs`` package import name that has been added in version 21.3.0. +The classic APIs (``@attr.s``, ``attr.ib``, plus their serious business aliases) and the ``attr`` package import name will remain **indefinitely**. + +Please check out `On The Core API Names `_ for a more in-depth explanation. + + +Data Classes +============ + +On the tin, ``attrs`` might remind you of ``dataclasses`` (and indeed, ``dataclasses`` are a descendant of ``attrs``). +In practice it does a lot more and is more flexible. +For instance it allows you to define `special handling of NumPy arrays for equality checks `_, or allows more ways to `plug into the initialization process `_. + +For more details, please refer to our `comparison page `_. + + +.. -getting-help- + +Getting Help +============ + +Please use the ``python-attrs`` tag on `Stack Overflow `_ to get help. + +Answering questions of your fellow developers is also a great way to help the project! + + +.. 
-project-information- + +Project Information +=================== + +``attrs`` is released under the `MIT `_ license, +its documentation lives at `Read the Docs `_, +the code on `GitHub `_, +and the latest release on `PyPI `_. +It’s rigorously tested on Python 2.7, 3.5+, and PyPy. + +We collect information on **third-party extensions** in our `wiki `_. +Feel free to browse and add your own! + +If you'd like to contribute to ``attrs`` you're most welcome and we've written `a little guide `_ to get you started! + + +``attrs`` for Enterprise +------------------------ + +Available as part of the Tidelift Subscription. + +The maintainers of ``attrs`` and thousands of other packages are working with Tidelift to deliver commercial support and maintenance for the open source packages you use to build your applications. +Save time, reduce risk, and improve code health, while paying the maintainers of the exact packages you use. +`Learn more. `_ + + +Release Information +=================== + +21.4.0 (2021-12-29) +------------------- + +Changes +^^^^^^^ + +- Fixed the test suite on PyPy3.8 where ``cloudpickle`` does not work. + `#892 `_ +- Fixed ``coverage report`` for projects that use ``attrs`` and don't set a ``--source``. + `#895 `_, + `#896 `_ + +`Full changelog `_. + +Credits +======= + +``attrs`` is written and maintained by `Hynek Schlawack `_. + +The development is kindly supported by `Variomedia AG `_. + +A full list of contributors can be found in `GitHub's overview `_. + +It’s the spiritual successor of `characteristic `_ and aspires to fix some of it clunkiness and unfortunate decisions. +Both were inspired by Twisted’s `FancyEqMixin `_ but both are implemented using class decorators because `subclassing is bad for you `_, m’kay? + + diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attrs-21.4.0.dist-info/RECORD b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attrs-21.4.0.dist-info/RECORD new file mode 100644 index 00000000..64b3a6d5 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attrs-21.4.0.dist-info/RECORD @@ -0,0 +1,37 @@ +attr/__init__.py,sha256=_zhJ4O8Q5KR5gaIrjX73vkR5nA6NjfpMGXQChEdNljI,1667 +attr/__init__.pyi,sha256=ubRkstoRHPpQN17iA0OCh8waIwZ5NeJgbz0lwI8XUjY,15100 +attr/_cmp.py,sha256=JP0N7OIyTqIR3prUDfMZOR4DV4tlV_xXf39-bQg7xOo,4165 +attr/_cmp.pyi,sha256=oyjJVytrwwkUJOoe332IiYzp6pCVZEKKcKveH-ev604,317 +attr/_compat.py,sha256=i8u27AAK_4SzQnmTf3aliGV27UdYbJxdZ-O0tOHbLU8,8396 +attr/_config.py,sha256=aj1Lh8t2CuVa5nSxgCrLQtg_ZSdO8ZKeNJQd6RvpIp8,892 +attr/_funcs.py,sha256=sm_D12y2IyRW_bCnR7M-O7U5qHaieXr0BzINwJ7_K38,14753 +attr/_make.py,sha256=D05j0_ckcVIRFn2xHch5SPUCwh3t7WpeFj-3Ku9SocQ,102736 +attr/_next_gen.py,sha256=s5jCsVEQ4IhOjAykP4N0ETaWpg0RsgQttMvEZErUrhQ,5752 +attr/_version_info.py,sha256=sxD9yNai0jGbur_-RGEQHbgV2YX5_5G9PhrhBA5pA54,2194 +attr/_version_info.pyi,sha256=x_M3L3WuB7r_ULXAWjx959udKQ4HLB8l-hsc1FDGNvk,209 +attr/converters.py,sha256=uiiWTz8GLJe8I1Ty7UICK1DegVUnqHTXbOSnar7g7Nk,4078 +attr/converters.pyi,sha256=MQo7iEzPNVoFpKqD30sVwgVpdNoIeSCF2nsXvoxLZ-Y,416 +attr/exceptions.py,sha256=BMg7AljkJnvG-irMwL2TBHYlaLBXhSKnzoEWo4e42Zw,1981 +attr/exceptions.pyi,sha256=zZq8bCUnKAy9mDtBEw42ZhPhAUIHoTKedDQInJD883M,539 +attr/filters.py,sha256=JGZgvPGkdOfttkoL6XhXS6ZCoaVV5nZ8GCYeZNUN_mE,1124 +attr/filters.pyi,sha256=_Sm80jGySETX_Clzdkon5NHVjQWRl3Y3liQKZX1czXc,215 +attr/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +attr/setters.py,sha256=rH_UtQuHgQEC7hfZyMO_SJW0R1Gus7-a83U8igZfqs8,1466 
+attr/setters.pyi,sha256=7dM10rqpQVDW0y-iJUnq8rabdO5Wx2Sbo5LwNa0IXl0,573 +attr/validators.py,sha256=jVE9roaSOmTf0dJNSLHNaQNilkrlzc3pNNBKmv0g7pk,15966 +attr/validators.pyi,sha256=adn6rNbIXmRXlg_FKrTmWj0dOX0vKTsGG82Jd3YcJbQ,2268 +attrs-21.4.0.dist-info/AUTHORS.rst,sha256=wsqCNbGz_mklcJrt54APIZHZpoTIJLkXqEhhn4Nd8hc,752 +attrs-21.4.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +attrs-21.4.0.dist-info/LICENSE,sha256=v2WaKLSSQGAvVrvfSQy-LsUJsVuY-Z17GaUsdA4yeGM,1082 +attrs-21.4.0.dist-info/METADATA,sha256=WwgR4MfxE55PpGGv21UOEOEtXZGCqwekfXYg-JgA5HY,9810 +attrs-21.4.0.dist-info/RECORD,, +attrs-21.4.0.dist-info/WHEEL,sha256=z9j0xAa_JmUKMpmz72K0ZGALSM_n-wQVmGbleXx2VHg,110 +attrs-21.4.0.dist-info/top_level.txt,sha256=AGbmKnOtYpdkLRsDRQVSBIwfL32pAQ6BSo1mt-BxI7M,11 +attrs/__init__.py,sha256=CeyxLGVViAEKKsLOLaif8vF3vs1a28vsrRVLv7eMEgM,1109 +attrs/__init__.pyi,sha256=57aCxUJukK9lZlrUgk9RuWiBiPY5DzDKJAJkhbrStYw,1982 +attrs/converters.py,sha256=fCBEdlYWcmI3sCnpUk2pz22GYtXzqTkp6NeOpdI64PY,70 +attrs/exceptions.py,sha256=SlDli6AY77f6ny-H7oy98OkQjsrw-D_supEuErIVYkE,70 +attrs/filters.py,sha256=dc_dNey29kH6KLU1mT2Dakq7tZ3kBfzEGwzOmDzw1F8,67 +attrs/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +attrs/setters.py,sha256=oKw51C72Hh45wTwYvDHJP9kbicxiMhMR4Y5GvdpKdHQ,67 +attrs/validators.py,sha256=4ag1SyVD2Hm3PYKiNG_NOtR_e7f81Hr6GiNl4YvXo4Q,70 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attrs-21.4.0.dist-info/WHEEL b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attrs-21.4.0.dist-info/WHEEL new file mode 100644 index 00000000..0b18a281 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attrs-21.4.0.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.1) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attrs-21.4.0.dist-info/top_level.txt b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attrs-21.4.0.dist-info/top_level.txt new file mode 100644 index 00000000..eca8ba9f --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attrs-21.4.0.dist-info/top_level.txt @@ -0,0 +1,2 @@ +attr +attrs diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attrs-22.2.0.dist-info/INSTALLER b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attrs-22.2.0.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attrs-22.2.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attrs-22.2.0.dist-info/LICENSE b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attrs-22.2.0.dist-info/LICENSE new file mode 100644 index 00000000..2bd6453d --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attrs-22.2.0.dist-info/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015 Hynek Schlawack and the attrs contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attrs-22.2.0.dist-info/METADATA b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attrs-22.2.0.dist-info/METADATA new file mode 100644 index 00000000..0f71b57a --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attrs-22.2.0.dist-info/METADATA @@ -0,0 +1,278 @@ +Metadata-Version: 2.1 +Name: attrs +Version: 22.2.0 +Summary: Classes Without Boilerplate +Home-page: https://www.attrs.org/ +Author: Hynek Schlawack +Author-email: hs@ox.cx +Maintainer: Hynek Schlawack +Maintainer-email: hs@ox.cx +License: MIT +Project-URL: Documentation, https://www.attrs.org/ +Project-URL: Changelog, https://www.attrs.org/en/stable/changelog.html +Project-URL: Bug Tracker, https://github.com/python-attrs/attrs/issues +Project-URL: Source Code, https://github.com/python-attrs/attrs +Project-URL: Funding, https://github.com/sponsors/hynek +Project-URL: Tidelift, https://tidelift.com/subscription/pkg/pypi-attrs?utm_source=pypi-attrs&utm_medium=pypi +Project-URL: Ko-fi, https://ko-fi.com/the_hynek +Keywords: class,attribute,boilerplate,dataclass +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Natural Language :: English +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Requires-Python: >=3.6 +Description-Content-Type: text/markdown +License-File: LICENSE +Provides-Extra: cov +Requires-Dist: attrs[tests] ; extra == 'cov' +Requires-Dist: coverage-enable-subprocess ; extra == 'cov' +Requires-Dist: coverage[toml] (>=5.3) ; extra == 'cov' +Provides-Extra: dev +Requires-Dist: attrs[docs,tests] ; extra == 'dev' +Provides-Extra: docs +Requires-Dist: furo ; extra == 'docs' +Requires-Dist: sphinx ; extra == 'docs' +Requires-Dist: myst-parser ; extra == 'docs' +Requires-Dist: zope.interface ; extra == 'docs' +Requires-Dist: sphinx-notfound-page ; extra == 'docs' +Requires-Dist: sphinxcontrib-towncrier ; extra == 'docs' +Requires-Dist: towncrier ; extra == 'docs' +Provides-Extra: tests +Requires-Dist: attrs[tests-no-zope] ; extra == 'tests' +Requires-Dist: zope.interface ; extra == 'tests' +Provides-Extra: tests-no-zope +Requires-Dist: hypothesis ; extra == 'tests-no-zope' +Requires-Dist: pympler ; extra == 'tests-no-zope' +Requires-Dist: pytest (>=4.3.0) ; extra == 'tests-no-zope' +Requires-Dist: pytest-xdist[psutil] ; extra == 'tests-no-zope' +Requires-Dist: 
cloudpickle ; (platform_python_implementation == "CPython") and extra == 'tests-no-zope' +Requires-Dist: mypy (<0.990,>=0.971) ; (platform_python_implementation == "CPython") and extra == 'tests-no-zope' +Requires-Dist: pytest-mypy-plugins ; (platform_python_implementation == "CPython" and python_version < "3.11") and extra == 'tests-no-zope' +Provides-Extra: tests_no_zope +Requires-Dist: hypothesis ; extra == 'tests_no_zope' +Requires-Dist: pympler ; extra == 'tests_no_zope' +Requires-Dist: pytest (>=4.3.0) ; extra == 'tests_no_zope' +Requires-Dist: pytest-xdist[psutil] ; extra == 'tests_no_zope' +Requires-Dist: cloudpickle ; (platform_python_implementation == "CPython") and extra == 'tests_no_zope' +Requires-Dist: mypy (<0.990,>=0.971) ; (platform_python_implementation == "CPython") and extra == 'tests_no_zope' +Requires-Dist: pytest-mypy-plugins ; (platform_python_implementation == "CPython" and python_version < "3.11") and extra == 'tests_no_zope' + +

+ *(attrs logo; badges: Documentation, License: MIT, Downloads per month, DOI)*

+ + + +*attrs* is the Python package that will bring back the **joy** of **writing classes** by relieving you from the drudgery of implementing object protocols (aka [dunder methods](https://www.attrs.org/en/latest/glossary.html#term-dunder-methods)). +[Trusted by NASA](https://docs.github.com/en/account-and-profile/setting-up-and-managing-your-github-profile/customizing-your-profile/personalizing-your-profile#list-of-qualifying-repositories-for-mars-2020-helicopter-contributor-achievement) for Mars missions since 2020! + +Its main goal is to help you to write **concise** and **correct** software without slowing down your code. + + +## Sponsors + +*attrs* would not be possible without our [amazing sponsors](https://github.com/sponsors/hynek). +Especially those generously supporting us at the *The Organization* tier and higher: + +

+ *(sponsor logos)*

+ Please consider joining them to help make attrs’s maintenance more sustainable! +

+ + + +## Example + +*attrs* gives you a class decorator and a way to declaratively define the attributes on that class: + + + +```pycon +>>> from attrs import asdict, define, make_class, Factory + +>>> @define +... class SomeClass: +... a_number: int = 42 +... list_of_numbers: list[int] = Factory(list) +... +... def hard_math(self, another_number): +... return self.a_number + sum(self.list_of_numbers) * another_number + + +>>> sc = SomeClass(1, [1, 2, 3]) +>>> sc +SomeClass(a_number=1, list_of_numbers=[1, 2, 3]) + +>>> sc.hard_math(3) +19 +>>> sc == SomeClass(1, [1, 2, 3]) +True +>>> sc != SomeClass(2, [3, 2, 1]) +True + +>>> asdict(sc) +{'a_number': 1, 'list_of_numbers': [1, 2, 3]} + +>>> SomeClass() +SomeClass(a_number=42, list_of_numbers=[]) + +>>> C = make_class("C", ["a", "b"]) +>>> C("foo", "bar") +C(a='foo', b='bar') +``` + +After *declaring* your attributes, *attrs* gives you: + +- a concise and explicit overview of the class's attributes, +- a nice human-readable `__repr__`, +- equality-checking methods, +- an initializer, +- and much more, + +*without* writing dull boilerplate code again and again and *without* runtime performance penalties. + +**Hate type annotations**!? +No problem! +Types are entirely **optional** with *attrs*. +Simply assign `attrs.field()` to the attributes instead of annotating them with types. + +--- + +This example uses *attrs*'s modern APIs that have been introduced in version 20.1.0, and the *attrs* package import name that has been added in version 21.3.0. +The classic APIs (`@attr.s`, `attr.ib`, plus their serious-business aliases) and the `attr` package import name will remain **indefinitely**. + +Please check out [*On The Core API Names*](https://www.attrs.org/en/latest/names.html) for a more in-depth explanation. + + +## Data Classes + +On the tin, *attrs* might remind you of `dataclasses` (and indeed, `dataclasses` [are a descendant](https://hynek.me/articles/import-attrs/) of *attrs*). +In practice it does a lot more and is more flexible. +For instance it allows you to define [special handling of NumPy arrays for equality checks](https://www.attrs.org/en/stable/comparison.html#customization), or allows more ways to [plug into the initialization process](https://www.attrs.org/en/stable/init.html#hooking-yourself-into-initialization). + +For more details, please refer to our [comparison page](https://www.attrs.org/en/stable/why.html#data-classes). + + +## Project Information + +- [**Changelog**](https://www.attrs.org/en/stable/changelog.html) +- [**Documentation**](https://www.attrs.org/) +- [**PyPI**](https://pypi.org/project/attrs/) +- [**Source Code**](https://github.com/python-attrs/attrs) +- [**Contributing**](https://github.com/python-attrs/attrs/blob/main/.github/CONTRIBUTING.md) +- [**Third-party Extensions**](https://github.com/python-attrs/attrs/wiki/Extensions-to-attrs) +- **License**: [MIT](https://www.attrs.org/en/latest/license.html) +- **Get Help**: please use the `python-attrs` tag on [StackOverflow](https://stackoverflow.com/questions/tagged/python-attrs) +- **Supported Python Versions**: 3.6 and later + + +### *attrs* for Enterprise + +Available as part of the Tidelift Subscription. + +The maintainers of *attrs* and thousands of other packages are working with Tidelift to deliver commercial support and maintenance for the open source packages you use to build your applications. +Save time, reduce risk, and improve code health, while paying the maintainers of the exact packages you use. 
+[Learn more.](https://tidelift.com/subscription/pkg/pypi-attrs?utm_source=pypi-attrs&utm_medium=referral&utm_campaign=enterprise&utm_term=repo)
+
+
+## Changes in This Release
+
+### Backwards-incompatible Changes
+
+- Python 3.5 is not supported anymore.
+  [#988](https://github.com/python-attrs/attrs/issues/988)
+
+
+### Deprecations
+
+- Python 3.6 is now deprecated and support will be removed in the next release.
+  [#1017](https://github.com/python-attrs/attrs/issues/1017)
+
+
+### Changes
+
+- `attrs.field()` now supports an *alias* option for explicit `__init__` argument names.
+
+  Get `__init__` signatures matching any taste, peculiar or plain!
+  The [PEP 681 compatible](https://peps.python.org/pep-0681/#field-specifier-parameters) *alias* option can be used to override private attribute name mangling, or to add other arbitrary field argument name overrides.
+  [#950](https://github.com/python-attrs/attrs/issues/950)
+- `attrs.NOTHING` is now an enum value, making it possible to use it with e.g. [`typing.Literal`](https://docs.python.org/3/library/typing.html#typing.Literal).
+  [#983](https://github.com/python-attrs/attrs/issues/983)
+- Added missing re-import of `attr.AttrsInstance` to the `attrs` namespace.
+  [#987](https://github.com/python-attrs/attrs/issues/987)
+- Fixed a slight performance regression in classes with custom `__setattr__` and sped them up even more.
+  [#991](https://github.com/python-attrs/attrs/issues/991)
+- Class-creation performance improvements by switching performance-sensitive templating operations to f-strings.
+
+  You can expect an improvement of about 5% -- even for very simple classes.
+  [#995](https://github.com/python-attrs/attrs/issues/995)
+- `attrs.has()` is now a [`TypeGuard`](https://docs.python.org/3/library/typing.html#typing.TypeGuard) for `AttrsInstance`.
+  That means that type checkers know a class is an instance of an `attrs` class if you check it using `attrs.has()` (or `attr.has()`) first.
+  [#997](https://github.com/python-attrs/attrs/issues/997)
+- Made `attrs.AttrsInstance` stub available at runtime and fixed type errors related to the usage of `attrs.AttrsInstance` in *Pyright*.
+  [#999](https://github.com/python-attrs/attrs/issues/999)
+- On Python 3.10 and later, call [`abc.update_abstractmethods()`](https://docs.python.org/3/library/abc.html#abc.update_abstractmethods) on dict classes after creation.
+  This improves the detection of abstractness.
+  [#1001](https://github.com/python-attrs/attrs/issues/1001)
+- *attrs*'s pickling methods now use dicts instead of tuples.
+  That is safer and more robust across different versions of a class.
+  [#1009](https://github.com/python-attrs/attrs/issues/1009)
+- Added `attrs.validators.not_(wrapped_validator)` to logically invert *wrapped_validator* by accepting only values where *wrapped_validator* rejects the value with a `ValueError` or `TypeError` (by default; the exception types are configurable).
+  [#1010](https://github.com/python-attrs/attrs/issues/1010)
+- The type stubs for `attrs.cmp_using()` now have default values.
+  [#1027](https://github.com/python-attrs/attrs/issues/1027)
+- To conform with [PEP 681](https://peps.python.org/pep-0681/), `attr.s()` and `attrs.define()` now accept *unsafe_hash* in addition to *hash*.
+ [#1065](https://github.com/python-attrs/attrs/issues/1065) + +--- + +[Full changelog](https://www.attrs.org/en/stable/changelog.html) diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attrs-22.2.0.dist-info/RECORD b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attrs-22.2.0.dist-info/RECORD new file mode 100644 index 00000000..609a4af1 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attrs-22.2.0.dist-info/RECORD @@ -0,0 +1,37 @@ +attr/__init__.py,sha256=-lJ5CXKE5yKk97Z2HSMRJFiGz1TdXLU9q4Ysb2Id4IQ,1947 +attr/__init__.pyi,sha256=qOjUNync7Lq8NLk30l_DRTh1h62mMl1e4VnqBgY2x24,15831 +attr/_cmp.py,sha256=mwr1ImJlkFL9Zi0E55-90IfchMKr94ko6e-p4y__M_4,4094 +attr/_cmp.pyi,sha256=sGQmOM0w3_K4-X8cTXR7g0Hqr290E8PTObA9JQxWQqc,399 +attr/_compat.py,sha256=Da-SeMicy7SkTKCCwKtfX41sUMf0o54tK96zsu1qE60,5435 +attr/_config.py,sha256=5W8lgRePuIOWu1ZuqF1899e2CmXGc95-ipwTpF1cEU4,826 +attr/_funcs.py,sha256=0EqqZgKNZBk4PXQvCF_fuWWAz14mSdZpk4UBZpX_fDQ,14545 +attr/_make.py,sha256=MdYHoWXJ2WlQNZPMTX4gkBO06QgPyb3qwSWSxaJ6QVg,96087 +attr/_next_gen.py,sha256=95DRKAfIuHbcwO9W_yWtRsHt3IbfxbAgpyB6agxbghw,6059 +attr/_typing_compat.pyi,sha256=XDP54TUn-ZKhD62TOQebmzrwFyomhUCoGRpclb6alRA,469 +attr/_version_info.py,sha256=exSqb3b5E-fMSsgZAlEw9XcLpEgobPORCZpcaEglAM4,2121 +attr/_version_info.pyi,sha256=x_M3L3WuB7r_ULXAWjx959udKQ4HLB8l-hsc1FDGNvk,209 +attr/converters.py,sha256=xfGVSPRgWGcym6N5FZM9fyfvCQePqFyApWeC5BXKvoM,3602 +attr/converters.pyi,sha256=jKlpHBEt6HVKJvgrMFJRrHq8p61GXg4-Nd5RZWKJX7M,406 +attr/exceptions.py,sha256=ZGEMLv0CDY1TOtj49OF84myejOn-LCCXAKGIKalKkVU,1915 +attr/exceptions.pyi,sha256=zZq8bCUnKAy9mDtBEw42ZhPhAUIHoTKedDQInJD883M,539 +attr/filters.py,sha256=aZep54h8-4ZYV5lmZ3Dx2mqeQH4cMx6tuCmCylLNbEU,1038 +attr/filters.pyi,sha256=_Sm80jGySETX_Clzdkon5NHVjQWRl3Y3liQKZX1czXc,215 +attr/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +attr/setters.py,sha256=pbCZQ-pE6ZxjDqZfWWUhUFefXtpekIU4qS_YDMLPQ50,1400 +attr/setters.pyi,sha256=pyY8TVNBu8TWhOldv_RxHzmGvdgFQH981db70r0fn5I,567 +attr/validators.py,sha256=gBJAzoo1UNDRTG9-kE0LUoUTgDr2slJymPxb6-UPt7c,20501 +attr/validators.pyi,sha256=ZbJDuF6Kom-L6ym9Cc6eT370S_a7z8YhWmP7z35ayXc,2538 +attrs-22.2.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +attrs-22.2.0.dist-info/LICENSE,sha256=iCEVyV38KvHutnFPjsbVy8q_Znyv-HKfQkINpj9xTp8,1109 +attrs-22.2.0.dist-info/METADATA,sha256=jgQypZGK_yplaxCh1S1gnQ_NZYKk-EwtfWygdZ_NgIc,13531 +attrs-22.2.0.dist-info/RECORD,, +attrs-22.2.0.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92 +attrs-22.2.0.dist-info/top_level.txt,sha256=AGbmKnOtYpdkLRsDRQVSBIwfL32pAQ6BSo1mt-BxI7M,11 +attrs/__init__.py,sha256=90bKLoqyIHpMjnzJuXSar1dH5anUQXHqT7-yI1Qzg00,1149 +attrs/__init__.pyi,sha256=KMHncABV_sq4pubLAli-iOQjc9EM3g9y2r6M9V71_vY,2148 +attrs/converters.py,sha256=fCBEdlYWcmI3sCnpUk2pz22GYtXzqTkp6NeOpdI64PY,70 +attrs/exceptions.py,sha256=SlDli6AY77f6ny-H7oy98OkQjsrw-D_supEuErIVYkE,70 +attrs/filters.py,sha256=dc_dNey29kH6KLU1mT2Dakq7tZ3kBfzEGwzOmDzw1F8,67 +attrs/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +attrs/setters.py,sha256=oKw51C72Hh45wTwYvDHJP9kbicxiMhMR4Y5GvdpKdHQ,67 +attrs/validators.py,sha256=4ag1SyVD2Hm3PYKiNG_NOtR_e7f81Hr6GiNl4YvXo4Q,70 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attrs-22.2.0.dist-info/WHEEL b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attrs-22.2.0.dist-info/WHEEL new file mode 100644 index 00000000..57e3d840 --- /dev/null +++ 
b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attrs-22.2.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.38.4) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attrs-22.2.0.dist-info/top_level.txt b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attrs-22.2.0.dist-info/top_level.txt new file mode 100644 index 00000000..eca8ba9f --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attrs-22.2.0.dist-info/top_level.txt @@ -0,0 +1,2 @@ +attr +attrs diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attrs/__init__.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attrs/__init__.py new file mode 100644 index 00000000..81dd6b2f --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attrs/__init__.py @@ -0,0 +1,72 @@ +# SPDX-License-Identifier: MIT + +from attr import ( + NOTHING, + Attribute, + AttrsInstance, + Factory, + __author__, + __copyright__, + __description__, + __doc__, + __email__, + __license__, + __title__, + __url__, + __version__, + __version_info__, + assoc, + cmp_using, + define, + evolve, + field, + fields, + fields_dict, + frozen, + has, + make_class, + mutable, + resolve_types, + validate, +) +from attr._next_gen import asdict, astuple + +from . import converters, exceptions, filters, setters, validators + + +__all__ = [ + "__author__", + "__copyright__", + "__description__", + "__doc__", + "__email__", + "__license__", + "__title__", + "__url__", + "__version__", + "__version_info__", + "asdict", + "assoc", + "astuple", + "Attribute", + "AttrsInstance", + "cmp_using", + "converters", + "define", + "evolve", + "exceptions", + "Factory", + "field", + "fields_dict", + "fields", + "filters", + "frozen", + "has", + "make_class", + "mutable", + "NOTHING", + "resolve_types", + "setters", + "validate", + "validators", +] diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attrs/__init__.pyi b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attrs/__init__.pyi new file mode 100644 index 00000000..4ea64d8e --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attrs/__init__.pyi @@ -0,0 +1,67 @@ +from typing import ( + Any, + Callable, + Dict, + Mapping, + Optional, + Sequence, + Tuple, + Type, +) + +# Because we need to type our own stuff, we have to make everything from +# attr explicitly public too. 
+from attr import __author__ as __author__ +from attr import __copyright__ as __copyright__ +from attr import __description__ as __description__ +from attr import __email__ as __email__ +from attr import __license__ as __license__ +from attr import __title__ as __title__ +from attr import __url__ as __url__ +from attr import __version__ as __version__ +from attr import __version_info__ as __version_info__ +from attr import _FilterType +from attr import assoc as assoc +from attr import Attribute as Attribute +from attr import AttrsInstance as AttrsInstance +from attr import cmp_using as cmp_using +from attr import converters as converters +from attr import define as define +from attr import evolve as evolve +from attr import exceptions as exceptions +from attr import Factory as Factory +from attr import field as field +from attr import fields as fields +from attr import fields_dict as fields_dict +from attr import filters as filters +from attr import frozen as frozen +from attr import has as has +from attr import make_class as make_class +from attr import mutable as mutable +from attr import NOTHING as NOTHING +from attr import resolve_types as resolve_types +from attr import setters as setters +from attr import validate as validate +from attr import validators as validators + +# TODO: see definition of attr.asdict/astuple +def asdict( + inst: Any, + recurse: bool = ..., + filter: Optional[_FilterType[Any]] = ..., + dict_factory: Type[Mapping[Any, Any]] = ..., + retain_collection_types: bool = ..., + value_serializer: Optional[ + Callable[[type, Attribute[Any], Any], Any] + ] = ..., + tuple_keys: bool = ..., +) -> Dict[str, Any]: ... + +# TODO: add support for returning NamedTuple from the mypy plugin +def astuple( + inst: Any, + recurse: bool = ..., + filter: Optional[_FilterType[Any]] = ..., + tuple_factory: Type[Sequence[Any]] = ..., + retain_collection_types: bool = ..., +) -> Tuple[Any, ...]: ... 
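Editor's note: the `attrs/` package added above is a thin shim that re-exports the `attr` core under the newer import name, so both spellings resolve to the same objects. A hedged sketch, assuming both vendored packages are importable (`Point` is hypothetical):

```python
# Illustrative only: attr and attrs are the same library under two names.
import attr
import attrs

assert attrs.define is attr.define                                  # re-exported function
assert attrs.validators.instance_of is attr.validators.instance_of  # star-import shim

@attrs.define
class Point:
    x: int = 0

print(attrs.asdict(Point(3)))   # {'x': 3}
```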
diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attrs/converters.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attrs/converters.py new file mode 100644 index 00000000..edfa8d3c --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attrs/converters.py @@ -0,0 +1,3 @@ +# SPDX-License-Identifier: MIT + +from attr.converters import * # noqa diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attrs/exceptions.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attrs/exceptions.py new file mode 100644 index 00000000..bd9efed2 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attrs/exceptions.py @@ -0,0 +1,3 @@ +# SPDX-License-Identifier: MIT + +from attr.exceptions import * # noqa diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attrs/filters.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attrs/filters.py new file mode 100644 index 00000000..52959005 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attrs/filters.py @@ -0,0 +1,3 @@ +# SPDX-License-Identifier: MIT + +from attr.filters import * # noqa diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attrs/py.typed b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attrs/py.typed new file mode 100644 index 00000000..e69de29b diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attrs/setters.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attrs/setters.py new file mode 100644 index 00000000..9b507708 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attrs/setters.py @@ -0,0 +1,3 @@ +# SPDX-License-Identifier: MIT + +from attr.setters import * # noqa diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attrs/validators.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attrs/validators.py new file mode 100644 index 00000000..ab2c9b30 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/attrs/validators.py @@ -0,0 +1,3 @@ +# SPDX-License-Identifier: MIT + +from attr.validators import * # noqa diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/bin/futurize b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/bin/futurize index c2cbec45..392e2fad 100755 --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/bin/futurize +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/bin/futurize @@ -1,12 +1,8 @@ -#!/usr/local/bin/python3.7 -# EASY-INSTALL-ENTRY-SCRIPT: 'future==0.17.1','console_scripts','futurize' -__requires__ = 'future==0.17.1' +#!/usr/bin/python3 +# -*- coding: utf-8 -*- import re import sys -from pkg_resources import load_entry_point - +from libfuturize.main import main if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) - sys.exit( - load_entry_point('future==0.17.1', 'console_scripts', 'futurize')() - ) + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/bin/json b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/bin/json new file mode 100755 index 00000000..76697c5c --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/bin/json @@ -0,0 +1,8 @@ +#!/usr/bin/python3 +# -*- coding: utf-8 -*- +import re +import sys +from jsonspec.cli import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/bin/jsonpath.py 
b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/bin/jsonpath.py index 7c34053d..6b674505 100755 --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/bin/jsonpath.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/bin/jsonpath.py @@ -1,12 +1,8 @@ -#!/usr/local/bin/python3.7 -# EASY-INSTALL-ENTRY-SCRIPT: 'jsonpath-rw==1.4.0','console_scripts','jsonpath.py' -__requires__ = 'jsonpath-rw==1.4.0' +#!/usr/bin/python3 +# -*- coding: utf-8 -*- import re import sys -from pkg_resources import load_entry_point - +from jsonpath_rw.bin.jsonpath import entry_point if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) - sys.exit( - load_entry_point('jsonpath-rw==1.4.0', 'console_scripts', 'jsonpath.py')() - ) + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(entry_point()) diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/bin/jsonpath_ng b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/bin/jsonpath_ng new file mode 100755 index 00000000..2290ad32 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/bin/jsonpath_ng @@ -0,0 +1,8 @@ +#!/usr/bin/python3 +# -*- coding: utf-8 -*- +import re +import sys +from jsonpath_ng.bin.jsonpath import entry_point +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(entry_point()) diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/bin/jsonschema b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/bin/jsonschema index ceb4e1de..902c2bae 100755 --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/bin/jsonschema +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/bin/jsonschema @@ -1,12 +1,8 @@ -#!/usr/local/bin/python3.7 -# EASY-INSTALL-ENTRY-SCRIPT: 'jsonschema==2.6.0','console_scripts','jsonschema' -__requires__ = 'jsonschema==2.6.0' +#!/usr/bin/python3 +# -*- coding: utf-8 -*- import re import sys -from pkg_resources import load_entry_point - +from jsonschema.cli import main if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) - sys.exit( - load_entry_point('jsonschema==2.6.0', 'console_scripts', 'jsonschema')() - ) + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/bin/mako-render b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/bin/mako-render index 8d3e76d7..2b8ef7c3 100755 --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/bin/mako-render +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/bin/mako-render @@ -1,12 +1,8 @@ -#!/usr/local/bin/python3.7 -# EASY-INSTALL-ENTRY-SCRIPT: 'Mako==1.1.0','console_scripts','mako-render' -__requires__ = 'Mako==1.1.0' +#!/usr/bin/python3 +# -*- coding: utf-8 -*- import re import sys -from pkg_resources import load_entry_point - +from mako.cmd import cmdline if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) - sys.exit( - load_entry_point('Mako==1.1.0', 'console_scripts', 'mako-render')() - ) + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(cmdline()) diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/bin/normalizer b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/bin/normalizer new file mode 100755 index 00000000..937b45c9 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/bin/normalizer @@ -0,0 +1,8 @@ +#!/usr/bin/python3 +# -*- coding: utf-8 -*- +import re +import sys +from 
charset_normalizer.cli.normalizer import cli_detect +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(cli_detect()) diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/bin/pasteurize b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/bin/pasteurize index d1e6ed32..6106a533 100755 --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/bin/pasteurize +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/bin/pasteurize @@ -1,12 +1,8 @@ -#!/usr/local/bin/python3.7 -# EASY-INSTALL-ENTRY-SCRIPT: 'future==0.17.1','console_scripts','pasteurize' -__requires__ = 'future==0.17.1' +#!/usr/bin/python3 +# -*- coding: utf-8 -*- import re import sys -from pkg_resources import load_entry_point - +from libpasteurize.main import main if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) - sys.exit( - load_entry_point('future==0.17.1', 'console_scripts', 'pasteurize')() - ) + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/certifi-2021.10.8.dist-info/INSTALLER b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/certifi-2021.10.8.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/certifi-2021.10.8.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/certifi-2021.10.8.dist-info/LICENSE b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/certifi-2021.10.8.dist-info/LICENSE new file mode 100644 index 00000000..c2fda9a2 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/certifi-2021.10.8.dist-info/LICENSE @@ -0,0 +1,21 @@ +This package contains a modified version of ca-bundle.crt: + +ca-bundle.crt -- Bundle of CA Root Certificates + +Certificate data from Mozilla as of: Thu Nov 3 19:04:19 2011# +This is a bundle of X.509 certificates of public Certificate Authorities +(CA). These were automatically extracted from Mozilla's root certificates +file (certdata.txt). This file can be found in the mozilla source tree: +http://mxr.mozilla.org/mozilla/source/security/nss/lib/ckfw/builtins/certdata.txt?raw=1# +It contains the certificates in PEM format and therefore +can be directly used with curl / libcurl / php_curl, or with +an Apache+mod_ssl webserver for SSL client authentication. +Just configure this file as the SSLCACertificateFile.# + +***** BEGIN LICENSE BLOCK ***** +This Source Code Form is subject to the terms of the Mozilla Public License, +v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain +one at http://mozilla.org/MPL/2.0/. + +***** END LICENSE BLOCK ***** +@(#) $RCSfile: certdata.txt,v $ $Revision: 1.80 $ $Date: 2011/11/03 15:11:58 $ diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/certifi-2021.10.8.dist-info/METADATA b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/certifi-2021.10.8.dist-info/METADATA new file mode 100644 index 00000000..7a6860db --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/certifi-2021.10.8.dist-info/METADATA @@ -0,0 +1,83 @@ +Metadata-Version: 2.1 +Name: certifi +Version: 2021.10.8 +Summary: Python package for providing Mozilla's CA Bundle. 
+Home-page: https://certifiio.readthedocs.io/en/latest/ +Author: Kenneth Reitz +Author-email: me@kennethreitz.com +License: MPL-2.0 +Project-URL: Documentation, https://certifiio.readthedocs.io/en/latest/ +Project-URL: Source, https://github.com/certifi/python-certifi +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0) +Classifier: Natural Language :: English +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 + +Certifi: Python SSL Certificates +================================ + +`Certifi`_ provides Mozilla's carefully curated collection of Root Certificates for +validating the trustworthiness of SSL certificates while verifying the identity +of TLS hosts. It has been extracted from the `Requests`_ project. + +Installation +------------ + +``certifi`` is available on PyPI. Simply install it with ``pip``:: + + $ pip install certifi + +Usage +----- + +To reference the installed certificate authority (CA) bundle, you can use the +built-in function:: + + >>> import certifi + + >>> certifi.where() + '/usr/local/lib/python3.7/site-packages/certifi/cacert.pem' + +Or from the command line:: + + $ python -m certifi + /usr/local/lib/python3.7/site-packages/certifi/cacert.pem + +Enjoy! + +1024-bit Root Certificates +~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Browsers and certificate authorities have concluded that 1024-bit keys are +unacceptably weak for certificates, particularly root certificates. For this +reason, Mozilla has removed any weak (i.e. 1024-bit key) certificate from its +bundle, replacing it with an equivalent strong (i.e. 2048-bit or greater key) +certificate from the same CA. Because Mozilla removed these certificates from +its bundle, ``certifi`` removed them as well. + +In previous versions, ``certifi`` provided the ``certifi.old_where()`` function +to intentionally re-add the 1024-bit roots back into your bundle. This was not +recommended in production and therefore was removed at the end of 2018. + +.. _`Certifi`: https://certifiio.readthedocs.io/en/latest/ +.. _`Requests`: https://requests.readthedocs.io/en/master/ + +Addition/Removal of Certificates +-------------------------------- + +Certifi does not support any addition/removal or other modification of the +CA trust store content. This project is intended to provide a reliable and +highly portable root of trust to python deployments. Look to upstream projects +for methods to use alternate trust. 
+ + diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/certifi-2021.10.8.dist-info/RECORD b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/certifi-2021.10.8.dist-info/RECORD new file mode 100644 index 00000000..0c541d4b --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/certifi-2021.10.8.dist-info/RECORD @@ -0,0 +1,10 @@ +certifi-2021.10.8.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +certifi-2021.10.8.dist-info/LICENSE,sha256=vp2C82ES-Hp_HXTs1Ih-FGe7roh4qEAEoAEXseR1o-I,1049 +certifi-2021.10.8.dist-info/METADATA,sha256=iB_zbT1uX_8_NC7iGv0YEB-9b3idhQwHrFTSq8R1kD8,2994 +certifi-2021.10.8.dist-info/RECORD,, +certifi-2021.10.8.dist-info/WHEEL,sha256=ADKeyaGyKF5DwBNE0sRE5pvW-bSkFMJfBuhzZ3rceP4,110 +certifi-2021.10.8.dist-info/top_level.txt,sha256=KMu4vUCfsjLrkPbSNdgdekS-pVJzBAJFO__nI8NF6-U,8 +certifi/__init__.py,sha256=xWdRgntT3j1V95zkRipGOg_A1UfEju2FcpujhysZLRI,62 +certifi/__main__.py,sha256=xBBoj905TUWBLRGANOcf7oi6e-3dMP4cEoG9OyMs11g,243 +certifi/cacert.pem,sha256=-og4Keu4zSpgL5shwfhd4kz0eUnVILzrGCi0zRy2kGw,265969 +certifi/core.py,sha256=V0uyxKOYdz6ulDSusclrLmjbPgOXsD0BnEf0SQ7OnoE,2303 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/certifi-2021.10.8.dist-info/WHEEL b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/certifi-2021.10.8.dist-info/WHEEL new file mode 100644 index 00000000..6d38aa06 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/certifi-2021.10.8.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.35.1) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/certifi-2021.10.8.dist-info/top_level.txt b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/certifi-2021.10.8.dist-info/top_level.txt new file mode 100644 index 00000000..963eac53 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/certifi-2021.10.8.dist-info/top_level.txt @@ -0,0 +1 @@ +certifi diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/certifi-2022.12.7.dist-info/INSTALLER b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/certifi-2022.12.7.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/certifi-2022.12.7.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/certifi-2022.12.7.dist-info/LICENSE b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/certifi-2022.12.7.dist-info/LICENSE new file mode 100644 index 00000000..0a64774e --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/certifi-2022.12.7.dist-info/LICENSE @@ -0,0 +1,21 @@ +This package contains a modified version of ca-bundle.crt: + +ca-bundle.crt -- Bundle of CA Root Certificates + +Certificate data from Mozilla as of: Thu Nov 3 19:04:19 2011# +This is a bundle of X.509 certificates of public Certificate Authorities +(CA). These were automatically extracted from Mozilla's root certificates +file (certdata.txt). This file can be found in the mozilla source tree: +https://hg.mozilla.org/mozilla-central/file/tip/security/nss/lib/ckfw/builtins/certdata.txt +It contains the certificates in PEM format and therefore +can be directly used with curl / libcurl / php_curl, or with +an Apache+mod_ssl webserver for SSL client authentication. 
+Just configure this file as the SSLCACertificateFile.# + +***** BEGIN LICENSE BLOCK ***** +This Source Code Form is subject to the terms of the Mozilla Public License, +v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain +one at http://mozilla.org/MPL/2.0/. + +***** END LICENSE BLOCK ***** +@(#) $RCSfile: certdata.txt,v $ $Revision: 1.80 $ $Date: 2011/11/03 15:11:58 $ diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/certifi-2022.12.7.dist-info/METADATA b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/certifi-2022.12.7.dist-info/METADATA new file mode 100644 index 00000000..aeb1991a --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/certifi-2022.12.7.dist-info/METADATA @@ -0,0 +1,83 @@ +Metadata-Version: 2.1 +Name: certifi +Version: 2022.12.7 +Summary: Python package for providing Mozilla's CA Bundle. +Home-page: https://github.com/certifi/python-certifi +Author: Kenneth Reitz +Author-email: me@kennethreitz.com +License: MPL-2.0 +Project-URL: Source, https://github.com/certifi/python-certifi +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0) +Classifier: Natural Language :: English +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Requires-Python: >=3.6 +License-File: LICENSE + +Certifi: Python SSL Certificates +================================ + +Certifi provides Mozilla's carefully curated collection of Root Certificates for +validating the trustworthiness of SSL certificates while verifying the identity +of TLS hosts. It has been extracted from the `Requests`_ project. + +Installation +------------ + +``certifi`` is available on PyPI. Simply install it with ``pip``:: + + $ pip install certifi + +Usage +----- + +To reference the installed certificate authority (CA) bundle, you can use the +built-in function:: + + >>> import certifi + + >>> certifi.where() + '/usr/local/lib/python3.7/site-packages/certifi/cacert.pem' + +Or from the command line:: + + $ python -m certifi + /usr/local/lib/python3.7/site-packages/certifi/cacert.pem + +Enjoy! + +1024-bit Root Certificates +~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Browsers and certificate authorities have concluded that 1024-bit keys are +unacceptably weak for certificates, particularly root certificates. For this +reason, Mozilla has removed any weak (i.e. 1024-bit key) certificate from its +bundle, replacing it with an equivalent strong (i.e. 2048-bit or greater key) +certificate from the same CA. Because Mozilla removed these certificates from +its bundle, ``certifi`` removed them as well. + +In previous versions, ``certifi`` provided the ``certifi.old_where()`` function +to intentionally re-add the 1024-bit roots back into your bundle. This was not +recommended in production and therefore was removed at the end of 2018. + +.. 
_`Requests`: https://requests.readthedocs.io/en/master/ + +Addition/Removal of Certificates +-------------------------------- + +Certifi does not support any addition/removal or other modification of the +CA trust store content. This project is intended to provide a reliable and +highly portable root of trust to python deployments. Look to upstream projects +for methods to use alternate trust. + + diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/certifi-2022.12.7.dist-info/RECORD b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/certifi-2022.12.7.dist-info/RECORD new file mode 100644 index 00000000..1f00d6ad --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/certifi-2022.12.7.dist-info/RECORD @@ -0,0 +1,11 @@ +certifi-2022.12.7.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +certifi-2022.12.7.dist-info/LICENSE,sha256=oC9sY4-fuE0G93ZMOrCF2K9-2luTwWbaVDEkeQd8b7A,1052 +certifi-2022.12.7.dist-info/METADATA,sha256=chFpcxKhCPEQ3d8-Vz36zr2Micf1eQhKkFFk7_JvJNo,2911 +certifi-2022.12.7.dist-info/RECORD,, +certifi-2022.12.7.dist-info/WHEEL,sha256=ewwEueio1C2XeHTvT17n8dZUJgOvyCWCt0WVNLClP9o,92 +certifi-2022.12.7.dist-info/top_level.txt,sha256=KMu4vUCfsjLrkPbSNdgdekS-pVJzBAJFO__nI8NF6-U,8 +certifi/__init__.py,sha256=bK_nm9bLJzNvWZc2oZdiTwg2KWD4HSPBWGaM0zUDvMw,94 +certifi/__main__.py,sha256=xBBoj905TUWBLRGANOcf7oi6e-3dMP4cEoG9OyMs11g,243 +certifi/cacert.pem,sha256=LBHDzgj_xA05AxnHK8ENT5COnGNElNZe0svFUHMf1SQ,275233 +certifi/core.py,sha256=lhewz0zFb2b4ULsQurElmloYwQoecjWzPqY67P8T7iM,4219 +certifi/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/certifi-2022.12.7.dist-info/WHEEL b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/certifi-2022.12.7.dist-info/WHEEL new file mode 100644 index 00000000..5bad85fd --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/certifi-2022.12.7.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/certifi-2022.12.7.dist-info/top_level.txt b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/certifi-2022.12.7.dist-info/top_level.txt new file mode 100644 index 00000000..963eac53 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/certifi-2022.12.7.dist-info/top_level.txt @@ -0,0 +1 @@ +certifi diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/certifi/__init__.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/certifi/__init__.py old mode 100755 new mode 100644 index 0d59a056..a3546f12 --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/certifi/__init__.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/certifi/__init__.py @@ -1,3 +1,4 @@ -from .core import where +from .core import contents, where -__version__ = "2019.11.28" +__all__ = ["contents", "where"] +__version__ = "2022.12.07" diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/certifi/__main__.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/certifi/__main__.py old mode 100755 new mode 100644 index 5f1da0dd..8945b5da --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/certifi/__main__.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/certifi/__main__.py @@ -1,2 +1,12 @@ -from certifi import where -print(where()) +import argparse + +from certifi import contents, where + +parser = argparse.ArgumentParser() +parser.add_argument("-c", "--contents", 
action="store_true") +args = parser.parse_args() + +if args.contents: + print(contents()) +else: + print(where()) diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/certifi/cacert.pem b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/certifi/cacert.pem index a4758ef3..df9e4e3c 100644 --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/certifi/cacert.pem +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/certifi/cacert.pem @@ -28,68 +28,6 @@ DKqC5JlR3XC321Y9YeRq4VzW9v493kHMB65jUr9TU/Qr6cf9tveCX4XSQRjbgbME HMUfpIBvFSDJ3gyICh3WZlXi/EjJKSZp4A== -----END CERTIFICATE----- -# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2 -# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2 -# Label: "GlobalSign Root CA - R2" -# Serial: 4835703278459682885658125 -# MD5 Fingerprint: 94:14:77:7e:3e:5e:fd:8f:30:bd:41:b0:cf:e7:d0:30 -# SHA1 Fingerprint: 75:e0:ab:b6:13:85:12:27:1c:04:f8:5f:dd:de:38:e4:b7:24:2e:fe -# SHA256 Fingerprint: ca:42:dd:41:74:5f:d0:b8:1e:b9:02:36:2c:f9:d8:bf:71:9d:a1:bd:1b:1e:fc:94:6f:5b:4c:99:f4:2c:1b:9e ------BEGIN CERTIFICATE----- -MIIDujCCAqKgAwIBAgILBAAAAAABD4Ym5g0wDQYJKoZIhvcNAQEFBQAwTDEgMB4G -A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjIxEzARBgNVBAoTCkdsb2JhbFNp -Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDYxMjE1MDgwMDAwWhcNMjExMjE1 -MDgwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMjETMBEG -A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI -hvcNAQEBBQADggEPADCCAQoCggEBAKbPJA6+Lm8omUVCxKs+IVSbC9N/hHD6ErPL -v4dfxn+G07IwXNb9rfF73OX4YJYJkhD10FPe+3t+c4isUoh7SqbKSaZeqKeMWhG8 -eoLrvozps6yWJQeXSpkqBy+0Hne/ig+1AnwblrjFuTosvNYSuetZfeLQBoZfXklq -tTleiDTsvHgMCJiEbKjNS7SgfQx5TfC4LcshytVsW33hoCmEofnTlEnLJGKRILzd -C9XZzPnqJworc5HGnRusyMvo4KD0L5CLTfuwNhv2GXqF4G3yYROIXJ/gkwpRl4pa -zq+r1feqCapgvdzZX99yqWATXgAByUr6P6TqBwMhAo6CygPCm48CAwEAAaOBnDCB -mTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUm+IH -V2ccHsBqBt5ZtJot39wZhi4wNgYDVR0fBC8wLTAroCmgJ4YlaHR0cDovL2NybC5n -bG9iYWxzaWduLm5ldC9yb290LXIyLmNybDAfBgNVHSMEGDAWgBSb4gdXZxwewGoG -3lm0mi3f3BmGLjANBgkqhkiG9w0BAQUFAAOCAQEAmYFThxxol4aR7OBKuEQLq4Gs -J0/WwbgcQ3izDJr86iw8bmEbTUsp9Z8FHSbBuOmDAGJFtqkIk7mpM0sYmsL4h4hO -291xNBrBVNpGP+DTKqttVCL1OmLNIG+6KYnX3ZHu01yiPqFbQfXf5WRDLenVOavS -ot+3i9DAgBkcRcAtjOj4LaR0VknFBbVPFd5uRHg5h6h+u/N5GJG79G+dwfCMNYxd -AfvDbbnvRG15RjF+Cv6pgsH/76tuIMRQyV+dTZsXjAzlAcmgQWpzU/qlULRuJQ/7 -TBj0/VLZjmmx6BEP3ojY+x1J96relc8geMJgEtslQIxq/H5COEBkEveegeGTLg== ------END CERTIFICATE----- - -# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only -# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. 
- For authorized use only -# Label: "Verisign Class 3 Public Primary Certification Authority - G3" -# Serial: 206684696279472310254277870180966723415 -# MD5 Fingerprint: cd:68:b6:a7:c7:c4:ce:75:e0:1d:4f:57:44:61:92:09 -# SHA1 Fingerprint: 13:2d:0d:45:53:4b:69:97:cd:b2:d5:c3:39:e2:55:76:60:9b:5c:c6 -# SHA256 Fingerprint: eb:04:cf:5e:b1:f3:9a:fa:76:2f:2b:b1:20:f2:96:cb:a5:20:c1:b9:7d:b1:58:95:65:b8:1c:b9:a1:7b:72:44 ------BEGIN CERTIFICATE----- -MIIEGjCCAwICEQCbfgZJoz5iudXukEhxKe9XMA0GCSqGSIb3DQEBBQUAMIHKMQsw -CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl -cmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWdu -LCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlT -aWduIENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3Jp -dHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYD -VQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlT -aWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJ -bmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWdu -IENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg -LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMu6nFL8eB8aHm8b -N3O9+MlrlBIwT/A2R/XQkQr1F8ilYcEWQE37imGQ5XYgwREGfassbqb1EUGO+i2t -KmFZpGcmTNDovFJbcCAEWNF6yaRpvIMXZK0Fi7zQWM6NjPXr8EJJC52XJ2cybuGu -kxUccLwgTS8Y3pKI6GyFVxEa6X7jJhFUokWWVYPKMIno3Nij7SqAP395ZVc+FSBm -CC+Vk7+qRy+oRpfwEuL+wgorUeZ25rdGt+INpsyow0xZVYnm6FNcHOqd8GIWC6fJ -Xwzw3sJ2zq/3avL6QaaiMxTJ5Xpj055iN9WFZZ4O5lMkdBteHRJTW8cs54NJOxWu -imi5V5cCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAERSWwauSCPc/L8my/uRan2Te -2yFPhpk0djZX3dAVL8WtfxUfN2JzPtTnX84XA9s1+ivbrmAJXx5fj267Cz3qWhMe -DGBvtcC1IyIuBwvLqXTLR7sdwdela8wv0kL9Sd2nic9TutoAWii/gt/4uhMdUIaC -/Y4wjylGsB49Ndo4YhYYSq3mtlFs3q9i6wHQHiT+eo8SGhJouPtmmRQURVyu565p -F4ErWjfJXir0xuKhXFSbplQAz/DxwceYMBo7Nhbbo27q/a2ywtrvAkcTisDxszGt -TxzhT5yvDwyd93gN2PQ1VoDat20Xj50egWTh/sVFuq1ruQp6Tk9LhO5L8X3dEQ== ------END CERTIFICATE----- - # Issuer: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited # Subject: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. 
(limits liab.)/(c) 1999 Entrust.net Limited # Label: "Entrust.net Premium 2048 Secure Server CA" @@ -152,39 +90,6 @@ ksLi4xaNmjICq44Y3ekQEe5+NauQrz4wlHrQMz2nZQ/1/I6eYs9HRCwBXbsdtTLS R9I4LtD+gdwyah617jzV/OeBHRnDJELqYzmp -----END CERTIFICATE----- -# Issuer: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network -# Subject: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network -# Label: "AddTrust External Root" -# Serial: 1 -# MD5 Fingerprint: 1d:35:54:04:85:78:b0:3f:42:42:4d:bf:20:73:0a:3f -# SHA1 Fingerprint: 02:fa:f3:e2:91:43:54:68:60:78:57:69:4d:f5:e4:5b:68:85:18:68 -# SHA256 Fingerprint: 68:7f:a4:51:38:22:78:ff:f0:c8:b1:1f:8d:43:d5:76:67:1c:6e:b2:bc:ea:b4:13:fb:83:d9:65:d0:6d:2f:f2 ------BEGIN CERTIFICATE----- -MIIENjCCAx6gAwIBAgIBATANBgkqhkiG9w0BAQUFADBvMQswCQYDVQQGEwJTRTEU -MBIGA1UEChMLQWRkVHJ1c3QgQUIxJjAkBgNVBAsTHUFkZFRydXN0IEV4dGVybmFs -IFRUUCBOZXR3b3JrMSIwIAYDVQQDExlBZGRUcnVzdCBFeHRlcm5hbCBDQSBSb290 -MB4XDTAwMDUzMDEwNDgzOFoXDTIwMDUzMDEwNDgzOFowbzELMAkGA1UEBhMCU0Ux -FDASBgNVBAoTC0FkZFRydXN0IEFCMSYwJAYDVQQLEx1BZGRUcnVzdCBFeHRlcm5h -bCBUVFAgTmV0d29yazEiMCAGA1UEAxMZQWRkVHJ1c3QgRXh0ZXJuYWwgQ0EgUm9v -dDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALf3GjPm8gAELTngTlvt -H7xsD821+iO2zt6bETOXpClMfZOfvUq8k+0DGuOPz+VtUFrWlymUWoCwSXrbLpX9 -uMq/NzgtHj6RQa1wVsfwTz/oMp50ysiQVOnGXw94nZpAPA6sYapeFI+eh6FqUNzX -mk6vBbOmcZSccbNQYArHE504B4YCqOmoaSYYkKtMsE8jqzpPhNjfzp/haW+710LX -a0Tkx63ubUFfclpxCDezeWWkWaCUN/cALw3CknLa0Dhy2xSoRcRdKn23tNbE7qzN -E0S3ySvdQwAl+mG5aWpYIxG3pzOPVnVZ9c0p10a3CitlttNCbxWyuHv77+ldU9U0 -WicCAwEAAaOB3DCB2TAdBgNVHQ4EFgQUrb2YejS0Jvf6xCZU7wO94CTLVBowCwYD -VR0PBAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wgZkGA1UdIwSBkTCBjoAUrb2YejS0 -Jvf6xCZU7wO94CTLVBqhc6RxMG8xCzAJBgNVBAYTAlNFMRQwEgYDVQQKEwtBZGRU -cnVzdCBBQjEmMCQGA1UECxMdQWRkVHJ1c3QgRXh0ZXJuYWwgVFRQIE5ldHdvcmsx -IjAgBgNVBAMTGUFkZFRydXN0IEV4dGVybmFsIENBIFJvb3SCAQEwDQYJKoZIhvcN -AQEFBQADggEBALCb4IUlwtYj4g+WBpKdQZic2YR5gdkeWxQHIzZlj7DYd7usQWxH -YINRsPkyPef89iYTx4AWpb9a/IfPeHmJIZriTAcKhjW88t5RxNKWt9x+Tu5w/Rw5 -6wwCURQtjr0W4MHfRnXnJK3s9EK0hZNwEGe6nQY1ShjTK3rMUUKhemPR5ruhxSvC -Nr4TDea9Y355e6cJDUCrat2PisP29owaQgVR1EX1n6diIWgVIEM8med8vSTYqZEX -c4g/VhsxOBi0cQ+azcgOno4uG+GMmIPLHzHxREzGBHNJdmAPx/i9F4BrLunMTA5a -mnkPIAou1Z5jJh5VkpTYghdae9C8x49OhgQ= ------END CERTIFICATE----- - # Issuer: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. # Subject: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. # Label: "Entrust Root Certification Authority" @@ -220,112 +125,6 @@ eu6FSqdQgPCnXEqULl8FmTxSQeDNtGPPAUO6nIPcj2A781q0tHuu2guQOHXvgR1m 0vdXcDazv/wor3ElhVsT/h5/WrQ8 -----END CERTIFICATE----- -# Issuer: CN=GeoTrust Global CA O=GeoTrust Inc. -# Subject: CN=GeoTrust Global CA O=GeoTrust Inc. 
-# Label: "GeoTrust Global CA" -# Serial: 144470 -# MD5 Fingerprint: f7:75:ab:29:fb:51:4e:b7:77:5e:ff:05:3c:99:8e:f5 -# SHA1 Fingerprint: de:28:f4:a4:ff:e5:b9:2f:a3:c5:03:d1:a3:49:a7:f9:96:2a:82:12 -# SHA256 Fingerprint: ff:85:6a:2d:25:1d:cd:88:d3:66:56:f4:50:12:67:98:cf:ab:aa:de:40:79:9c:72:2d:e4:d2:b5:db:36:a7:3a ------BEGIN CERTIFICATE----- -MIIDVDCCAjygAwIBAgIDAjRWMA0GCSqGSIb3DQEBBQUAMEIxCzAJBgNVBAYTAlVT -MRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMRswGQYDVQQDExJHZW9UcnVzdCBHbG9i -YWwgQ0EwHhcNMDIwNTIxMDQwMDAwWhcNMjIwNTIxMDQwMDAwWjBCMQswCQYDVQQG -EwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEbMBkGA1UEAxMSR2VvVHJ1c3Qg -R2xvYmFsIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2swYYzD9 -9BcjGlZ+W988bDjkcbd4kdS8odhM+KhDtgPpTSEHCIjaWC9mOSm9BXiLnTjoBbdq -fnGk5sRgprDvgOSJKA+eJdbtg/OtppHHmMlCGDUUna2YRpIuT8rxh0PBFpVXLVDv -iS2Aelet8u5fa9IAjbkU+BQVNdnARqN7csiRv8lVK83Qlz6cJmTM386DGXHKTubU -1XupGc1V3sjs0l44U+VcT4wt/lAjNvxm5suOpDkZALeVAjmRCw7+OC7RHQWa9k0+ -bw8HHa8sHo9gOeL6NlMTOdReJivbPagUvTLrGAMoUgRx5aszPeE4uwc2hGKceeoW -MPRfwCvocWvk+QIDAQABo1MwUTAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTA -ephojYn7qwVkDBF9qn1luMrMTjAfBgNVHSMEGDAWgBTAephojYn7qwVkDBF9qn1l -uMrMTjANBgkqhkiG9w0BAQUFAAOCAQEANeMpauUvXVSOKVCUn5kaFOSPeCpilKIn -Z57QzxpeR+nBsqTP3UEaBU6bS+5Kb1VSsyShNwrrZHYqLizz/Tt1kL/6cdjHPTfS -tQWVYrmm3ok9Nns4d0iXrKYgjy6myQzCsplFAMfOEVEiIuCl6rYVSAlk6l5PdPcF -PseKUgzbFbS9bZvlxrFUaKnjaZC2mqUPuLk/IH2uSrW4nOQdtqvmlKXBx4Ot2/Un -hw4EbNX/3aBd7YdStysVAq45pmp06drE57xNNB6pXE0zX5IJL4hmXXeXxx12E6nV -5fEWCRE11azbJHFwLJhWC9kXtNHjUStedejV0NxPNO3CBWaAocvmMw== ------END CERTIFICATE----- - -# Issuer: CN=GeoTrust Universal CA O=GeoTrust Inc. -# Subject: CN=GeoTrust Universal CA O=GeoTrust Inc. -# Label: "GeoTrust Universal CA" -# Serial: 1 -# MD5 Fingerprint: 92:65:58:8b:a2:1a:31:72:73:68:5c:b4:a5:7a:07:48 -# SHA1 Fingerprint: e6:21:f3:35:43:79:05:9a:4b:68:30:9d:8a:2f:74:22:15:87:ec:79 -# SHA256 Fingerprint: a0:45:9b:9f:63:b2:25:59:f5:fa:5d:4c:6d:b3:f9:f7:2f:f1:93:42:03:35:78:f0:73:bf:1d:1b:46:cb:b9:12 ------BEGIN CERTIFICATE----- -MIIFaDCCA1CgAwIBAgIBATANBgkqhkiG9w0BAQUFADBFMQswCQYDVQQGEwJVUzEW -MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEeMBwGA1UEAxMVR2VvVHJ1c3QgVW5pdmVy -c2FsIENBMB4XDTA0MDMwNDA1MDAwMFoXDTI5MDMwNDA1MDAwMFowRTELMAkGA1UE -BhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xHjAcBgNVBAMTFUdlb1RydXN0 -IFVuaXZlcnNhbCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAKYV -VaCjxuAfjJ0hUNfBvitbtaSeodlyWL0AG0y/YckUHUWCq8YdgNY96xCcOq9tJPi8 -cQGeBvV8Xx7BDlXKg5pZMK4ZyzBIle0iN430SppyZj6tlcDgFgDgEB8rMQ7XlFTT -QjOgNB0eRXbdT8oYN+yFFXoZCPzVx5zw8qkuEKmS5j1YPakWaDwvdSEYfyh3peFh -F7em6fgemdtzbvQKoiFs7tqqhZJmr/Z6a4LauiIINQ/PQvE1+mrufislzDoR5G2v -c7J2Ha3QsnhnGqQ5HFELZ1aD/ThdDc7d8Lsrlh/eezJS/R27tQahsiFepdaVaH/w -mZ7cRQg+59IJDTWU3YBOU5fXtQlEIGQWFwMCTFMNaN7VqnJNk22CDtucvc+081xd -VHppCZbW2xHBjXWotM85yM48vCR85mLK4b19p71XZQvk/iXttmkQ3CgaRr0BHdCX -teGYO8A3ZNY9lO4L4fUorgtWv3GLIylBjobFS1J72HGrH4oVpjuDWtdYAVHGTEHZ -f9hBZ3KiKN9gg6meyHv8U3NyWfWTehd2Ds735VzZC1U0oqpbtWpU5xPKV+yXbfRe -Bi9Fi1jUIxaS5BZuKGNZMN9QAZxjiRqf2xeUgnA3wySemkfWWspOqGmJch+RbNt+ -nhutxx9z3SxPGWX9f5NAEC7S8O08ni4oPmkmM8V7AgMBAAGjYzBhMA8GA1UdEwEB -/wQFMAMBAf8wHQYDVR0OBBYEFNq7LqqwDLiIJlF0XG0D08DYj3rWMB8GA1UdIwQY -MBaAFNq7LqqwDLiIJlF0XG0D08DYj3rWMA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG -9w0BAQUFAAOCAgEAMXjmx7XfuJRAyXHEqDXsRh3ChfMoWIawC/yOsjmPRFWrZIRc -aanQmjg8+uUfNeVE44B5lGiku8SfPeE0zTBGi1QrlaXv9z+ZhP015s8xxtxqv6fX -IwjhmF7DWgh2qaavdy+3YL1ERmrvl/9zlcGO6JP7/TG37FcREUWbMPEaiDnBTzyn -ANXH/KttgCJwpQzgXQQpAvvLoJHRfNbDflDVnVi+QTjruXU8FdmbyUqDWcDaU/0z -uzYYm4UPFd3uLax2k7nZAY1IEKj79TiG8dsKxr2EoyNB3tZ3b4XUhRxQ4K5RirqN 
-Pnbiucon8l+f725ZDQbYKxek0nxru18UGkiPGkzns0ccjkxFKyDuSN/n3QmOGKja -QI2SJhFTYXNd673nxE0pN2HrrDktZy4W1vUAg4WhzH92xH3kt0tm7wNFYGm2DFKW -koRepqO1pD4r2czYG0eq8kTaT/kD6PAUyz/zg97QwVTjt+gKN02LIFkDMBmhLMi9 -ER/frslKxfMnZmaGrGiR/9nmUxwPi1xpZQomyB40w11Re9epnAahNt3ViZS82eQt -DF4JbAiXfKM9fJP/P6EUp8+1Xevb2xzEdt+Iub1FBZUbrvxGakyvSOPOrg/Sfuvm -bJxPgWp6ZKy7PtXny3YuxadIwVyQD8vIP/rmMuGNG2+k5o7Y+SlIis5z/iw= ------END CERTIFICATE----- - -# Issuer: CN=GeoTrust Universal CA 2 O=GeoTrust Inc. -# Subject: CN=GeoTrust Universal CA 2 O=GeoTrust Inc. -# Label: "GeoTrust Universal CA 2" -# Serial: 1 -# MD5 Fingerprint: 34:fc:b8:d0:36:db:9e:14:b3:c2:f2:db:8f:e4:94:c7 -# SHA1 Fingerprint: 37:9a:19:7b:41:85:45:35:0c:a6:03:69:f3:3c:2e:af:47:4f:20:79 -# SHA256 Fingerprint: a0:23:4f:3b:c8:52:7c:a5:62:8e:ec:81:ad:5d:69:89:5d:a5:68:0d:c9:1d:1c:b8:47:7f:33:f8:78:b9:5b:0b ------BEGIN CERTIFICATE----- -MIIFbDCCA1SgAwIBAgIBATANBgkqhkiG9w0BAQUFADBHMQswCQYDVQQGEwJVUzEW -MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1c3QgVW5pdmVy -c2FsIENBIDIwHhcNMDQwMzA0MDUwMDAwWhcNMjkwMzA0MDUwMDAwWjBHMQswCQYD -VQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1 -c3QgVW5pdmVyc2FsIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC -AQCzVFLByT7y2dyxUxpZKeexw0Uo5dfR7cXFS6GqdHtXr0om/Nj1XqduGdt0DE81 -WzILAePb63p3NeqqWuDW6KFXlPCQo3RWlEQwAx5cTiuFJnSCegx2oG9NzkEtoBUG -FF+3Qs17j1hhNNwqCPkuwwGmIkQcTAeC5lvO0Ep8BNMZcyfwqph/Lq9O64ceJHdq -XbboW0W63MOhBW9Wjo8QJqVJwy7XQYci4E+GymC16qFjwAGXEHm9ADwSbSsVsaxL -se4YuU6W3Nx2/zu+z18DwPw76L5GG//aQMJS9/7jOvdqdzXQ2o3rXhhqMcceujwb -KNZrVMaqW9eiLBsZzKIC9ptZvTdrhrVtgrrY6slWvKk2WP0+GfPtDCapkzj4T8Fd -IgbQl+rhrcZV4IErKIM6+vR7IVEAvlI4zs1meaj0gVbi0IMJR1FbUGrP20gaXT73 -y/Zl92zxlfgCOzJWgjl6W70viRu/obTo/3+NjN8D8WBOWBFM66M/ECuDmgFz2ZRt -hAAnZqzwcEAJQpKtT5MNYQlRJNiS1QuUYbKHsu3/mjX/hVTK7URDrBs8FmtISgoc -QIgfksILAAX/8sgCSqSqqcyZlpwvWOB94b67B9xfBHJcMTTD7F8t4D1kkCLm0ey4 -Lt1ZrtmhN79UNdxzMk+MBB4zsslG8dhcyFVQyWi9qLo2CQIDAQABo2MwYTAPBgNV -HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAfBgNV -HSMEGDAWgBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAOBgNVHQ8BAf8EBAMCAYYwDQYJ -KoZIhvcNAQEFBQADggIBAGbBxiPz2eAubl/oz66wsCVNK/g7WJtAJDday6sWSf+z -dXkzoS9tcBc0kf5nfo/sm+VegqlVHy/c1FEHEv6sFj4sNcZj/NwQ6w2jqtB8zNHQ -L1EuxBRa3ugZ4T7GzKQp5y6EqgYweHZUcyiYWTjgAA1i00J9IZ+uPTqM1fp3DRgr -Fg5fNuH8KrUwJM/gYwx7WBr+mbpCErGR9Hxo4sjoryzqyX6uuyo9DRXcNJW2GHSo -ag/HtPQTxORb7QrSpJdMKu0vbBKJPfEncKpqA1Ihn0CoZ1Dy81of398j9tx4TuaY -T1U6U+Pv8vSfx3zYWK8pIpe44L2RLrB27FcRz+8pRPPphXpgY+RdM4kX2TGq2tbz -GDVyz4crL2MjhF2EjD9XoIj8mZEoJmmZ1I+XRL6O1UixpCgp8RW04eWe3fiPpm8m -1wk8OhwRDqZsN/etRIcsKMfYdIKz0G9KV7s1KSegi+ghp4dkNl3M2Basx7InQJJV -OCiNUW7dFGdTbHFcJoRNdVq2fmBWqU2t+5sel/MN2dKXVHfaPRK34B7vCAas+YWH -6aLcr34YEoP9VhdBLtUpgn2Z9DH2canPLAEnpQW5qrJITirvn5NSUZU8UnOOVkwX -QMAJKOSLakhT2+zNVVXxxvjpoixMptEmX36vWkzaH6byHCx+rgIW0lbQL1dTR+iS ------END CERTIFICATE----- - # Issuer: CN=AAA Certificate Services O=Comodo CA Limited # Subject: CN=AAA Certificate Services O=Comodo CA Limited # Label: "Comodo AAA Services root" @@ -359,48 +158,6 @@ l2D4kF501KKaU73yqWjgom7C12yxow+ev+to51byrvLjKzg6CYG1a4XXvi3tPxq3 smPi9WIsgtRqAEFQ8TmDn5XpNpaYbg== -----END CERTIFICATE----- -# Issuer: CN=QuoVadis Root Certification Authority O=QuoVadis Limited OU=Root Certification Authority -# Subject: CN=QuoVadis Root Certification Authority O=QuoVadis Limited OU=Root Certification Authority -# Label: "QuoVadis Root CA" -# Serial: 985026699 -# MD5 Fingerprint: 27:de:36:fe:72:b7:00:03:00:9d:f4:f0:1e:6c:04:24 -# SHA1 Fingerprint: de:3f:40:bd:50:93:d3:9b:6c:60:f6:da:bc:07:62:01:00:89:76:c9 -# SHA256 Fingerprint: 
a4:5e:de:3b:bb:f0:9c:8a:e1:5c:72:ef:c0:72:68:d6:93:a2:1c:99:6f:d5:1e:67:ca:07:94:60:fd:6d:88:73 ------BEGIN CERTIFICATE----- -MIIF0DCCBLigAwIBAgIEOrZQizANBgkqhkiG9w0BAQUFADB/MQswCQYDVQQGEwJC -TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDElMCMGA1UECxMcUm9vdCBDZXJ0 -aWZpY2F0aW9uIEF1dGhvcml0eTEuMCwGA1UEAxMlUXVvVmFkaXMgUm9vdCBDZXJ0 -aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wMTAzMTkxODMzMzNaFw0yMTAzMTcxODMz -MzNaMH8xCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMSUw -IwYDVQQLExxSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MS4wLAYDVQQDEyVR -dW9WYWRpcyBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG -9w0BAQEFAAOCAQ8AMIIBCgKCAQEAv2G1lVO6V/z68mcLOhrfEYBklbTRvM16z/Yp -li4kVEAkOPcahdxYTMukJ0KX0J+DisPkBgNbAKVRHnAEdOLB1Dqr1607BxgFjv2D -rOpm2RgbaIr1VxqYuvXtdj182d6UajtLF8HVj71lODqV0D1VNk7feVcxKh7YWWVJ -WCCYfqtffp/p1k3sg3Spx2zY7ilKhSoGFPlU5tPaZQeLYzcS19Dsw3sgQUSj7cug -F+FxZc4dZjH3dgEZyH0DWLaVSR2mEiboxgx24ONmy+pdpibu5cxfvWenAScOospU -xbF6lR1xHkopigPcakXBpBlebzbNw6Kwt/5cOOJSvPhEQ+aQuwIDAQABo4ICUjCC -Ak4wPQYIKwYBBQUHAQEEMTAvMC0GCCsGAQUFBzABhiFodHRwczovL29jc3AucXVv -dmFkaXNvZmZzaG9yZS5jb20wDwYDVR0TAQH/BAUwAwEB/zCCARoGA1UdIASCAREw -ggENMIIBCQYJKwYBBAG+WAABMIH7MIHUBggrBgEFBQcCAjCBxxqBxFJlbGlhbmNl -IG9uIHRoZSBRdW9WYWRpcyBSb290IENlcnRpZmljYXRlIGJ5IGFueSBwYXJ0eSBh -c3N1bWVzIGFjY2VwdGFuY2Ugb2YgdGhlIHRoZW4gYXBwbGljYWJsZSBzdGFuZGFy -ZCB0ZXJtcyBhbmQgY29uZGl0aW9ucyBvZiB1c2UsIGNlcnRpZmljYXRpb24gcHJh -Y3RpY2VzLCBhbmQgdGhlIFF1b1ZhZGlzIENlcnRpZmljYXRlIFBvbGljeS4wIgYI -KwYBBQUHAgEWFmh0dHA6Ly93d3cucXVvdmFkaXMuYm0wHQYDVR0OBBYEFItLbe3T -KbkGGew5Oanwl4Rqy+/fMIGuBgNVHSMEgaYwgaOAFItLbe3TKbkGGew5Oanwl4Rq -y+/foYGEpIGBMH8xCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1p -dGVkMSUwIwYDVQQLExxSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MS4wLAYD -VQQDEyVRdW9WYWRpcyBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggQ6tlCL -MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOCAQEAitQUtf70mpKnGdSk -fnIYj9lofFIk3WdvOXrEql494liwTXCYhGHoG+NpGA7O+0dQoE7/8CQfvbLO9Sf8 -7C9TqnN7Az10buYWnuulLsS/VidQK2K6vkscPFVcQR0kvoIgR13VRH56FmjffU1R -cHhXHTMe/QKZnAzNCgVPx7uOpHX6Sm2xgI4JVrmcGmD+XcHXetwReNDWXcG31a0y -mQM6isxUJTkxgXsTIlG6Rmyhu576BGxJJnSP0nPrzDCi5upZIof4l/UO/erMkqQW -xFIY6iHOsfHmhIHluqmGKPJDWl0Snawe2ajlCmqnf6CHKc/yiU3U7MXi5nrQNiOK -SnQ2+Q== ------END CERTIFICATE----- - # Issuer: CN=QuoVadis Root CA 2 O=QuoVadis Limited # Subject: CN=QuoVadis Root CA 2 O=QuoVadis Limited # Label: "QuoVadis Root CA 2" @@ -516,33 +273,6 @@ JRDL8Try2frbSVa7pv6nQTXD4IhhyYjH3zYQIphZ6rBK+1YWc26sTfcioU+tHXot RSflMMFe8toTyyVCUZVHA4xsIcx0Qu1T/zOLjw9XARYvz6buyXAiFL39vmwLAw== -----END CERTIFICATE----- -# Issuer: CN=Sonera Class2 CA O=Sonera -# Subject: CN=Sonera Class2 CA O=Sonera -# Label: "Sonera Class 2 Root CA" -# Serial: 29 -# MD5 Fingerprint: a3:ec:75:0f:2e:88:df:fa:48:01:4e:0b:5c:48:6f:fb -# SHA1 Fingerprint: 37:f7:6d:e6:07:7c:90:c5:b1:3e:93:1a:b7:41:10:b4:f2:e4:9a:27 -# SHA256 Fingerprint: 79:08:b4:03:14:c1:38:10:0b:51:8d:07:35:80:7f:fb:fc:f8:51:8a:00:95:33:71:05:ba:38:6b:15:3d:d9:27 ------BEGIN CERTIFICATE----- -MIIDIDCCAgigAwIBAgIBHTANBgkqhkiG9w0BAQUFADA5MQswCQYDVQQGEwJGSTEP -MA0GA1UEChMGU29uZXJhMRkwFwYDVQQDExBTb25lcmEgQ2xhc3MyIENBMB4XDTAx -MDQwNjA3Mjk0MFoXDTIxMDQwNjA3Mjk0MFowOTELMAkGA1UEBhMCRkkxDzANBgNV -BAoTBlNvbmVyYTEZMBcGA1UEAxMQU29uZXJhIENsYXNzMiBDQTCCASIwDQYJKoZI -hvcNAQEBBQADggEPADCCAQoCggEBAJAXSjWdyvANlsdE+hY3/Ei9vX+ALTU74W+o -Z6m/AxxNjG8yR9VBaKQTBME1DJqEQ/xcHf+Js+gXGM2RX/uJ4+q/Tl18GybTdXnt -5oTjV+WtKcT0OijnpXuENmmz/V52vaMtmdOQTiMofRhj8VQ7Jp12W5dCsv+u8E7s -3TmVToMGf+dJQMjFAbJUWmYdPfz56TwKnoG4cPABi+QjVHzIrviQHgCWctRUz2Ej -vOr7nQKV0ba5cTppCD8PtOFCx4j1P5iop7oc4HFx71hXgVB6XGt0Rg6DA5jDjqhu 
-8nYybieDwnPz3BjotJPqdURrBGAgcVeHnfO+oJAjPYok4doh28MCAwEAAaMzMDEw -DwYDVR0TAQH/BAUwAwEB/zARBgNVHQ4ECgQISqCqWITTXjwwCwYDVR0PBAQDAgEG -MA0GCSqGSIb3DQEBBQUAA4IBAQBazof5FnIVV0sd2ZvnoiYw7JNn39Yt0jSv9zil -zqsWuasvfDXLrNAPtEwr/IDva4yRXzZ299uzGxnq9LIR/WFxRL8oszodv7ND6J+/ -3DEIcbCdjdY0RzKQxmUk96BKfARzjzlvF4xytb1LyHr4e4PDKE6cCepnP7JnBBvD -FNr450kkkdAdavphOe9r5yF1BgfYErQhIHBCcYHaPJo2vqZbDWpsmh+Re/n570K6 -Tk6ezAyNlNzZRZxe7EJQY670XcSxEtzKO6gunRRaBXW37Ndj4ro1tgQIkejanZz2 -ZrUYrAqmVCY0M9IbwdR/GjqOC6oybtv8TyWf2TLHllpwrN9M ------END CERTIFICATE----- - # Issuer: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com # Subject: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com # Label: "XRamp Global CA Root" @@ -640,46 +370,6 @@ VSJYACPq4xJDKVtHCN2MQWplBqjlIapBtJUhlbl90TSrE9atvNziPTnNvT51cKEY WQPJIrSPnNVeKtelttQKbfi3QBFGmh95DmK/D5fs4C8fF5Q= -----END CERTIFICATE----- -# Issuer: O=Government Root Certification Authority -# Subject: O=Government Root Certification Authority -# Label: "Taiwan GRCA" -# Serial: 42023070807708724159991140556527066870 -# MD5 Fingerprint: 37:85:44:53:32:45:1f:20:f0:f3:95:e1:25:c4:43:4e -# SHA1 Fingerprint: f4:8b:11:bf:de:ab:be:94:54:20:71:e6:41:de:6b:be:88:2b:40:b9 -# SHA256 Fingerprint: 76:00:29:5e:ef:e8:5b:9e:1f:d6:24:db:76:06:2a:aa:ae:59:81:8a:54:d2:77:4c:d4:c0:b2:c0:11:31:e1:b3 ------BEGIN CERTIFICATE----- -MIIFcjCCA1qgAwIBAgIQH51ZWtcvwgZEpYAIaeNe9jANBgkqhkiG9w0BAQUFADA/ -MQswCQYDVQQGEwJUVzEwMC4GA1UECgwnR292ZXJubWVudCBSb290IENlcnRpZmlj -YXRpb24gQXV0aG9yaXR5MB4XDTAyMTIwNTEzMjMzM1oXDTMyMTIwNTEzMjMzM1ow -PzELMAkGA1UEBhMCVFcxMDAuBgNVBAoMJ0dvdmVybm1lbnQgUm9vdCBDZXJ0aWZp -Y2F0aW9uIEF1dGhvcml0eTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB -AJoluOzMonWoe/fOW1mKydGGEghU7Jzy50b2iPN86aXfTEc2pBsBHH8eV4qNw8XR -IePaJD9IK/ufLqGU5ywck9G/GwGHU5nOp/UKIXZ3/6m3xnOUT0b3EEk3+qhZSV1q -gQdW8or5BtD3cCJNtLdBuTK4sfCxw5w/cP1T3YGq2GN49thTbqGsaoQkclSGxtKy -yhwOeYHWtXBiCAEuTk8O1RGvqa/lmr/czIdtJuTJV6L7lvnM4T9TjGxMfptTCAts -F/tnyMKtsc2AtJfcdgEWFelq16TheEfOhtX7MfP6Mb40qij7cEwdScevLJ1tZqa2 -jWR+tSBqnTuBto9AAGdLiYa4zGX+FVPpBMHWXx1E1wovJ5pGfaENda1UhhXcSTvx -ls4Pm6Dso3pdvtUqdULle96ltqqvKKyskKw4t9VoNSZ63Pc78/1Fm9G7Q3hub/FC -VGqY8A2tl+lSXunVanLeavcbYBT0peS2cWeqH+riTcFCQP5nRhc4L0c/cZyu5SHK -YS1tB6iEfC3uUSXxY5Ce/eFXiGvviiNtsea9P63RPZYLhY3Naye7twWb7LuRqQoH -EgKXTiCQ8P8NHuJBO9NAOueNXdpm5AKwB1KYXA6OM5zCppX7VRluTI6uSw+9wThN -Xo+EHWbNxWCWtFJaBYmOlXqYwZE8lSOyDvR5tMl8wUohAgMBAAGjajBoMB0GA1Ud -DgQWBBTMzO/MKWCkO7GStjz6MmKPrCUVOzAMBgNVHRMEBTADAQH/MDkGBGcqBwAE -MTAvMC0CAQAwCQYFKw4DAhoFADAHBgVnKgMAAAQUA5vwIhP/lSg209yewDL7MTqK -UWUwDQYJKoZIhvcNAQEFBQADggIBAECASvomyc5eMN1PhnR2WPWus4MzeKR6dBcZ -TulStbngCnRiqmjKeKBMmo4sIy7VahIkv9Ro04rQ2JyftB8M3jh+Vzj8jeJPXgyf -qzvS/3WXy6TjZwj/5cAWtUgBfen5Cv8b5Wppv3ghqMKnI6mGq3ZW6A4M9hPdKmaK -ZEk9GhiHkASfQlK3T8v+R0F2Ne//AHY2RTKbxkaFXeIksB7jSJaYV0eUVXoPQbFE -JPPB/hprv4j9wabak2BegUqZIJxIZhm1AHlUD7gsL0u8qV1bYH+Mh6XgUmMqvtg7 -hUAV/h62ZT/FS9p+tXo1KaMuephgIqP0fSdOLeq0dDzpD6QzDxARvBMB1uUO07+1 -EqLhRSPAzAhuYbeJq4PjJB7mXQfnHyA+z2fI56wwbSdLaG5LKlwCCDTb+HbkZ6Mm -nD+iMsJKxYEYMRBWqoTvLQr/uB930r+lWKBi5NdLkXWNiYCYfm3LU05er/ayl4WX -udpVBrkk7tfGOB5jGxI7leFYrPLfhNVfmS8NVVvmONsuP3LpSIXLuykTjx44Vbnz -ssQwmSNOXfJIoRIM3BKQCZBUkQM8R+XVyWXgt0t97EfTsws+rZ7QdAAO671RrcDe -LMDDav7v3Aun+kbfYNucpllQdSNpc5Oy+fwC00fmcc4QAu4njIT/rEUNE1yDMuAl -pYYsfPQS ------END CERTIFICATE----- - # Issuer: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com # Subject: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com # Label: "DigiCert 
Assured ID Root CA" @@ -771,34 +461,6 @@ vEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep +OkuE6N36B9K -----END CERTIFICATE----- -# Issuer: CN=DST Root CA X3 O=Digital Signature Trust Co. -# Subject: CN=DST Root CA X3 O=Digital Signature Trust Co. -# Label: "DST Root CA X3" -# Serial: 91299735575339953335919266965803778155 -# MD5 Fingerprint: 41:03:52:dc:0f:f7:50:1b:16:f0:02:8e:ba:6f:45:c5 -# SHA1 Fingerprint: da:c9:02:4f:54:d8:f6:df:94:93:5f:b1:73:26:38:ca:6a:d7:7c:13 -# SHA256 Fingerprint: 06:87:26:03:31:a7:24:03:d9:09:f1:05:e6:9b:cf:0d:32:e1:bd:24:93:ff:c6:d9:20:6d:11:bc:d6:77:07:39 ------BEGIN CERTIFICATE----- -MIIDSjCCAjKgAwIBAgIQRK+wgNajJ7qJMDmGLvhAazANBgkqhkiG9w0BAQUFADA/ -MSQwIgYDVQQKExtEaWdpdGFsIFNpZ25hdHVyZSBUcnVzdCBDby4xFzAVBgNVBAMT -DkRTVCBSb290IENBIFgzMB4XDTAwMDkzMDIxMTIxOVoXDTIxMDkzMDE0MDExNVow -PzEkMCIGA1UEChMbRGlnaXRhbCBTaWduYXR1cmUgVHJ1c3QgQ28uMRcwFQYDVQQD -Ew5EU1QgUm9vdCBDQSBYMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB -AN+v6ZdQCINXtMxiZfaQguzH0yxrMMpb7NnDfcdAwRgUi+DoM3ZJKuM/IUmTrE4O -rz5Iy2Xu/NMhD2XSKtkyj4zl93ewEnu1lcCJo6m67XMuegwGMoOifooUMM0RoOEq -OLl5CjH9UL2AZd+3UWODyOKIYepLYYHsUmu5ouJLGiifSKOeDNoJjj4XLh7dIN9b -xiqKqy69cK3FCxolkHRyxXtqqzTWMIn/5WgTe1QLyNau7Fqckh49ZLOMxt+/yUFw -7BZy1SbsOFU5Q9D8/RhcQPGX69Wam40dutolucbY38EVAjqr2m7xPi71XAicPNaD -aeQQmxkqtilX4+U9m5/wAl0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNV -HQ8BAf8EBAMCAQYwHQYDVR0OBBYEFMSnsaR7LHH62+FLkHX/xBVghYkQMA0GCSqG -SIb3DQEBBQUAA4IBAQCjGiybFwBcqR7uKGY3Or+Dxz9LwwmglSBd49lZRNI+DT69 -ikugdB/OEIKcdBodfpga3csTS7MgROSR6cz8faXbauX+5v3gTt23ADq1cEmv8uXr -AvHRAosZy5Q6XkjEGB5YGV8eAlrwDPGxrancWYaLbumR9YbK+rlmM6pZW87ipxZz -R8srzJmwN0jP41ZL9c8PDHIyh8bwRLtTcm1D9SZImlJnt1ir/md2cXjbDaJWFBM5 -JDGFoqgCWjBH4d1QB7wCCZAA62RjYJsWvIjJEubSfZGL+T0yjWW06XyxV3bqxbYo -Ob8VZRzI9neWagqNdwvYkQsEjgfbKbYK7p2CNTUQ ------END CERTIFICATE----- - # Issuer: CN=SwissSign Gold CA - G2 O=SwissSign AG # Subject: CN=SwissSign Gold CA - G2 O=SwissSign AG # Label: "SwissSign Gold CA - G2" @@ -881,104 +543,6 @@ hAhm0sQ2fac+EPyI4NSA5QC9qvNOBqN6avlicuMJT+ubDgEj8Z+7fNzcbBGXJbLy tGMU0gYqZ4yD9c7qB9iaah7s5Aq7KkzrCWA5zspi2C5u -----END CERTIFICATE----- -# Issuer: CN=GeoTrust Primary Certification Authority O=GeoTrust Inc. -# Subject: CN=GeoTrust Primary Certification Authority O=GeoTrust Inc. 
-# Label: "GeoTrust Primary Certification Authority" -# Serial: 32798226551256963324313806436981982369 -# MD5 Fingerprint: 02:26:c3:01:5e:08:30:37:43:a9:d0:7d:cf:37:e6:bf -# SHA1 Fingerprint: 32:3c:11:8e:1b:f7:b8:b6:52:54:e2:e2:10:0d:d6:02:90:37:f0:96 -# SHA256 Fingerprint: 37:d5:10:06:c5:12:ea:ab:62:64:21:f1:ec:8c:92:01:3f:c5:f8:2a:e9:8e:e5:33:eb:46:19:b8:de:b4:d0:6c ------BEGIN CERTIFICATE----- -MIIDfDCCAmSgAwIBAgIQGKy1av1pthU6Y2yv2vrEoTANBgkqhkiG9w0BAQUFADBY -MQswCQYDVQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjExMC8GA1UEAxMo -R2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEx -MjcwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMFgxCzAJBgNVBAYTAlVTMRYwFAYDVQQK -Ew1HZW9UcnVzdCBJbmMuMTEwLwYDVQQDEyhHZW9UcnVzdCBQcmltYXJ5IENlcnRp -ZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC -AQEAvrgVe//UfH1nrYNke8hCUy3f9oQIIGHWAVlqnEQRr+92/ZV+zmEwu3qDXwK9 -AWbK7hWNb6EwnL2hhZ6UOvNWiAAxz9juapYC2e0DjPt1befquFUWBRaa9OBesYjA -ZIVcFU2Ix7e64HXprQU9nceJSOC7KMgD4TCTZF5SwFlwIjVXiIrxlQqD17wxcwE0 -7e9GceBrAqg1cmuXm2bgyxx5X9gaBGgeRwLmnWDiNpcB3841kt++Z8dtd1k7j53W -kBWUvEI0EME5+bEnPn7WinXFsq+W06Lem+SYvn3h6YGttm/81w7a4DSwDRp35+MI -mO9Y+pyEtzavwt+s0vQQBnBxNQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4G -A1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQULNVQQZcVi/CPNmFbSvtr2ZnJM5IwDQYJ -KoZIhvcNAQEFBQADggEBAFpwfyzdtzRP9YZRqSa+S7iq8XEN3GHHoOo0Hnp3DwQ1 -6CePbJC/kRYkRj5KTs4rFtULUh38H2eiAkUxT87z+gOneZ1TatnaYzr4gNfTmeGl -4b7UVXGYNTq+k+qurUKykG/g/CFNNWMziUnWm07Kx+dOCQD32sfvmWKZd7aVIl6K -oKv0uHiYyjgZmclynnjNS6yvGaBzEi38wkG6gZHaFloxt/m0cYASSJlyc1pZU8Fj -UjPtp8nSOQJw+uCxQmYpqptR7TBUIhRf2asdweSU8Pj1K/fqynhG1riR/aYNKxoU -AT6A8EKglQdebc3MS6RFjasS6LPeWuWgfOgPIh1a6Vk= ------END CERTIFICATE----- - -# Issuer: CN=thawte Primary Root CA O=thawte, Inc. OU=Certification Services Division/(c) 2006 thawte, Inc. - For authorized use only -# Subject: CN=thawte Primary Root CA O=thawte, Inc. OU=Certification Services Division/(c) 2006 thawte, Inc. 
- For authorized use only -# Label: "thawte Primary Root CA" -# Serial: 69529181992039203566298953787712940909 -# MD5 Fingerprint: 8c:ca:dc:0b:22:ce:f5:be:72:ac:41:1a:11:a8:d8:12 -# SHA1 Fingerprint: 91:c6:d6:ee:3e:8a:c8:63:84:e5:48:c2:99:29:5c:75:6c:81:7b:81 -# SHA256 Fingerprint: 8d:72:2f:81:a9:c1:13:c0:79:1d:f1:36:a2:96:6d:b2:6c:95:0a:97:1d:b4:6b:41:99:f4:ea:54:b7:8b:fb:9f ------BEGIN CERTIFICATE----- -MIIEIDCCAwigAwIBAgIQNE7VVyDV7exJ9C/ON9srbTANBgkqhkiG9w0BAQUFADCB -qTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf -Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw -MDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxHzAdBgNV -BAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwHhcNMDYxMTE3MDAwMDAwWhcNMzYw -NzE2MjM1OTU5WjCBqTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5j -LjEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYG -A1UECxMvKGMpIDIwMDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNl -IG9ubHkxHzAdBgNVBAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwggEiMA0GCSqG -SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCsoPD7gFnUnMekz52hWXMJEEUMDSxuaPFs -W0hoSVk3/AszGcJ3f8wQLZU0HObrTQmnHNK4yZc2AreJ1CRfBsDMRJSUjQJib+ta -3RGNKJpchJAQeg29dGYvajig4tVUROsdB58Hum/u6f1OCyn1PoSgAfGcq/gcfomk -6KHYcWUNo1F77rzSImANuVud37r8UVsLr5iy6S7pBOhih94ryNdOwUxkHt3Ph1i6 -Sk/KaAcdHJ1KxtUvkcx8cXIcxcBn6zL9yZJclNqFwJu/U30rCfSMnZEfl2pSy94J -NqR32HuHUETVPm4pafs5SSYeCaWAe0At6+gnhcn+Yf1+5nyXHdWdAgMBAAGjQjBA -MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBR7W0XP -r87Lev0xkhpqtvNG61dIUDANBgkqhkiG9w0BAQUFAAOCAQEAeRHAS7ORtvzw6WfU -DW5FvlXok9LOAz/t2iWwHVfLHjp2oEzsUHboZHIMpKnxuIvW1oeEuzLlQRHAd9mz -YJ3rG9XRbkREqaYB7FViHXe4XI5ISXycO1cRrK1zN44veFyQaEfZYGDm/Ac9IiAX -xPcW6cTYcvnIc3zfFi8VqT79aie2oetaupgf1eNNZAqdE8hhuvU5HIe6uL17In/2 -/qxAeeWsEG89jxt5dovEN7MhGITlNgDrYyCZuen+MwS7QcjBAvlEYyCegc5C09Y/ -LHbTY5xZ3Y+m4Q6gLkH3LpVHz7z9M/P2C2F+fpErgUfCJzDupxBdN49cOSvkBPB7 -jVaMaA== ------END CERTIFICATE----- - -# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G5 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2006 VeriSign, Inc. - For authorized use only -# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G5 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2006 VeriSign, Inc. 
- For authorized use only -# Label: "VeriSign Class 3 Public Primary Certification Authority - G5" -# Serial: 33037644167568058970164719475676101450 -# MD5 Fingerprint: cb:17:e4:31:67:3e:e2:09:fe:45:57:93:f3:0a:fa:1c -# SHA1 Fingerprint: 4e:b6:d5:78:49:9b:1c:cf:5f:58:1e:ad:56:be:3d:9b:67:44:a5:e5 -# SHA256 Fingerprint: 9a:cf:ab:7e:43:c8:d8:80:d0:6b:26:2a:94:de:ee:e4:b4:65:99:89:c3:d0:ca:f1:9b:af:64:05:e4:1a:b7:df ------BEGIN CERTIFICATE----- -MIIE0zCCA7ugAwIBAgIQGNrRniZ96LtKIVjNzGs7SjANBgkqhkiG9w0BAQUFADCB -yjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL -ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJp -U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxW -ZXJpU2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0 -aG9yaXR5IC0gRzUwHhcNMDYxMTA4MDAwMDAwWhcNMzYwNzE2MjM1OTU5WjCByjEL -MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW -ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJpU2ln -biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp -U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y -aXR5IC0gRzUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvJAgIKXo1 -nmAMqudLO07cfLw8RRy7K+D+KQL5VwijZIUVJ/XxrcgxiV0i6CqqpkKzj/i5Vbex -t0uz/o9+B1fs70PbZmIVYc9gDaTY3vjgw2IIPVQT60nKWVSFJuUrjxuf6/WhkcIz -SdhDY2pSS9KP6HBRTdGJaXvHcPaz3BJ023tdS1bTlr8Vd6Gw9KIl8q8ckmcY5fQG -BO+QueQA5N06tRn/Arr0PO7gi+s3i+z016zy9vA9r911kTMZHRxAy3QkGSGT2RT+ -rCpSx4/VBEnkjWNHiDxpg8v+R70rfk/Fla4OndTRQ8Bnc+MUCH7lP59zuDMKz10/ -NIeWiu5T6CUVAgMBAAGjgbIwga8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8E -BAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJaW1hZ2UvZ2lmMCEwHzAH -BgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYjaHR0cDovL2xvZ28udmVy -aXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFH/TZafC3ey78DAJ80M5+gKv -MzEzMA0GCSqGSIb3DQEBBQUAA4IBAQCTJEowX2LP2BqYLz3q3JktvXf2pXkiOOzE -p6B4Eq1iDkVwZMXnl2YtmAl+X6/WzChl8gGqCBpH3vn5fJJaCGkgDdk+bW48DW7Y -5gaRQBi5+MHt39tBquCWIMnNZBU4gcmU7qKEKQsTb47bDN0lAtukixlE0kF6BWlK -WE9gyn6CagsCqiUXObXbf+eEZSqVir2G3l6BFoMtEMze/aiCKm0oHw0LxOXnGiYZ -4fQRbxC1lfznQgUy286dUV4otp6F01vvpX1FQHKOtw5rDgb7MzVIcbidJ4vEZV8N -hnacRHr2lVz2XTIIM6RUthg/aFzyQkqFOFSDX9HoLPKsEdao7WNq ------END CERTIFICATE----- - # Issuer: CN=SecureTrust CA O=SecureTrust Corporation # Subject: CN=SecureTrust CA O=SecureTrust Corporation # Label: "SecureTrust CA" @@ -1072,37 +636,6 @@ BA6+C4OmF4O5MBKgxTMVBbkN+8cFduPYSo38NBejxiEovjBFMR7HeL5YYTisO+IB ZQ== -----END CERTIFICATE----- -# Issuer: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C. -# Subject: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C. 
-# Label: "Network Solutions Certificate Authority" -# Serial: 116697915152937497490437556386812487904 -# MD5 Fingerprint: d3:f3:a6:16:c0:fa:6b:1d:59:b1:2d:96:4d:0e:11:2e -# SHA1 Fingerprint: 74:f8:a3:c3:ef:e7:b3:90:06:4b:83:90:3c:21:64:60:20:e5:df:ce -# SHA256 Fingerprint: 15:f0:ba:00:a3:ac:7a:f3:ac:88:4c:07:2b:10:11:a0:77:bd:77:c0:97:f4:01:64:b2:f8:59:8a:bd:83:86:0c ------BEGIN CERTIFICATE----- -MIID5jCCAs6gAwIBAgIQV8szb8JcFuZHFhfjkDFo4DANBgkqhkiG9w0BAQUFADBi -MQswCQYDVQQGEwJVUzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMu -MTAwLgYDVQQDEydOZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3Jp -dHkwHhcNMDYxMjAxMDAwMDAwWhcNMjkxMjMxMjM1OTU5WjBiMQswCQYDVQQGEwJV -UzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMuMTAwLgYDVQQDEydO -ZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwggEiMA0GCSqG -SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDkvH6SMG3G2I4rC7xGzuAnlt7e+foS0zwz -c7MEL7xxjOWftiJgPl9dzgn/ggwbmlFQGiaJ3dVhXRncEg8tCqJDXRfQNJIg6nPP -OCwGJgl6cvf6UDL4wpPTaaIjzkGxzOTVHzbRijr4jGPiFFlp7Q3Tf2vouAPlT2rl -mGNpSAW+Lv8ztumXWWn4Zxmuk2GWRBXTcrA/vGp97Eh/jcOrqnErU2lBUzS1sLnF -BgrEsEX1QV1uiUV7PTsmjHTC5dLRfbIR1PtYMiKagMnc/Qzpf14Dl847ABSHJ3A4 -qY5usyd2mFHgBeMhqxrVhSI8KbWaFsWAqPS7azCPL0YCorEMIuDTAgMBAAGjgZcw -gZQwHQYDVR0OBBYEFCEwyfsA106Y2oeqKtCnLrFAMadMMA4GA1UdDwEB/wQEAwIB -BjAPBgNVHRMBAf8EBTADAQH/MFIGA1UdHwRLMEkwR6BFoEOGQWh0dHA6Ly9jcmwu -bmV0c29sc3NsLmNvbS9OZXR3b3JrU29sdXRpb25zQ2VydGlmaWNhdGVBdXRob3Jp -dHkuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQC7rkvnt1frf6ott3NHhWrB5KUd5Oc8 -6fRZZXe1eltajSU24HqXLjjAV2CDmAaDn7l2em5Q4LqILPxFzBiwmZVRDuwduIj/ -h1AcgsLj4DKAv6ALR8jDMe+ZZzKATxcheQxpXN5eNK4CtSbqUN9/GGUsyfJj4akH -/nxxH2szJGoeBfcFaMBqEssuXmHLrijTfsK0ZpEmXzwuJF/LWA/rKOyvEZbz3Htv -wKeI8lN3s2Berq4o2jUsbzRF0ybh3uxbTydrFny9RAQYgrOJeRcQcT16ohZO9QHN -pGxlaKFJdlxDydi8NmdspZS11My5vWo1ViHe2MPr+8ukYEywVaCge1ey ------END CERTIFICATE----- - # Issuer: CN=COMODO ECC Certification Authority O=COMODO CA Limited # Subject: CN=COMODO ECC Certification Authority O=COMODO CA Limited # Label: "COMODO ECC Certification Authority" @@ -1127,38 +660,6 @@ fQjGGoe9GKhzvSbKYAydzpmfz1wPMOG+FDHqAjAU9JM8SaczepBGR7NjfRObTrdv GDeAU/7dIOA1mjbRxwG55tzd8/8dLDoWV9mSOdY= -----END CERTIFICATE----- -# Issuer: CN=OISTE WISeKey Global Root GA CA O=WISeKey OU=Copyright (c) 2005/OISTE Foundation Endorsed -# Subject: CN=OISTE WISeKey Global Root GA CA O=WISeKey OU=Copyright (c) 2005/OISTE Foundation Endorsed -# Label: "OISTE WISeKey Global Root GA CA" -# Serial: 86718877871133159090080555911823548314 -# MD5 Fingerprint: bc:6c:51:33:a7:e9:d3:66:63:54:15:72:1b:21:92:93 -# SHA1 Fingerprint: 59:22:a1:e1:5a:ea:16:35:21:f8:98:39:6a:46:46:b0:44:1b:0f:a9 -# SHA256 Fingerprint: 41:c9:23:86:6a:b4:ca:d6:b7:ad:57:80:81:58:2e:02:07:97:a6:cb:df:4f:ff:78:ce:83:96:b3:89:37:d7:f5 ------BEGIN CERTIFICATE----- -MIID8TCCAtmgAwIBAgIQQT1yx/RrH4FDffHSKFTfmjANBgkqhkiG9w0BAQUFADCB -ijELMAkGA1UEBhMCQ0gxEDAOBgNVBAoTB1dJU2VLZXkxGzAZBgNVBAsTEkNvcHly -aWdodCAoYykgMjAwNTEiMCAGA1UECxMZT0lTVEUgRm91bmRhdGlvbiBFbmRvcnNl -ZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9iYWwgUm9vdCBHQSBDQTAeFw0w -NTEyMTExNjAzNDRaFw0zNzEyMTExNjA5NTFaMIGKMQswCQYDVQQGEwJDSDEQMA4G -A1UEChMHV0lTZUtleTEbMBkGA1UECxMSQ29weXJpZ2h0IChjKSAyMDA1MSIwIAYD -VQQLExlPSVNURSBGb3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBX -SVNlS2V5IEdsb2JhbCBSb290IEdBIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A -MIIBCgKCAQEAy0+zAJs9Nt350UlqaxBJH+zYK7LG+DKBKUOVTJoZIyEVRd7jyBxR -VVuuk+g3/ytr6dTqvirdqFEr12bDYVxgAsj1znJ7O7jyTmUIms2kahnBAbtzptf2 -w93NvKSLtZlhuAGio9RN1AU9ka34tAhxZK9w8RxrfvbDd50kc3vkDIzh2TbhmYsF -mQvtRTEJysIA2/dyoJaqlYfQjse2YXMNdmaM3Bu0Y6Kff5MTMPGhJ9vZ/yxViJGg 
-4E8HsChWjBgbl0SOid3gF27nKu+POQoxhILYQBRJLnpB5Kf+42TMwVlxSywhp1t9 -4B3RLoGbw9ho972WG6xwsRYUC9tguSYBBQIDAQABo1EwTzALBgNVHQ8EBAMCAYYw -DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUswN+rja8sHnR3JQmthG+IbJphpQw -EAYJKwYBBAGCNxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBAEuh/wuHbrP5wUOx -SPMowB0uyQlB+pQAHKSkq0lPjz0e701vvbyk9vImMMkQyh2I+3QZH4VFvbBsUfk2 -ftv1TDI6QU9bR8/oCy22xBmddMVHxjtqD6wU2zz0c5ypBd8A3HR4+vg1YFkCExh8 -vPtNsCBtQ7tgMHpnM1zFmdH4LTlSc/uMqpclXHLZCB6rTjzjgTGfA6b7wP4piFXa -hNVQA7bihKOmNqoROgHhGEvWRGizPflTdISzRpFGlgC3gCy24eMQ4tui5yiPAZZi -Fj4A4xylNoEYokxSdsARo27mHbrjWr42U8U+dY+GaSlYU7Wcu2+fXMUY7N0v4ZjJ -/L7fCg0= ------END CERTIFICATE----- - # Issuer: CN=Certigna O=Dhimyotis # Subject: CN=Certigna O=Dhimyotis # Label: "Certigna" @@ -1189,36 +690,6 @@ t0QmwCbAr1UwnjvVNioZBPRcHv/PLLf/0P2HQBHVESO7SMAhqaQoLf0V+LBOK/Qw WyH8EZE0vkHve52Xdf+XlcCWWC/qu0bXu+TZLg== -----END CERTIFICATE----- -# Issuer: CN=Cybertrust Global Root O=Cybertrust, Inc -# Subject: CN=Cybertrust Global Root O=Cybertrust, Inc -# Label: "Cybertrust Global Root" -# Serial: 4835703278459682877484360 -# MD5 Fingerprint: 72:e4:4a:87:e3:69:40:80:77:ea:bc:e3:f4:ff:f0:e1 -# SHA1 Fingerprint: 5f:43:e5:b1:bf:f8:78:8c:ac:1c:c7:ca:4a:9a:c6:22:2b:cc:34:c6 -# SHA256 Fingerprint: 96:0a:df:00:63:e9:63:56:75:0c:29:65:dd:0a:08:67:da:0b:9c:bd:6e:77:71:4a:ea:fb:23:49:ab:39:3d:a3 ------BEGIN CERTIFICATE----- -MIIDoTCCAomgAwIBAgILBAAAAAABD4WqLUgwDQYJKoZIhvcNAQEFBQAwOzEYMBYG -A1UEChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2Jh -bCBSb290MB4XDTA2MTIxNTA4MDAwMFoXDTIxMTIxNTA4MDAwMFowOzEYMBYGA1UE -ChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2JhbCBS -b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA+Mi8vRRQZhP/8NN5 -7CPytxrHjoXxEnOmGaoQ25yiZXRadz5RfVb23CO21O1fWLE3TdVJDm71aofW0ozS -J8bi/zafmGWgE07GKmSb1ZASzxQG9Dvj1Ci+6A74q05IlG2OlTEQXO2iLb3VOm2y -HLtgwEZLAfVJrn5GitB0jaEMAs7u/OePuGtm839EAL9mJRQr3RAwHQeWP032a7iP -t3sMpTjr3kfb1V05/Iin89cqdPHoWqI7n1C6poxFNcJQZZXcY4Lv3b93TZxiyWNz -FtApD0mpSPCzqrdsxacwOUBdrsTiXSZT8M4cIwhhqJQZugRiQOwfOHB3EgZxpzAY -XSUnpQIDAQABo4GlMIGiMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/ -MB0GA1UdDgQWBBS2CHsNesysIEyGVjJez6tuhS1wVzA/BgNVHR8EODA2MDSgMqAw -hi5odHRwOi8vd3d3Mi5wdWJsaWMtdHJ1c3QuY29tL2NybC9jdC9jdHJvb3QuY3Js -MB8GA1UdIwQYMBaAFLYIew16zKwgTIZWMl7Pq26FLXBXMA0GCSqGSIb3DQEBBQUA -A4IBAQBW7wojoFROlZfJ+InaRcHUowAl9B8Tq7ejhVhpwjCt2BWKLePJzYFa+HMj -Wqd8BfP9IjsO0QbE2zZMcwSO5bAi5MXzLqXZI+O4Tkogp24CJJ8iYGd7ix1yCcUx -XOl5n4BHPa2hCwcUPUf/A2kaDAtE52Mlp3+yybh2hO0j9n0Hq0V+09+zv+mKts2o -omcrUtW3ZfA5TGOgkXmTUg9U3YO7n9GPp1Nzw8v/MOx8BLjYRB+TX3EJIrduPuoc -A06dGiBh+4E37F78CkWr1+cXVdCg6mCbpvbjjFspwgZgFJ0tl0ypkxWdYcQBX0jW -WL1WMRJOEcgh4LMRkWXbtKaIOM5V ------END CERTIFICATE----- - # Issuer: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority # Subject: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority # Label: "ePKI Root Certification Authority" @@ -1288,185 +759,6 @@ i/nDhDwTqn6Sm1dTk/pwwpEOMfmbZ13pljheX7NzTogVZ96edhBiIL5VaZVDADlN 9u6wWk5JRFRYX0KD -----END CERTIFICATE----- -# Issuer: CN=GeoTrust Primary Certification Authority - G3 O=GeoTrust Inc. OU=(c) 2008 GeoTrust Inc. - For authorized use only -# Subject: CN=GeoTrust Primary Certification Authority - G3 O=GeoTrust Inc. OU=(c) 2008 GeoTrust Inc. 
- For authorized use only -# Label: "GeoTrust Primary Certification Authority - G3" -# Serial: 28809105769928564313984085209975885599 -# MD5 Fingerprint: b5:e8:34:36:c9:10:44:58:48:70:6d:2e:83:d4:b8:05 -# SHA1 Fingerprint: 03:9e:ed:b8:0b:e7:a0:3c:69:53:89:3b:20:d2:d9:32:3a:4c:2a:fd -# SHA256 Fingerprint: b4:78:b8:12:25:0d:f8:78:63:5c:2a:a7:ec:7d:15:5e:aa:62:5e:e8:29:16:e2:cd:29:43:61:88:6c:d1:fb:d4 ------BEGIN CERTIFICATE----- -MIID/jCCAuagAwIBAgIQFaxulBmyeUtB9iepwxgPHzANBgkqhkiG9w0BAQsFADCB -mDELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsT -MChjKSAyMDA4IEdlb1RydXN0IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25s -eTE2MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhv -cml0eSAtIEczMB4XDTA4MDQwMjAwMDAwMFoXDTM3MTIwMTIzNTk1OVowgZgxCzAJ -BgNVBAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykg -MjAwOCBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0 -BgNVBAMTLUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg -LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANziXmJYHTNXOTIz -+uvLh4yn1ErdBojqZI4xmKU4kB6Yzy5jK/BGvESyiaHAKAxJcCGVn2TAppMSAmUm -hsalifD614SgcK9PGpc/BkTVyetyEH3kMSj7HGHmKAdEc5IiaacDiGydY8hS2pgn -5whMcD60yRLBxWeDXTPzAxHsatBT4tG6NmCUgLthY2xbF37fQJQeqw3CIShwiP/W -JmxsYAQlTlV+fe+/lEjetx3dcI0FX4ilm/LC7urRQEFtYjgdVgbFA0dRIBn8exAL -DmKudlW/X3e+PkkBUz2YJQN2JFodtNuJ6nnltrM7P7pMKEF/BqxqjsHQ9gUdfeZC -huOl1UcCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYw -HQYDVR0OBBYEFMR5yo6hTgMdHNxr2zFblD4/MH8tMA0GCSqGSIb3DQEBCwUAA4IB -AQAtxRPPVoB7eni9n64smefv2t+UXglpp+duaIy9cr5HqQ6XErhK8WTTOd8lNNTB -zU6B8A8ExCSzNJbGpqow32hhc9f5joWJ7w5elShKKiePEI4ufIbEAp7aDHdlDkQN -kv39sxY2+hENHYwOB4lqKVb3cvTdFZx3NWZXqxNT2I7BQMXXExZacse3aQHEerGD -AWh9jUGhlBjBJVz88P6DAod8DQ3PLghcSkANPuyBYeYk28rgDi0Hsj5W3I31QYUH -SJsMC8tJP33st/3LjWeJGqvtux6jAAgIFyqCXDFdRootD4abdNlF+9RAsXqqaC2G -spki4cErx5z481+oghLrGREt ------END CERTIFICATE----- - -# Issuer: CN=thawte Primary Root CA - G2 O=thawte, Inc. OU=(c) 2007 thawte, Inc. - For authorized use only -# Subject: CN=thawte Primary Root CA - G2 O=thawte, Inc. OU=(c) 2007 thawte, Inc. - For authorized use only -# Label: "thawte Primary Root CA - G2" -# Serial: 71758320672825410020661621085256472406 -# MD5 Fingerprint: 74:9d:ea:60:24:c4:fd:22:53:3e:cc:3a:72:d9:29:4f -# SHA1 Fingerprint: aa:db:bc:22:23:8f:c4:01:a1:27:bb:38:dd:f4:1d:db:08:9e:f0:12 -# SHA256 Fingerprint: a4:31:0d:50:af:18:a6:44:71:90:37:2a:86:af:af:8b:95:1f:fb:43:1d:83:7f:1e:56:88:b4:59:71:ed:15:57 ------BEGIN CERTIFICATE----- -MIICiDCCAg2gAwIBAgIQNfwmXNmET8k9Jj1Xm67XVjAKBggqhkjOPQQDAzCBhDEL -MAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjE4MDYGA1UECxMvKGMp -IDIwMDcgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAi -BgNVBAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMjAeFw0wNzExMDUwMDAw -MDBaFw0zODAxMTgyMzU5NTlaMIGEMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhh -d3RlLCBJbmMuMTgwNgYDVQQLEy8oYykgMjAwNyB0aGF3dGUsIEluYy4gLSBGb3Ig -YXV0aG9yaXplZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9v -dCBDQSAtIEcyMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEotWcgnuVnfFSeIf+iha/ -BebfowJPDQfGAFG6DAJSLSKkQjnE/o/qycG+1E3/n3qe4rF8mq2nhglzh9HnmuN6 -papu+7qzcMBniKI11KOasf2twu8x+qi58/sIxpHR+ymVo0IwQDAPBgNVHRMBAf8E -BTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUmtgAMADna3+FGO6Lts6K -DPgR4bswCgYIKoZIzj0EAwMDaQAwZgIxAN344FdHW6fmCsO99YCKlzUNG4k8VIZ3 -KMqh9HneteY4sPBlcIx/AlTCv//YoT7ZzwIxAMSNlPzcU9LcnXgWHxUzI1NS41ox -XZ3Krr0TKUQNJ1uo52icEvdYPy5yAlejj6EULg== ------END CERTIFICATE----- - -# Issuer: CN=thawte Primary Root CA - G3 O=thawte, Inc. OU=Certification Services Division/(c) 2008 thawte, Inc. 
- For authorized use only -# Subject: CN=thawte Primary Root CA - G3 O=thawte, Inc. OU=Certification Services Division/(c) 2008 thawte, Inc. - For authorized use only -# Label: "thawte Primary Root CA - G3" -# Serial: 127614157056681299805556476275995414779 -# MD5 Fingerprint: fb:1b:5d:43:8a:94:cd:44:c6:76:f2:43:4b:47:e7:31 -# SHA1 Fingerprint: f1:8b:53:8d:1b:e9:03:b6:a6:f0:56:43:5b:17:15:89:ca:f3:6b:f2 -# SHA256 Fingerprint: 4b:03:f4:58:07:ad:70:f2:1b:fc:2c:ae:71:c9:fd:e4:60:4c:06:4c:f5:ff:b6:86:ba:e5:db:aa:d7:fd:d3:4c ------BEGIN CERTIFICATE----- -MIIEKjCCAxKgAwIBAgIQYAGXt0an6rS0mtZLL/eQ+zANBgkqhkiG9w0BAQsFADCB -rjELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf -Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw -MDggdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAiBgNV -BAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMzAeFw0wODA0MDIwMDAwMDBa -Fw0zNzEyMDEyMzU5NTlaMIGuMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhhd3Rl -LCBJbmMuMSgwJgYDVQQLEx9DZXJ0aWZpY2F0aW9uIFNlcnZpY2VzIERpdmlzaW9u -MTgwNgYDVQQLEy8oYykgMjAwOCB0aGF3dGUsIEluYy4gLSBGb3IgYXV0aG9yaXpl -ZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9vdCBDQSAtIEcz -MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsr8nLPvb2FvdeHsbnndm -gcs+vHyu86YnmjSjaDFxODNi5PNxZnmxqWWjpYvVj2AtP0LMqmsywCPLLEHd5N/8 -YZzic7IilRFDGF/Eth9XbAoFWCLINkw6fKXRz4aviKdEAhN0cXMKQlkC+BsUa0Lf -b1+6a4KinVvnSr0eAXLbS3ToO39/fR8EtCab4LRarEc9VbjXsCZSKAExQGbY2SS9 -9irY7CFJXJv2eul/VTV+lmuNk5Mny5K76qxAwJ/C+IDPXfRa3M50hqY+bAtTyr2S -zhkGcuYMXDhpxwTWvGzOW/b3aJzcJRVIiKHpqfiYnODz1TEoYRFsZ5aNOZnLwkUk -OQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNV -HQ4EFgQUrWyqlGCc7eT/+j4KdCtjA/e2Wb8wDQYJKoZIhvcNAQELBQADggEBABpA -2JVlrAmSicY59BDlqQ5mU1143vokkbvnRFHfxhY0Cu9qRFHqKweKA3rD6z8KLFIW -oCtDuSWQP3CpMyVtRRooOyfPqsMpQhvfO0zAMzRbQYi/aytlryjvsvXDqmbOe1bu -t8jLZ8HJnBoYuMTDSQPxYA5QzUbF83d597YV4Djbxy8ooAw/dyZ02SUS2jHaGh7c -KUGRIjxpp7sC8rZcJwOJ9Abqm+RyguOhCcHpABnTPtRwa7pxpqpYrvS76Wy274fM -m7v/OeZWYdMKp8RcTGB7BXcmer/YB1IsYvdwY9k5vG8cwnncdimvzsUsZAReiDZu -MdRAGmI0Nj81Aa6sY6A= ------END CERTIFICATE----- - -# Issuer: CN=GeoTrust Primary Certification Authority - G2 O=GeoTrust Inc. OU=(c) 2007 GeoTrust Inc. - For authorized use only -# Subject: CN=GeoTrust Primary Certification Authority - G2 O=GeoTrust Inc. OU=(c) 2007 GeoTrust Inc. 
- For authorized use only -# Label: "GeoTrust Primary Certification Authority - G2" -# Serial: 80682863203381065782177908751794619243 -# MD5 Fingerprint: 01:5e:d8:6b:bd:6f:3d:8e:a1:31:f8:12:e0:98:73:6a -# SHA1 Fingerprint: 8d:17:84:d5:37:f3:03:7d:ec:70:fe:57:8b:51:9a:99:e6:10:d7:b0 -# SHA256 Fingerprint: 5e:db:7a:c4:3b:82:a0:6a:87:61:e8:d7:be:49:79:eb:f2:61:1f:7d:d7:9b:f9:1c:1c:6b:56:6a:21:9e:d7:66 ------BEGIN CERTIFICATE----- -MIICrjCCAjWgAwIBAgIQPLL0SAoA4v7rJDteYD7DazAKBggqhkjOPQQDAzCBmDEL -MAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsTMChj -KSAyMDA3IEdlb1RydXN0IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25seTE2 -MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0 -eSAtIEcyMB4XDTA3MTEwNTAwMDAwMFoXDTM4MDExODIzNTk1OVowgZgxCzAJBgNV -BAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykgMjAw -NyBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0BgNV -BAMTLUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgLSBH -MjB2MBAGByqGSM49AgEGBSuBBAAiA2IABBWx6P0DFUPlrOuHNxFi79KDNlJ9RVcL -So17VDs6bl8VAsBQps8lL33KSLjHUGMcKiEIfJo22Av+0SbFWDEwKCXzXV2juLal -tJLtbCyf691DiaI8S0iRHVDsJt/WYC69IaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO -BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFBVfNVdRVfslsq0DafwBo/q+EVXVMAoG -CCqGSM49BAMDA2cAMGQCMGSWWaboCd6LuvpaiIjwH5HTRqjySkwCY/tsXzjbLkGT -qQ7mndwxHLKgpxgceeHHNgIwOlavmnRs9vuD4DPTCF+hnMJbn0bWtsuRBmOiBucz -rD6ogRLQy7rQkgu2npaqBA+K ------END CERTIFICATE----- - -# Issuer: CN=VeriSign Universal Root Certification Authority O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2008 VeriSign, Inc. - For authorized use only -# Subject: CN=VeriSign Universal Root Certification Authority O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2008 VeriSign, Inc. - For authorized use only -# Label: "VeriSign Universal Root Certification Authority" -# Serial: 85209574734084581917763752644031726877 -# MD5 Fingerprint: 8e:ad:b5:01:aa:4d:81:e4:8c:1d:d1:e1:14:00:95:19 -# SHA1 Fingerprint: 36:79:ca:35:66:87:72:30:4d:30:a5:fb:87:3b:0f:a7:7b:b7:0d:54 -# SHA256 Fingerprint: 23:99:56:11:27:a5:71:25:de:8c:ef:ea:61:0d:df:2f:a0:78:b5:c8:06:7f:4e:82:82:90:bf:b8:60:e8:4b:3c ------BEGIN CERTIFICATE----- -MIIEuTCCA6GgAwIBAgIQQBrEZCGzEyEDDrvkEhrFHTANBgkqhkiG9w0BAQsFADCB -vTELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL -ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwOCBWZXJp -U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MTgwNgYDVQQDEy9W -ZXJpU2lnbiBVbml2ZXJzYWwgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe -Fw0wODA0MDIwMDAwMDBaFw0zNzEyMDEyMzU5NTlaMIG9MQswCQYDVQQGEwJVUzEX -MBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlTaWduIFRydXN0 -IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAyMDA4IFZlcmlTaWduLCBJbmMuIC0gRm9y -IGF1dGhvcml6ZWQgdXNlIG9ubHkxODA2BgNVBAMTL1ZlcmlTaWduIFVuaXZlcnNh -bCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEF -AAOCAQ8AMIIBCgKCAQEAx2E3XrEBNNti1xWb/1hajCMj1mCOkdeQmIN65lgZOIzF -9uVkhbSicfvtvbnazU0AtMgtc6XHaXGVHzk8skQHnOgO+k1KxCHfKWGPMiJhgsWH -H26MfF8WIFFE0XBPV+rjHOPMee5Y2A7Cs0WTwCznmhcrewA3ekEzeOEz4vMQGn+H -LL729fdC4uW/h2KJXwBL38Xd5HVEMkE6HnFuacsLdUYI0crSK5XQz/u5QGtkjFdN -/BMReYTtXlT2NJ8IAfMQJQYXStrxHXpma5hgZqTZ79IugvHw7wnqRMkVauIDbjPT -rJ9VAMf2CGqUuV/c4DPxhGD5WycRtPwW8rtWaoAljQIDAQABo4GyMIGvMA8GA1Ud -EwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMG0GCCsGAQUFBwEMBGEwX6FdoFsw -WTBXMFUWCWltYWdlL2dpZjAhMB8wBwYFKw4DAhoEFI/l0xqGrI2Oa8PPgGrUSBgs -exkuMCUWI2h0dHA6Ly9sb2dvLnZlcmlzaWduLmNvbS92c2xvZ28uZ2lmMB0GA1Ud -DgQWBBS2d/ppSEefUxLVwuoHMnYH0ZcHGTANBgkqhkiG9w0BAQsFAAOCAQEASvj4 -sAPmLGd75JR3Y8xuTPl9Dg3cyLk1uXBPY/ok+myDjEedO2Pzmvl2MpWRsXe8rJq+ 
-seQxIcaBlVZaDrHC1LGmWazxY8u4TB1ZkErvkBYoH1quEPuBUDgMbMzxPcP1Y+Oz -4yHJJDnp/RVmRvQbEdBNc6N9Rvk97ahfYtTxP/jgdFcrGJ2BtMQo2pSXpXDrrB2+ -BxHw1dvd5Yzw1TKwg+ZX4o+/vqGqvz0dtdQ46tewXDpPaj+PwGZsY6rp2aQW9IHR -lRQOfc2VNNnSj3BzgXucfr2YYdhFh5iQxeuGMMY1v/D/w1WIg0vvBZIGcfK4mJO3 -7M2CYfE45k+XmCpajQ== ------END CERTIFICATE----- - -# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G4 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2007 VeriSign, Inc. - For authorized use only -# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G4 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2007 VeriSign, Inc. - For authorized use only -# Label: "VeriSign Class 3 Public Primary Certification Authority - G4" -# Serial: 63143484348153506665311985501458640051 -# MD5 Fingerprint: 3a:52:e1:e7:fd:6f:3a:e3:6f:f3:6f:99:1b:f9:22:41 -# SHA1 Fingerprint: 22:d5:d8:df:8f:02:31:d1:8d:f7:9d:b7:cf:8a:2d:64:c9:3f:6c:3a -# SHA256 Fingerprint: 69:dd:d7:ea:90:bb:57:c9:3e:13:5d:c8:5e:a6:fc:d5:48:0b:60:32:39:bd:c4:54:fc:75:8b:2a:26:cf:7f:79 ------BEGIN CERTIFICATE----- -MIIDhDCCAwqgAwIBAgIQL4D+I4wOIg9IZxIokYesszAKBggqhkjOPQQDAzCByjEL -MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW -ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2ln -biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp -U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y -aXR5IC0gRzQwHhcNMDcxMTA1MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCByjELMAkG -A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZWZXJp -U2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2lnbiwg -SW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJpU2ln -biBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5 -IC0gRzQwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAASnVnp8Utpkmw4tXNherJI9/gHm -GUo9FANL+mAnINmDiWn6VMaaGF5VKmTeBvaNSjutEDxlPZCIBIngMGGzrl0Bp3ve -fLK+ymVhAIau2o970ImtTR1ZmkGxvEeA3J5iw/mjgbIwga8wDwYDVR0TAQH/BAUw -AwEB/zAOBgNVHQ8BAf8EBAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJ -aW1hZ2UvZ2lmMCEwHzAHBgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYj -aHR0cDovL2xvZ28udmVyaXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFLMW -kf3upm7ktS5Jj4d4gYDs5bG1MAoGCCqGSM49BAMDA2gAMGUCMGYhDBgmYFo4e1ZC -4Kf8NoRRkSAsdk1DPcQdhCPQrNZ8NQbOzWm9kA3bbEhCHQ6qQgIxAJw9SDkjOVga -FRJZap7v1VmyHVIsmXHNxynfGyphe3HR3vPA5Q06Sqotp9iGKt0uEA== ------END CERTIFICATE----- - # Issuer: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services) # Subject: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. 
OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services) # Label: "NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny" @@ -1499,47 +791,6 @@ uLjbvrW5KfnaNwUASZQDhETnv0Mxz3WLJdH0pmT1kvarBes96aULNmLazAZfNou2 XjG4Kvte9nHfRCaexOYNkbQudZWAUWpLMKawYqGT8ZvYzsRjdT9ZR7E= -----END CERTIFICATE----- -# Issuer: CN=Staat der Nederlanden Root CA - G2 O=Staat der Nederlanden -# Subject: CN=Staat der Nederlanden Root CA - G2 O=Staat der Nederlanden -# Label: "Staat der Nederlanden Root CA - G2" -# Serial: 10000012 -# MD5 Fingerprint: 7c:a5:0f:f8:5b:9a:7d:6d:30:ae:54:5a:e3:42:a2:8a -# SHA1 Fingerprint: 59:af:82:79:91:86:c7:b4:75:07:cb:cf:03:57:46:eb:04:dd:b7:16 -# SHA256 Fingerprint: 66:8c:83:94:7d:a6:3b:72:4b:ec:e1:74:3c:31:a0:e6:ae:d0:db:8e:c5:b3:1b:e3:77:bb:78:4f:91:b6:71:6f ------BEGIN CERTIFICATE----- -MIIFyjCCA7KgAwIBAgIEAJiWjDANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJO -TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSswKQYDVQQDDCJTdGFh -dCBkZXIgTmVkZXJsYW5kZW4gUm9vdCBDQSAtIEcyMB4XDTA4MDMyNjExMTgxN1oX -DTIwMDMyNTExMDMxMFowWjELMAkGA1UEBhMCTkwxHjAcBgNVBAoMFVN0YWF0IGRl -ciBOZWRlcmxhbmRlbjErMCkGA1UEAwwiU3RhYXQgZGVyIE5lZGVybGFuZGVuIFJv -b3QgQ0EgLSBHMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMVZ5291 -qj5LnLW4rJ4L5PnZyqtdj7U5EILXr1HgO+EASGrP2uEGQxGZqhQlEq0i6ABtQ8Sp -uOUfiUtnvWFI7/3S4GCI5bkYYCjDdyutsDeqN95kWSpGV+RLufg3fNU254DBtvPU -Z5uW6M7XxgpT0GtJlvOjCwV3SPcl5XCsMBQgJeN/dVrlSPhOewMHBPqCYYdu8DvE -pMfQ9XQ+pV0aCPKbJdL2rAQmPlU6Yiile7Iwr/g3wtG61jj99O9JMDeZJiFIhQGp -5Rbn3JBV3w/oOM2ZNyFPXfUib2rFEhZgF1XyZWampzCROME4HYYEhLoaJXhena/M -UGDWE4dS7WMfbWV9whUYdMrhfmQpjHLYFhN9C0lK8SgbIHRrxT3dsKpICT0ugpTN -GmXZK4iambwYfp/ufWZ8Pr2UuIHOzZgweMFvZ9C+X+Bo7d7iscksWXiSqt8rYGPy -5V6548r6f1CGPqI0GAwJaCgRHOThuVw+R7oyPxjMW4T182t0xHJ04eOLoEq9jWYv -6q012iDTiIJh8BIitrzQ1aTsr1SIJSQ8p22xcik/Plemf1WvbibG/ufMQFxRRIEK -eN5KzlW/HdXZt1bv8Hb/C3m1r737qWmRRpdogBQ2HbN/uymYNqUg+oJgYjOk7Na6 -B6duxc8UpufWkjTYgfX8HV2qXB72o007uPc5AgMBAAGjgZcwgZQwDwYDVR0TAQH/ -BAUwAwEB/zBSBgNVHSAESzBJMEcGBFUdIAAwPzA9BggrBgEFBQcCARYxaHR0cDov -L3d3dy5wa2lvdmVyaGVpZC5ubC9wb2xpY2llcy9yb290LXBvbGljeS1HMjAOBgNV -HQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJFoMocVHYnitfGsNig0jQt8YojrMA0GCSqG -SIb3DQEBCwUAA4ICAQCoQUpnKpKBglBu4dfYszk78wIVCVBR7y29JHuIhjv5tLyS -CZa59sCrI2AGeYwRTlHSeYAz+51IvuxBQ4EffkdAHOV6CMqqi3WtFMTC6GY8ggen -5ieCWxjmD27ZUD6KQhgpxrRW/FYQoAUXvQwjf/ST7ZwaUb7dRUG/kSS0H4zpX897 -IZmflZ85OkYcbPnNe5yQzSipx6lVu6xiNGI1E0sUOlWDuYaNkqbG9AclVMwWVxJK -gnjIFNkXgiYtXSAfea7+1HAWFpWD2DU5/1JddRwWxRNVz0fMdWVSSt7wsKfkCpYL -+63C4iWEst3kvX5ZbJvw8NjnyvLplzh+ib7M+zkXYT9y2zqR2GUBGR2tUKRXCnxL -vJxxcypFURmFzI79R6d0lR2o0a9OF7FpJsKqeFdbxU2n5Z4FF5TKsl+gSRiNNOkm -bEgeqmiSBeGCc1qb3AdbCG19ndeNIdn8FCCqwkXfP+cAslHkwvgFuXkajDTznlvk -N1trSt8sV4pAWja63XVECDdCcAz+3F4hoKOKwJCcaNpQ5kUQR3i2TtJlycM33+FC -Y7BXN0Ute4qcvwXqZVUz9zkQxSgqIXobisQk+T8VyJoVIPVVYpbtbZNQvOSqeK3Z -ywplh6ZmwcSBo3c6WB4L7oOLnR7SUqTMHW+wmG2UMbX4cQrcufx9MmDm66+KAQ== ------END CERTIFICATE----- - # Issuer: CN=Hongkong Post Root CA 1 O=Hongkong Post # Subject: CN=Hongkong Post Root CA 1 O=Hongkong Post # Label: "Hongkong Post Root CA 1" @@ -1743,105 +994,6 @@ naM8THLCV8Sg1Mw4J87VBp6iSNnpn86CcDaTmjvfliHjWbcM2pE38P1ZWrOZyGls QyYBNWNgVYkDOnXYukrZVP/u3oDYLdE41V4tC5h9Pmzb/CaIxw== -----END CERTIFICATE----- -# Issuer: CN=Chambers of Commerce Root - 2008 O=AC Camerfirma S.A. -# Subject: CN=Chambers of Commerce Root - 2008 O=AC Camerfirma S.A. 
-# Label: "Chambers of Commerce Root - 2008" -# Serial: 11806822484801597146 -# MD5 Fingerprint: 5e:80:9e:84:5a:0e:65:0b:17:02:f3:55:18:2a:3e:d7 -# SHA1 Fingerprint: 78:6a:74:ac:76:ab:14:7f:9c:6a:30:50:ba:9e:a8:7e:fe:9a:ce:3c -# SHA256 Fingerprint: 06:3e:4a:fa:c4:91:df:d3:32:f3:08:9b:85:42:e9:46:17:d8:93:d7:fe:94:4e:10:a7:93:7e:e2:9d:96:93:c0 ------BEGIN CERTIFICATE----- -MIIHTzCCBTegAwIBAgIJAKPaQn6ksa7aMA0GCSqGSIb3DQEBBQUAMIGuMQswCQYD -VQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3VycmVudCBhZGRyZXNzIGF0 -IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAGA1UEBRMJQTgyNzQzMjg3 -MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xKTAnBgNVBAMTIENoYW1iZXJz -IG9mIENvbW1lcmNlIFJvb3QgLSAyMDA4MB4XDTA4MDgwMTEyMjk1MFoXDTM4MDcz -MTEyMjk1MFowga4xCzAJBgNVBAYTAkVVMUMwQQYDVQQHEzpNYWRyaWQgKHNlZSBj -dXJyZW50IGFkZHJlc3MgYXQgd3d3LmNhbWVyZmlybWEuY29tL2FkZHJlc3MpMRIw -EAYDVQQFEwlBODI3NDMyODcxGzAZBgNVBAoTEkFDIENhbWVyZmlybWEgUy5BLjEp -MCcGA1UEAxMgQ2hhbWJlcnMgb2YgQ29tbWVyY2UgUm9vdCAtIDIwMDgwggIiMA0G -CSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCvAMtwNyuAWko6bHiUfaN/Gh/2NdW9 -28sNRHI+JrKQUrpjOyhYb6WzbZSm891kDFX29ufyIiKAXuFixrYp4YFs8r/lfTJq -VKAyGVn+H4vXPWCGhSRv4xGzdz4gljUha7MI2XAuZPeEklPWDrCQiorjh40G072Q -DuKZoRuGDtqaCrsLYVAGUvGef3bsyw/QHg3PmTA9HMRFEFis1tPo1+XqxQEHd9ZR -5gN/ikilTWh1uem8nk4ZcfUyS5xtYBkL+8ydddy/Js2Pk3g5eXNeJQ7KXOt3EgfL -ZEFHcpOrUMPrCXZkNNI5t3YRCQ12RcSprj1qr7V9ZS+UWBDsXHyvfuK2GNnQm05a -Sd+pZgvMPMZ4fKecHePOjlO+Bd5gD2vlGts/4+EhySnB8esHnFIbAURRPHsl18Tl -UlRdJQfKFiC4reRB7noI/plvg6aRArBsNlVq5331lubKgdaX8ZSD6e2wsWsSaR6s -+12pxZjptFtYer49okQ6Y1nUCyXeG0+95QGezdIp1Z8XGQpvvwyQ0wlf2eOKNcx5 -Wk0ZN5K3xMGtr/R5JJqyAQuxr1yW84Ay+1w9mPGgP0revq+ULtlVmhduYJ1jbLhj -ya6BXBg14JC7vjxPNyK5fuvPnnchpj04gftI2jE9K+OJ9dC1vX7gUMQSibMjmhAx -hduub+84Mxh2EQIDAQABo4IBbDCCAWgwEgYDVR0TAQH/BAgwBgEB/wIBDDAdBgNV -HQ4EFgQU+SSsD7K1+HnA+mCIG8TZTQKeFxkwgeMGA1UdIwSB2zCB2IAU+SSsD7K1 -+HnA+mCIG8TZTQKeFxmhgbSkgbEwga4xCzAJBgNVBAYTAkVVMUMwQQYDVQQHEzpN -YWRyaWQgKHNlZSBjdXJyZW50IGFkZHJlc3MgYXQgd3d3LmNhbWVyZmlybWEuY29t -L2FkZHJlc3MpMRIwEAYDVQQFEwlBODI3NDMyODcxGzAZBgNVBAoTEkFDIENhbWVy -ZmlybWEgUy5BLjEpMCcGA1UEAxMgQ2hhbWJlcnMgb2YgQ29tbWVyY2UgUm9vdCAt -IDIwMDiCCQCj2kJ+pLGu2jAOBgNVHQ8BAf8EBAMCAQYwPQYDVR0gBDYwNDAyBgRV -HSAAMCowKAYIKwYBBQUHAgEWHGh0dHA6Ly9wb2xpY3kuY2FtZXJmaXJtYS5jb20w -DQYJKoZIhvcNAQEFBQADggIBAJASryI1wqM58C7e6bXpeHxIvj99RZJe6dqxGfwW -PJ+0W2aeaufDuV2I6A+tzyMP3iU6XsxPpcG1Lawk0lgH3qLPaYRgM+gQDROpI9CF -5Y57pp49chNyM/WqfcZjHwj0/gF/JM8rLFQJ3uIrbZLGOU8W6jx+ekbURWpGqOt1 -glanq6B8aBMz9p0w8G8nOSQjKpD9kCk18pPfNKXG9/jvjA9iSnyu0/VU+I22mlaH -FoI6M6taIgj3grrqLuBHmrS1RaMFO9ncLkVAO+rcf+g769HsJtg1pDDFOqxXnrN2 -pSB7+R5KBWIBpih1YJeSDW4+TTdDDZIVnBgizVGZoCkaPF+KMjNbMMeJL0eYD6MD -xvbxrN8y8NmBGuScvfaAFPDRLLmF9dijscilIeUcE5fuDr3fKanvNFNb0+RqE4QG -tjICxFKuItLcsiFCGtpA8CnJ7AoMXOLQusxI0zcKzBIKinmwPQN/aUv0NCB9szTq -jktk9T79syNnFQ0EuPAtwQlRPLJsFfClI9eDdOTlLsn+mCdCxqvGnrDQWzilm1De -fhiYtUU79nm06PcaewaD+9CL2rvHvRirCG88gGtAPxkZumWK5r7VXNM21+9AUiRg -OGcEMeyP84LG3rlV8zsxkVrctQgVrXYlCg17LofiDKYGvCYQbTed7N14jHyAxfDZ -d0jQ ------END CERTIFICATE----- - -# Issuer: CN=Global Chambersign Root - 2008 O=AC Camerfirma S.A. -# Subject: CN=Global Chambersign Root - 2008 O=AC Camerfirma S.A. 
-# Label: "Global Chambersign Root - 2008" -# Serial: 14541511773111788494 -# MD5 Fingerprint: 9e:80:ff:78:01:0c:2e:c1:36:bd:fe:96:90:6e:08:f3 -# SHA1 Fingerprint: 4a:bd:ee:ec:95:0d:35:9c:89:ae:c7:52:a1:2c:5b:29:f6:d6:aa:0c -# SHA256 Fingerprint: 13:63:35:43:93:34:a7:69:80:16:a0:d3:24:de:72:28:4e:07:9d:7b:52:20:bb:8f:bd:74:78:16:ee:be:ba:ca ------BEGIN CERTIFICATE----- -MIIHSTCCBTGgAwIBAgIJAMnN0+nVfSPOMA0GCSqGSIb3DQEBBQUAMIGsMQswCQYD -VQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3VycmVudCBhZGRyZXNzIGF0 -IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAGA1UEBRMJQTgyNzQzMjg3 -MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xJzAlBgNVBAMTHkdsb2JhbCBD -aGFtYmVyc2lnbiBSb290IC0gMjAwODAeFw0wODA4MDExMjMxNDBaFw0zODA3MzEx -MjMxNDBaMIGsMQswCQYDVQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3Vy -cmVudCBhZGRyZXNzIGF0IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAG -A1UEBRMJQTgyNzQzMjg3MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xJzAl -BgNVBAMTHkdsb2JhbCBDaGFtYmVyc2lnbiBSb290IC0gMjAwODCCAiIwDQYJKoZI -hvcNAQEBBQADggIPADCCAgoCggIBAMDfVtPkOpt2RbQT2//BthmLN0EYlVJH6xed -KYiONWwGMi5HYvNJBL99RDaxccy9Wglz1dmFRP+RVyXfXjaOcNFccUMd2drvXNL7 -G706tcuto8xEpw2uIRU/uXpbknXYpBI4iRmKt4DS4jJvVpyR1ogQC7N0ZJJ0YPP2 -zxhPYLIj0Mc7zmFLmY/CDNBAspjcDahOo7kKrmCgrUVSY7pmvWjg+b4aqIG7HkF4 -ddPB/gBVsIdU6CeQNR1MM62X/JcumIS/LMmjv9GYERTtY/jKmIhYF5ntRQOXfjyG -HoiMvvKRhI9lNNgATH23MRdaKXoKGCQwoze1eqkBfSbW+Q6OWfH9GzO1KTsXO0G2 -Id3UwD2ln58fQ1DJu7xsepeY7s2MH/ucUa6LcL0nn3HAa6x9kGbo1106DbDVwo3V -yJ2dwW3Q0L9R5OP4wzg2rtandeavhENdk5IMagfeOx2YItaswTXbo6Al/3K1dh3e -beksZixShNBFks4c5eUzHdwHU1SjqoI7mjcv3N2gZOnm3b2u/GSFHTynyQbehP9r -6GsaPMWis0L7iwk+XwhSx2LE1AVxv8Rk5Pihg+g+EpuoHtQ2TS9x9o0o9oOpE9Jh -wZG7SMA0j0GMS0zbaRL/UJScIINZc+18ofLx/d33SdNDWKBWY8o9PeU1VlnpDsog -zCtLkykPAgMBAAGjggFqMIIBZjASBgNVHRMBAf8ECDAGAQH/AgEMMB0GA1UdDgQW -BBS5CcqcHtvTbDprru1U8VuTBjUuXjCB4QYDVR0jBIHZMIHWgBS5CcqcHtvTbDpr -ru1U8VuTBjUuXqGBsqSBrzCBrDELMAkGA1UEBhMCRVUxQzBBBgNVBAcTOk1hZHJp -ZCAoc2VlIGN1cnJlbnQgYWRkcmVzcyBhdCB3d3cuY2FtZXJmaXJtYS5jb20vYWRk -cmVzcykxEjAQBgNVBAUTCUE4Mjc0MzI4NzEbMBkGA1UEChMSQUMgQ2FtZXJmaXJt -YSBTLkEuMScwJQYDVQQDEx5HbG9iYWwgQ2hhbWJlcnNpZ24gUm9vdCAtIDIwMDiC -CQDJzdPp1X0jzjAOBgNVHQ8BAf8EBAMCAQYwPQYDVR0gBDYwNDAyBgRVHSAAMCow -KAYIKwYBBQUHAgEWHGh0dHA6Ly9wb2xpY3kuY2FtZXJmaXJtYS5jb20wDQYJKoZI -hvcNAQEFBQADggIBAICIf3DekijZBZRG/5BXqfEv3xoNa/p8DhxJJHkn2EaqbylZ -UohwEurdPfWbU1Rv4WCiqAm57OtZfMY18dwY6fFn5a+6ReAJ3spED8IXDneRRXoz -X1+WLGiLwUePmJs9wOzL9dWCkoQ10b42OFZyMVtHLaoXpGNR6woBrX/sdZ7LoR/x -fxKxueRkf2fWIyr0uDldmOghp+G9PUIadJpwr2hsUF1Jz//7Dl3mLEfXgTpZALVz -a2Mg9jFFCDkO9HB+QHBaP9BrQql0PSgvAm11cpUJjUhjxsYjV5KTXjXBjfkK9yyd -Yhz2rXzdpjEetrHHfoUm+qRqtdpjMNHvkzeyZi99Bffnt0uYlDXA2TopwZ2yUDMd -SqlapskD7+3056huirRXhOukP9DuqqqHW2Pok+JrqNS4cnhrG+055F3Lm6qH1U9O -AP7Zap88MQ8oAgF9mOinsKJknnn4SPIVqczmyETrP3iZ8ntxPjzxmKfFGBI/5rso -M0LpRQp8bfKGeS/Fghl9CYl8slR2iK7ewfPM4W7bMdaTrpmg7yVqc5iJWzouE4ge -v8CSlDQb4ye3ix5vQv/n6TebUB0tovkC7stYWDpxvGjjqsGvHCgfotwjZT+B6q6Z -09gwzxMNTxXJhLynSC34MCN32EZLeW32jO06f2ARePTpm67VVMB0gNELQp/B ------END CERTIFICATE----- - # Issuer: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. # Subject: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. # Label: "Go Daddy Root Certificate Authority - G2" @@ -2140,39 +1292,6 @@ t/2jioSgrGK+KwmHNPBqAbubKVY8/gA3zyNs8U6qtnRGEmyR7jTV7JqR50S+kDFy SjnRBUkLp7Y3gaVdjKozXoEofKd9J+sAro03 -----END CERTIFICATE----- -# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2011 O=Hellenic Academic and Research Institutions Cert. 
Authority -# Subject: CN=Hellenic Academic and Research Institutions RootCA 2011 O=Hellenic Academic and Research Institutions Cert. Authority -# Label: "Hellenic Academic and Research Institutions RootCA 2011" -# Serial: 0 -# MD5 Fingerprint: 73:9f:4c:4b:73:5b:79:e9:fa:ba:1c:ef:6e:cb:d5:c9 -# SHA1 Fingerprint: fe:45:65:9b:79:03:5b:98:a1:61:b5:51:2e:ac:da:58:09:48:22:4d -# SHA256 Fingerprint: bc:10:4f:15:a4:8b:e7:09:dc:a5:42:a7:e1:d4:b9:df:6f:05:45:27:e8:02:ea:a9:2d:59:54:44:25:8a:fe:71 ------BEGIN CERTIFICATE----- -MIIEMTCCAxmgAwIBAgIBADANBgkqhkiG9w0BAQUFADCBlTELMAkGA1UEBhMCR1Ix -RDBCBgNVBAoTO0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1 -dGlvbnMgQ2VydC4gQXV0aG9yaXR5MUAwPgYDVQQDEzdIZWxsZW5pYyBBY2FkZW1p -YyBhbmQgUmVzZWFyY2ggSW5zdGl0dXRpb25zIFJvb3RDQSAyMDExMB4XDTExMTIw -NjEzNDk1MloXDTMxMTIwMTEzNDk1MlowgZUxCzAJBgNVBAYTAkdSMUQwQgYDVQQK -EztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0dXRpb25zIENl -cnQuIEF1dGhvcml0eTFAMD4GA1UEAxM3SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl -c2VhcmNoIEluc3RpdHV0aW9ucyBSb290Q0EgMjAxMTCCASIwDQYJKoZIhvcNAQEB -BQADggEPADCCAQoCggEBAKlTAOMupvaO+mDYLZU++CwqVE7NuYRhlFhPjz2L5EPz -dYmNUeTDN9KKiE15HrcS3UN4SoqS5tdI1Q+kOilENbgH9mgdVc04UfCMJDGFr4PJ -fel3r+0ae50X+bOdOFAPplp5kYCvN66m0zH7tSYJnTxa71HFK9+WXesyHgLacEns -bgzImjeN9/E2YEsmLIKe0HjzDQ9jpFEw4fkrJxIH2Oq9GGKYsFk3fb7u8yBRQlqD -75O6aRXxYp2fmTmCobd0LovUxQt7L/DICto9eQqakxylKHJzkUOap9FNhYS5qXSP -FEDH3N6sQWRstBmbAmNtJGSPRLIl6s5ddAxjMlyNh+UCAwEAAaOBiTCBhjAPBgNV -HRMBAf8EBTADAQH/MAsGA1UdDwQEAwIBBjAdBgNVHQ4EFgQUppFC/RNhSiOeCKQp -5dgTBCPuQSUwRwYDVR0eBEAwPqA8MAWCAy5ncjAFggMuZXUwBoIELmVkdTAGggQu -b3JnMAWBAy5ncjAFgQMuZXUwBoEELmVkdTAGgQQub3JnMA0GCSqGSIb3DQEBBQUA -A4IBAQAf73lB4XtuP7KMhjdCSk4cNx6NZrokgclPEg8hwAOXhiVtXdMiKahsog2p -6z0GW5k6x8zDmjR/qw7IThzh+uTczQ2+vyT+bOdrwg3IBp5OjWEopmr95fZi6hg8 -TqBTnbI6nOulnJEWtk2C4AwFSKls9cz4y51JtPACpf1wA+2KIaWuE4ZJwzNzvoc7 -dIsXRSZMFpGD/md9zU1jZ/rzAxKWeAaNsWftjj++n08C9bMJL/NMh98qy5V8Acys -Nnq/onN694/BtZqhFLKPM58N7yLcZnuEvUUXBj08yrl3NI/K6s8/MT7jiOOASSXI -l7WdmplNsDz4SgCbZN2fOUvRJ9e4 ------END CERTIFICATE----- - # Issuer: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967 # Subject: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967 # Label: "Actalis Authentication Root CA" @@ -2214,35 +1333,6 @@ LysRJyU3eExRarDzzFhdFPFqSBX/wge2sY0PjlxQRrM9vwGYT7JZVEc+NHt4bVaT LnPqZih4zR0Uv6CPLy64Lo7yFIrM6bV8+2ydDKXhlg== -----END CERTIFICATE----- -# Issuer: O=Trustis Limited OU=Trustis FPS Root CA -# Subject: O=Trustis Limited OU=Trustis FPS Root CA -# Label: "Trustis FPS Root CA" -# Serial: 36053640375399034304724988975563710553 -# MD5 Fingerprint: 30:c9:e7:1e:6b:e6:14:eb:65:b2:16:69:20:31:67:4d -# SHA1 Fingerprint: 3b:c0:38:0b:33:c3:f6:a6:0c:86:15:22:93:d9:df:f5:4b:81:c0:04 -# SHA256 Fingerprint: c1:b4:82:99:ab:a5:20:8f:e9:63:0a:ce:55:ca:68:a0:3e:da:5a:51:9c:88:02:a0:d3:a6:73:be:8f:8e:55:7d ------BEGIN CERTIFICATE----- -MIIDZzCCAk+gAwIBAgIQGx+ttiD5JNM2a/fH8YygWTANBgkqhkiG9w0BAQUFADBF -MQswCQYDVQQGEwJHQjEYMBYGA1UEChMPVHJ1c3RpcyBMaW1pdGVkMRwwGgYDVQQL -ExNUcnVzdGlzIEZQUyBSb290IENBMB4XDTAzMTIyMzEyMTQwNloXDTI0MDEyMTEx -MzY1NFowRTELMAkGA1UEBhMCR0IxGDAWBgNVBAoTD1RydXN0aXMgTGltaXRlZDEc -MBoGA1UECxMTVHJ1c3RpcyBGUFMgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQAD -ggEPADCCAQoCggEBAMVQe547NdDfxIzNjpvto8A2mfRC6qc+gIMPpqdZh8mQRUN+ -AOqGeSoDvT03mYlmt+WKVoaTnGhLaASMk5MCPjDSNzoiYYkchU59j9WvezX2fihH -iTHcDnlkH5nSW7r+f2C/revnPDgpai/lkQtV/+xvWNUtyd5MZnGPDNcE2gfmHhjj -vSkCqPoc4Vu5g6hBSLwacY3nYuUtsuvffM/bq1rKMfFMIvMFE/eC+XN5DL7XSxzA -0RU8k0Fk0ea+IxciAIleH2ulrG6nS4zto3Lmr2NNL4XSFDWaLk6M6jKYKIahkQlB 
-OrTh4/L68MkKokHdqeMDx4gVOxzUGpTXn2RZEm0CAwEAAaNTMFEwDwYDVR0TAQH/ -BAUwAwEB/zAfBgNVHSMEGDAWgBS6+nEleYtXQSUhhgtx67JkDoshZzAdBgNVHQ4E -FgQUuvpxJXmLV0ElIYYLceuyZA6LIWcwDQYJKoZIhvcNAQEFBQADggEBAH5Y//01 -GX2cGE+esCu8jowU/yyg2kdbw++BLa8F6nRIW/M+TgfHbcWzk88iNVy2P3UnXwmW -zaD+vkAMXBJV+JOCyinpXj9WV4s4NvdFGkwozZ5BuO1WTISkQMi4sKUraXAEasP4 -1BIy+Q7DsdwyhEQsb8tGD+pmQQ9P8Vilpg0ND2HepZ5dfWWhPBfnqFVO76DH7cZE -f1T1o+CP8HxVIo8ptoGj4W1OLBuAZ+ytIJ8MYmHVl/9D7S3B2l0pKoU/rGXuhg8F -jZBf3+6f9L/uHfuY5H+QK4R4EA5sSVPvFVtlRkpdr7r7OnIdzfYliB6XzCGcKQEN -ZetX2fNXlrtIzYE= ------END CERTIFICATE----- - # Issuer: CN=Buypass Class 2 Root CA O=Buypass AS-983163327 # Subject: CN=Buypass Class 2 Root CA O=Buypass AS-983163327 # Label: "Buypass Class 2 Root CA" @@ -2352,38 +1442,6 @@ e9eiPZaGzPImNC1qkp2aGtAw4l1OBLBfiyB+d8E9lYLRRpo7PHi4b6HQDWSieB4p TpPDpFQUWw== -----END CERTIFICATE----- -# Issuer: CN=EE Certification Centre Root CA O=AS Sertifitseerimiskeskus -# Subject: CN=EE Certification Centre Root CA O=AS Sertifitseerimiskeskus -# Label: "EE Certification Centre Root CA" -# Serial: 112324828676200291871926431888494945866 -# MD5 Fingerprint: 43:5e:88:d4:7d:1a:4a:7e:fd:84:2e:52:eb:01:d4:6f -# SHA1 Fingerprint: c9:a8:b9:e7:55:80:5e:58:e3:53:77:a7:25:eb:af:c3:7b:27:cc:d7 -# SHA256 Fingerprint: 3e:84:ba:43:42:90:85:16:e7:75:73:c0:99:2f:09:79:ca:08:4e:46:85:68:1f:f1:95:cc:ba:8a:22:9b:8a:76 ------BEGIN CERTIFICATE----- -MIIEAzCCAuugAwIBAgIQVID5oHPtPwBMyonY43HmSjANBgkqhkiG9w0BAQUFADB1 -MQswCQYDVQQGEwJFRTEiMCAGA1UECgwZQVMgU2VydGlmaXRzZWVyaW1pc2tlc2t1 -czEoMCYGA1UEAwwfRUUgQ2VydGlmaWNhdGlvbiBDZW50cmUgUm9vdCBDQTEYMBYG -CSqGSIb3DQEJARYJcGtpQHNrLmVlMCIYDzIwMTAxMDMwMTAxMDMwWhgPMjAzMDEy -MTcyMzU5NTlaMHUxCzAJBgNVBAYTAkVFMSIwIAYDVQQKDBlBUyBTZXJ0aWZpdHNl -ZXJpbWlza2Vza3VzMSgwJgYDVQQDDB9FRSBDZXJ0aWZpY2F0aW9uIENlbnRyZSBS -b290IENBMRgwFgYJKoZIhvcNAQkBFglwa2lAc2suZWUwggEiMA0GCSqGSIb3DQEB -AQUAA4IBDwAwggEKAoIBAQDIIMDs4MVLqwd4lfNE7vsLDP90jmG7sWLqI9iroWUy -euuOF0+W2Ap7kaJjbMeMTC55v6kF/GlclY1i+blw7cNRfdCT5mzrMEvhvH2/UpvO -bntl8jixwKIy72KyaOBhU8E2lf/slLo2rpwcpzIP5Xy0xm90/XsY6KxX7QYgSzIw -WFv9zajmofxwvI6Sc9uXp3whrj3B9UiHbCe9nyV0gVWw93X2PaRka9ZP585ArQ/d -MtO8ihJTmMmJ+xAdTX7Nfh9WDSFwhfYggx/2uh8Ej+p3iDXE/+pOoYtNP2MbRMNE -1CV2yreN1x5KZmTNXMWcg+HCCIia7E6j8T4cLNlsHaFLAgMBAAGjgYowgYcwDwYD -VR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFBLyWj7qVhy/ -zQas8fElyalL1BSZMEUGA1UdJQQ+MDwGCCsGAQUFBwMCBggrBgEFBQcDAQYIKwYB -BQUHAwMGCCsGAQUFBwMEBggrBgEFBQcDCAYIKwYBBQUHAwkwDQYJKoZIhvcNAQEF -BQADggEBAHv25MANqhlHt01Xo/6tu7Fq1Q+e2+RjxY6hUFaTlrg4wCQiZrxTFGGV -v9DHKpY5P30osxBAIWrEr7BSdxjhlthWXePdNl4dp1BUoMUq5KqMlIpPnTX/dqQG -E5Gion0ARD9V04I8GtVbvFZMIi5GQ4okQC3zErg7cBqklrkar4dBGmoYDQZPxz5u -uSlNDUmJEYcyW+ZLBMjkXOZ0c5RdFpgTlf7727FE5TpwrDdr5rMzcijJs1eg9gIW -iAYLtqZLICjU3j2LrTcFU3T+bsy8QxdxXvnFzBqpYe73dgzzcvRyrc9yAjYHR8/v -GVCJYMzpJJUPwssd8m92kMfMdcGWxZ0= ------END CERTIFICATE----- - # Issuer: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH # Subject: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH # Label: "D-TRUST Root Class 3 CA 2 2009" @@ -3093,27 +2151,6 @@ zzuqQhFkoJ2UOQIReVx7Hfpkue4WQrO/isIJxOzksU0CMQDpKmFHjFJKS04YcPbW RNZu9YO6bVi9JNlWSOrvxKJGgYhqOkbRqZtNyWHa0V1Xahg= -----END CERTIFICATE----- -# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4 -# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4 -# Label: "GlobalSign ECC Root CA - R4" -# Serial: 14367148294922964480859022125800977897474 -# MD5 Fingerprint: 20:f0:27:68:d1:7e:a0:9d:0e:e6:2a:ca:df:5c:89:8e -# SHA1 Fingerprint: 69:69:56:2e:40:80:f4:24:a1:e7:19:9f:14:ba:f3:ee:58:ab:6a:bb 
-# SHA256 Fingerprint: be:c9:49:11:c2:95:56:76:db:6c:0a:55:09:86:d7:6e:3b:a0:05:66:7c:44:2c:97:62:b4:fb:b7:73:de:22:8c ------BEGIN CERTIFICATE----- -MIIB4TCCAYegAwIBAgIRKjikHJYKBN5CsiilC+g0mAIwCgYIKoZIzj0EAwIwUDEk -MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI0MRMwEQYDVQQKEwpH -bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX -DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD -QSAtIFI0MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu -MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEuMZ5049sJQ6fLjkZHAOkrprlOQcJ -FspjsbmG+IpXwVfOQvpzofdlQv8ewQCybnMO/8ch5RikqtlxP6jUuc6MHaNCMEAw -DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFFSwe61F -uOJAf/sKbvu+M8k8o4TVMAoGCCqGSM49BAMCA0gAMEUCIQDckqGgE6bPA7DmxCGX -kPoUVy0D7O48027KqGx2vKLeuwIgJ6iFJzWbVsaj8kfSt24bAgAXqmemFZHe+pTs -ewv4n4Q= ------END CERTIFICATE----- - # Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5 # Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5 # Label: "GlobalSign ECC Root CA - R5" @@ -3136,86 +2173,6 @@ KoZIzj0EAwMDaAAwZQIxAOVpEslu28YxuglB4Zf4+/2a4n0Sye18ZNPLBSWLVtmg xwy8p2Fp8fc74SrL+SvzZpA3 -----END CERTIFICATE----- -# Issuer: CN=Staat der Nederlanden Root CA - G3 O=Staat der Nederlanden -# Subject: CN=Staat der Nederlanden Root CA - G3 O=Staat der Nederlanden -# Label: "Staat der Nederlanden Root CA - G3" -# Serial: 10003001 -# MD5 Fingerprint: 0b:46:67:07:db:10:2f:19:8c:35:50:60:d1:0b:f4:37 -# SHA1 Fingerprint: d8:eb:6b:41:51:92:59:e0:f3:e7:85:00:c0:3d:b6:88:97:c9:ee:fc -# SHA256 Fingerprint: 3c:4f:b0:b9:5a:b8:b3:00:32:f4:32:b8:6f:53:5f:e1:72:c1:85:d0:fd:39:86:58:37:cf:36:18:7f:a6:f4:28 ------BEGIN CERTIFICATE----- -MIIFdDCCA1ygAwIBAgIEAJiiOTANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJO -TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSswKQYDVQQDDCJTdGFh -dCBkZXIgTmVkZXJsYW5kZW4gUm9vdCBDQSAtIEczMB4XDTEzMTExNDExMjg0MloX -DTI4MTExMzIzMDAwMFowWjELMAkGA1UEBhMCTkwxHjAcBgNVBAoMFVN0YWF0IGRl -ciBOZWRlcmxhbmRlbjErMCkGA1UEAwwiU3RhYXQgZGVyIE5lZGVybGFuZGVuIFJv -b3QgQ0EgLSBHMzCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAL4yolQP -cPssXFnrbMSkUeiFKrPMSjTysF/zDsccPVMeiAho2G89rcKezIJnByeHaHE6n3WW -IkYFsO2tx1ueKt6c/DrGlaf1F2cY5y9JCAxcz+bMNO14+1Cx3Gsy8KL+tjzk7FqX -xz8ecAgwoNzFs21v0IJyEavSgWhZghe3eJJg+szeP4TrjTgzkApyI/o1zCZxMdFy -KJLZWyNtZrVtB0LrpjPOktvA9mxjeM3KTj215VKb8b475lRgsGYeCasH/lSJEULR -9yS6YHgamPfJEf0WwTUaVHXvQ9Plrk7O53vDxk5hUUurmkVLoR9BvUhTFXFkC4az -5S6+zqQbwSmEorXLCCN2QyIkHxcE1G6cxvx/K2Ya7Irl1s9N9WMJtxU51nus6+N8 -6U78dULI7ViVDAZCopz35HCz33JvWjdAidiFpNfxC95DGdRKWCyMijmev4SH8RY7 -Ngzp07TKbBlBUgmhHbBqv4LvcFEhMtwFdozL92TkA1CvjJFnq8Xy7ljY3r735zHP -bMk7ccHViLVlvMDoFxcHErVc0qsgk7TmgoNwNsXNo42ti+yjwUOH5kPiNL6VizXt -BznaqB16nzaeErAMZRKQFWDZJkBE41ZgpRDUajz9QdwOWke275dhdU/Z/seyHdTt -XUmzqWrLZoQT1Vyg3N9udwbRcXXIV2+vD3dbAgMBAAGjQjBAMA8GA1UdEwEB/wQF -MAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRUrfrHkleuyjWcLhL75Lpd -INyUVzANBgkqhkiG9w0BAQsFAAOCAgEAMJmdBTLIXg47mAE6iqTnB/d6+Oea31BD -U5cqPco8R5gu4RV78ZLzYdqQJRZlwJ9UXQ4DO1t3ApyEtg2YXzTdO2PCwyiBwpwp -LiniyMMB8jPqKqrMCQj3ZWfGzd/TtiunvczRDnBfuCPRy5FOCvTIeuXZYzbB1N/8 -Ipf3YF3qKS9Ysr1YvY2WTxB1v0h7PVGHoTx0IsL8B3+A3MSs/mrBcDCw6Y5p4ixp -gZQJut3+TcCDjJRYwEYgr5wfAvg1VUkvRtTA8KCWAg8zxXHzniN9lLf9OtMJgwYh -/WA9rjLA0u6NpvDntIJ8CsxwyXmA+P5M9zWEGYox+wrZ13+b8KKaa8MFSu1BYBQw -0aoRQm7TIwIEC8Zl3d1Sd9qBa7Ko+gE4uZbqKmxnl4mUnrzhVNXkanjvSr0rmj1A -fsbAddJu+2gw7OyLnflJNZoaLNmzlTnVHpL3prllL+U9bTpITAjc5CgSKL59NVzq -4BZ+Extq1z7XnvwtdbLBFNUjA9tbbws+eC8N3jONFrdI54OagQ97wUNNVQQXOEpR -1VmiiXTTn74eS9fGbbeIJG9gkaSChVtWQbzQRKtqE77RLFi3EjNYsjdj3BP1lB0/ 
-QFH1T/U67cjF68IeHRaVesd+QnGTbksVtzDfqu1XhUisHWrdOWnk4Xl4vs4Fv6EM -94B7IWcnMFk= ------END CERTIFICATE----- - -# Issuer: CN=Staat der Nederlanden EV Root CA O=Staat der Nederlanden -# Subject: CN=Staat der Nederlanden EV Root CA O=Staat der Nederlanden -# Label: "Staat der Nederlanden EV Root CA" -# Serial: 10000013 -# MD5 Fingerprint: fc:06:af:7b:e8:1a:f1:9a:b4:e8:d2:70:1f:c0:f5:ba -# SHA1 Fingerprint: 76:e2:7e:c1:4f:db:82:c1:c0:a6:75:b5:05:be:3d:29:b4:ed:db:bb -# SHA256 Fingerprint: 4d:24:91:41:4c:fe:95:67:46:ec:4c:ef:a6:cf:6f:72:e2:8a:13:29:43:2f:9d:8a:90:7a:c4:cb:5d:ad:c1:5a ------BEGIN CERTIFICATE----- -MIIFcDCCA1igAwIBAgIEAJiWjTANBgkqhkiG9w0BAQsFADBYMQswCQYDVQQGEwJO -TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSkwJwYDVQQDDCBTdGFh -dCBkZXIgTmVkZXJsYW5kZW4gRVYgUm9vdCBDQTAeFw0xMDEyMDgxMTE5MjlaFw0y -MjEyMDgxMTEwMjhaMFgxCzAJBgNVBAYTAk5MMR4wHAYDVQQKDBVTdGFhdCBkZXIg -TmVkZXJsYW5kZW4xKTAnBgNVBAMMIFN0YWF0IGRlciBOZWRlcmxhbmRlbiBFViBS -b290IENBMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA48d+ifkkSzrS -M4M1LGns3Amk41GoJSt5uAg94JG6hIXGhaTK5skuU6TJJB79VWZxXSzFYGgEt9nC -UiY4iKTWO0Cmws0/zZiTs1QUWJZV1VD+hq2kY39ch/aO5ieSZxeSAgMs3NZmdO3d -Z//BYY1jTw+bbRcwJu+r0h8QoPnFfxZpgQNH7R5ojXKhTbImxrpsX23Wr9GxE46p -rfNeaXUmGD5BKyF/7otdBwadQ8QpCiv8Kj6GyzyDOvnJDdrFmeK8eEEzduG/L13l -pJhQDBXd4Pqcfzho0LKmeqfRMb1+ilgnQ7O6M5HTp5gVXJrm0w912fxBmJc+qiXb -j5IusHsMX/FjqTf5m3VpTCgmJdrV8hJwRVXj33NeN/UhbJCONVrJ0yPr08C+eKxC -KFhmpUZtcALXEPlLVPxdhkqHz3/KRawRWrUgUY0viEeXOcDPusBCAUCZSCELa6fS -/ZbV0b5GnUngC6agIk440ME8MLxwjyx1zNDFjFE7PZQIZCZhfbnDZY8UnCHQqv0X -cgOPvZuM5l5Tnrmd74K74bzickFbIZTTRTeU0d8JOV3nI6qaHcptqAqGhYqCvkIH -1vI4gnPah1vlPNOePqc7nvQDs/nxfRN0Av+7oeX6AHkcpmZBiFxgV6YuCcS6/ZrP -px9Aw7vMWgpVSzs4dlG4Y4uElBbmVvMCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB -/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFP6rAJCYniT8qcwaivsnuL8wbqg7 -MA0GCSqGSIb3DQEBCwUAA4ICAQDPdyxuVr5Os7aEAJSrR8kN0nbHhp8dB9O2tLsI -eK9p0gtJ3jPFrK3CiAJ9Brc1AsFgyb/E6JTe1NOpEyVa/m6irn0F3H3zbPB+po3u -2dfOWBfoqSmuc0iH55vKbimhZF8ZE/euBhD/UcabTVUlT5OZEAFTdfETzsemQUHS -v4ilf0X8rLiltTMMgsT7B/Zq5SWEXwbKwYY5EdtYzXc7LMJMD16a4/CrPmEbUCTC -wPTxGfARKbalGAKb12NMcIxHowNDXLldRqANb/9Zjr7dn3LDWyvfjFvO5QxGbJKy -CqNMVEIYFRIYvdr8unRu/8G2oGTYqV9Vrp9canaW2HNnh/tNf1zuacpzEPuKqf2e -vTY4SUmH9A4U8OmHuD+nT3pajnnUk+S7aFKErGzp85hwVXIy+TSrK0m1zSBi5Dp6 -Z2Orltxtrpfs/J92VoguZs9btsmksNcFuuEnL5O7Jiqik7Ab846+HUCjuTaPPoIa -Gl6I6lD4WeKDRikL40Rc4ZW2aZCaFG+XroHPaO+Zmr615+F/+PoTRxZMzG0IQOeL -eG9QgkRQP2YGiqtDhFZKDyAthg710tvSeopLzaXoTvFeJiUBWSOgftL2fiFX1ye8 -FVdMpEbB4IMeDExNH08GGeL5qPQ6gqGyeUN51q1veieQA6TqJIc/2b3Z6fJfUEkc -7uzXLg== ------END CERTIFICATE----- - # Issuer: CN=IdenTrust Commercial Root CA 1 O=IdenTrust # Subject: CN=IdenTrust Commercial Root CA 1 O=IdenTrust # Label: "IdenTrust Commercial Root CA 1" @@ -3749,47 +2706,6 @@ CkcO8DdZEv8tmZQoTipPNU0zWgIxAOp1AE47xDqUEpHJWEadIRNyp4iciuRMStuW 1KyLa2tJElMzrdfkviT8tQp21KW8EA== -----END CERTIFICATE----- -# Issuer: CN=LuxTrust Global Root 2 O=LuxTrust S.A. -# Subject: CN=LuxTrust Global Root 2 O=LuxTrust S.A. 
-# Label: "LuxTrust Global Root 2" -# Serial: 59914338225734147123941058376788110305822489521 -# MD5 Fingerprint: b2:e1:09:00:61:af:f7:f1:91:6f:c4:ad:8d:5e:3b:7c -# SHA1 Fingerprint: 1e:0e:56:19:0a:d1:8b:25:98:b2:04:44:ff:66:8a:04:17:99:5f:3f -# SHA256 Fingerprint: 54:45:5f:71:29:c2:0b:14:47:c4:18:f9:97:16:8f:24:c5:8f:c5:02:3b:f5:da:5b:e2:eb:6e:1d:d8:90:2e:d5 ------BEGIN CERTIFICATE----- -MIIFwzCCA6ugAwIBAgIUCn6m30tEntpqJIWe5rgV0xZ/u7EwDQYJKoZIhvcNAQEL -BQAwRjELMAkGA1UEBhMCTFUxFjAUBgNVBAoMDUx1eFRydXN0IFMuQS4xHzAdBgNV -BAMMFkx1eFRydXN0IEdsb2JhbCBSb290IDIwHhcNMTUwMzA1MTMyMTU3WhcNMzUw -MzA1MTMyMTU3WjBGMQswCQYDVQQGEwJMVTEWMBQGA1UECgwNTHV4VHJ1c3QgUy5B -LjEfMB0GA1UEAwwWTHV4VHJ1c3QgR2xvYmFsIFJvb3QgMjCCAiIwDQYJKoZIhvcN -AQEBBQADggIPADCCAgoCggIBANeFl78RmOnwYoNMPIf5U2o3C/IPPIfOb9wmKb3F -ibrJgz337spbxm1Jc7TJRqMbNBM/wYlFV/TZsfs2ZUv7COJIcRHIbjuend+JZTem -hfY7RBi2xjcwYkSSl2l9QjAk5A0MiWtj3sXh306pFGxT4GHO9hcvHTy95iJMHZP1 -EMShduxq3sVs35a0VkBCwGKSMKEtFZSg0iAGCW5qbeXrt77U8PEVfIvmTroTzEsn -Xpk8F12PgX8zPU/TPxvsXD/wPEx1bvKm1Z3aLQdjAsZy6ZS8TEmVT4hSyNvoaYL4 -zDRbIvCGp4m9SAptZoFtyMhk+wHh9OHe2Z7d21vUKpkmFRseTJIpgp7VkoGSQXAZ -96Tlk0u8d2cx3Rz9MXANF5kM+Qw5GSoXtTBxVdUPrljhPS80m8+f9niFwpN6cj5m -j5wWEWCPnolvZ77gR1o7DJpni89Gxq44o/KnvObWhWszJHAiS8sIm7vI+AIpHb4g -DEa/a4ebsypmQjVGbKq6rfmYe+lQVRQxv7HaLe2ArWgk+2mr2HETMOZns4dA/Yl+ -8kPREd8vZS9kzl8UubG/Mb2HeFpZZYiq/FkySIbWTLkpS5XTdvN3JW1CHDiDTf2j -X5t/Lax5Gw5CMZdjpPuKadUiDTSQMC6otOBttpSsvItO13D8xTiOZCXhTTmQzsmH -hFhxAgMBAAGjgagwgaUwDwYDVR0TAQH/BAUwAwEB/zBCBgNVHSAEOzA5MDcGByuB -KwEBAQowLDAqBggrBgEFBQcCARYeaHR0cHM6Ly9yZXBvc2l0b3J5Lmx1eHRydXN0 -Lmx1MA4GA1UdDwEB/wQEAwIBBjAfBgNVHSMEGDAWgBT/GCh2+UgFLKGu8SsbK7JT -+Et8szAdBgNVHQ4EFgQU/xgodvlIBSyhrvErGyuyU/hLfLMwDQYJKoZIhvcNAQEL -BQADggIBAGoZFO1uecEsh9QNcH7X9njJCwROxLHOk3D+sFTAMs2ZMGQXvw/l4jP9 -BzZAcg4atmpZ1gDlaCDdLnINH2pkMSCEfUmmWjfrRcmF9dTHF5kH5ptV5AzoqbTO -jFu1EVzPig4N1qx3gf4ynCSecs5U89BvolbW7MM3LGVYvlcAGvI1+ut7MV3CwRI9 -loGIlonBWVx65n9wNOeD4rHh4bhY79SV5GCc8JaXcozrhAIuZY+kt9J/Z93I055c -qqmkoCUUBpvsT34tC38ddfEz2O3OuHVtPlu5mB0xDVbYQw8wkbIEa91WvpWAVWe+ -2M2D2RjuLg+GLZKecBPs3lHJQ3gCpU3I+V/EkVhGFndadKpAvAefMLmx9xIX3eP/ -JEAdemrRTxgKqpAd60Ae36EeRJIQmvKN4dFLRp7oRUKX6kWZ8+xm1QL68qZKJKre -zrnK+T+Tb/mjuuqlPpmt/f97mfVl7vBZKGfXkJWkE4SphMHozs51k2MavDzq1WQf -LSoSOcbDWjLtR5EWDrw4wVDej8oqkDQc7kGUnF4ZLvhFSZl0kbAEb+MEWrGrKqv+ -x9CWttrhSmQGbmBNvUJO/3jaJMobtNeWOWyu8Q6qp31IiyBMz2TWuJdGsE7RKlY6 -oJO9r4Ak4Ap+58rVyuiFVdw2KuGUaJPHZnJED4AhMmwlxyOAgwrr ------END CERTIFICATE----- - # Issuer: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM # Subject: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM # Label: "TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1" @@ -3864,116 +2780,6 @@ T8p+ck0LcIymSLumoRT2+1hEmRSuqguTaaApJUqlyyvdimYHFngVV3Eb7PVHhPOe MTd61X8kreS8/f3MboPoDKi3QWwH3b08hpcv0g== -----END CERTIFICATE----- -# Issuer: CN=TrustCor RootCert CA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority -# Subject: CN=TrustCor RootCert CA-1 O=TrustCor Systems S. de R.L. 
OU=TrustCor Certificate Authority -# Label: "TrustCor RootCert CA-1" -# Serial: 15752444095811006489 -# MD5 Fingerprint: 6e:85:f1:dc:1a:00:d3:22:d5:b2:b2:ac:6b:37:05:45 -# SHA1 Fingerprint: ff:bd:cd:e7:82:c8:43:5e:3c:6f:26:86:5c:ca:a8:3a:45:5b:c3:0a -# SHA256 Fingerprint: d4:0e:9c:86:cd:8f:e4:68:c1:77:69:59:f4:9e:a7:74:fa:54:86:84:b6:c4:06:f3:90:92:61:f4:dc:e2:57:5c ------BEGIN CERTIFICATE----- -MIIEMDCCAxigAwIBAgIJANqb7HHzA7AZMA0GCSqGSIb3DQEBCwUAMIGkMQswCQYD -VQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEgQ2l0eTEk -MCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYDVQQLDB5U -cnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxHzAdBgNVBAMMFlRydXN0Q29y -IFJvb3RDZXJ0IENBLTEwHhcNMTYwMjA0MTIzMjE2WhcNMjkxMjMxMTcyMzE2WjCB -pDELMAkGA1UEBhMCUEExDzANBgNVBAgMBlBhbmFtYTEUMBIGA1UEBwwLUGFuYW1h -IENpdHkxJDAiBgNVBAoMG1RydXN0Q29yIFN5c3RlbXMgUy4gZGUgUi5MLjEnMCUG -A1UECwweVHJ1c3RDb3IgQ2VydGlmaWNhdGUgQXV0aG9yaXR5MR8wHQYDVQQDDBZU -cnVzdENvciBSb290Q2VydCBDQS0xMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB -CgKCAQEAv463leLCJhJrMxnHQFgKq1mqjQCj/IDHUHuO1CAmujIS2CNUSSUQIpid -RtLByZ5OGy4sDjjzGiVoHKZaBeYei0i/mJZ0PmnK6bV4pQa81QBeCQryJ3pS/C3V -seq0iWEk8xoT26nPUu0MJLq5nux+AHT6k61sKZKuUbS701e/s/OojZz0JEsq1pme -9J7+wH5COucLlVPat2gOkEz7cD+PSiyU8ybdY2mplNgQTsVHCJCZGxdNuWxu72CV -EY4hgLW9oHPY0LJ3xEXqWib7ZnZ2+AYfYW0PVcWDtxBWcgYHpfOxGgMFZA6dWorW -hnAbJN7+KIor0Gqw/Hqi3LJ5DotlDwIDAQABo2MwYTAdBgNVHQ4EFgQU7mtJPHo/ -DeOxCbeKyKsZn3MzUOcwHwYDVR0jBBgwFoAU7mtJPHo/DeOxCbeKyKsZn3MzUOcw -DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZIhvcNAQELBQAD -ggEBACUY1JGPE+6PHh0RU9otRCkZoB5rMZ5NDp6tPVxBb5UrJKF5mDo4Nvu7Zp5I -/5CQ7z3UuJu0h3U/IJvOcs+hVcFNZKIZBqEHMwwLKeXx6quj7LUKdJDHfXLy11yf -ke+Ri7fc7Waiz45mO7yfOgLgJ90WmMCV1Aqk5IGadZQ1nJBfiDcGrVmVCrDRZ9MZ -yonnMlo2HD6CqFqTvsbQZJG2z9m2GM/bftJlo6bEjhcxwft+dtvTheNYsnd6djts -L1Ac59v2Z3kf9YKVmgenFK+P3CghZwnS1k1aHBkcjndcw5QkPTJrS37UeJSDvjdN -zl/HHk484IkzlQsPpTLWPFp5LBk= ------END CERTIFICATE----- - -# Issuer: CN=TrustCor RootCert CA-2 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority -# Subject: CN=TrustCor RootCert CA-2 O=TrustCor Systems S. de R.L. 
OU=TrustCor Certificate Authority -# Label: "TrustCor RootCert CA-2" -# Serial: 2711694510199101698 -# MD5 Fingerprint: a2:e1:f8:18:0b:ba:45:d5:c7:41:2a:bb:37:52:45:64 -# SHA1 Fingerprint: b8:be:6d:cb:56:f1:55:b9:63:d4:12:ca:4e:06:34:c7:94:b2:1c:c0 -# SHA256 Fingerprint: 07:53:e9:40:37:8c:1b:d5:e3:83:6e:39:5d:ae:a5:cb:83:9e:50:46:f1:bd:0e:ae:19:51:cf:10:fe:c7:c9:65 ------BEGIN CERTIFICATE----- -MIIGLzCCBBegAwIBAgIIJaHfyjPLWQIwDQYJKoZIhvcNAQELBQAwgaQxCzAJBgNV -BAYTAlBBMQ8wDQYDVQQIDAZQYW5hbWExFDASBgNVBAcMC1BhbmFtYSBDaXR5MSQw -IgYDVQQKDBtUcnVzdENvciBTeXN0ZW1zIFMuIGRlIFIuTC4xJzAlBgNVBAsMHlRy -dXN0Q29yIENlcnRpZmljYXRlIEF1dGhvcml0eTEfMB0GA1UEAwwWVHJ1c3RDb3Ig -Um9vdENlcnQgQ0EtMjAeFw0xNjAyMDQxMjMyMjNaFw0zNDEyMzExNzI2MzlaMIGk -MQswCQYDVQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEg -Q2l0eTEkMCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYD -VQQLDB5UcnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxHzAdBgNVBAMMFlRy -dXN0Q29yIFJvb3RDZXJ0IENBLTIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK -AoICAQCnIG7CKqJiJJWQdsg4foDSq8GbZQWU9MEKENUCrO2fk8eHyLAnK0IMPQo+ -QVqedd2NyuCb7GgypGmSaIwLgQ5WoD4a3SwlFIIvl9NkRvRUqdw6VC0xK5mC8tkq -1+9xALgxpL56JAfDQiDyitSSBBtlVkxs1Pu2YVpHI7TYabS3OtB0PAx1oYxOdqHp -2yqlO/rOsP9+aij9JxzIsekp8VduZLTQwRVtDr4uDkbIXvRR/u8OYzo7cbrPb1nK -DOObXUm4TOJXsZiKQlecdu/vvdFoqNL0Cbt3Nb4lggjEFixEIFapRBF37120Hape -az6LMvYHL1cEksr1/p3C6eizjkxLAjHZ5DxIgif3GIJ2SDpxsROhOdUuxTTCHWKF -3wP+TfSvPd9cW436cOGlfifHhi5qjxLGhF5DUVCcGZt45vz27Ud+ez1m7xMTiF88 -oWP7+ayHNZ/zgp6kPwqcMWmLmaSISo5uZk3vFsQPeSghYA2FFn3XVDjxklb9tTNM -g9zXEJ9L/cb4Qr26fHMC4P99zVvh1Kxhe1fVSntb1IVYJ12/+CtgrKAmrhQhJ8Z3 -mjOAPF5GP/fDsaOGM8boXg25NSyqRsGFAnWAoOsk+xWq5Gd/bnc/9ASKL3x74xdh -8N0JqSDIvgmk0H5Ew7IwSjiqqewYmgeCK9u4nBit2uBGF6zPXQIDAQABo2MwYTAd -BgNVHQ4EFgQU2f4hQG6UnrybPZx9mCAZ5YwwYrIwHwYDVR0jBBgwFoAU2f4hQG6U -nrybPZx9mCAZ5YwwYrIwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYw -DQYJKoZIhvcNAQELBQADggIBAJ5Fngw7tu/hOsh80QA9z+LqBrWyOrsGS2h60COX -dKcs8AjYeVrXWoSK2BKaG9l9XE1wxaX5q+WjiYndAfrs3fnpkpfbsEZC89NiqpX+ -MWcUaViQCqoL7jcjx1BRtPV+nuN79+TMQjItSQzL/0kMmx40/W5ulop5A7Zv2wnL -/V9lFDfhOPXzYRZY5LVtDQsEGz9QLX+zx3oaFoBg+Iof6Rsqxvm6ARppv9JYx1RX -CI/hOWB3S6xZhBqI8d3LT3jX5+EzLfzuQfogsL7L9ziUwOHQhQ+77Sxzq+3+knYa -ZH9bDTMJBzN7Bj8RpFxwPIXAz+OQqIN3+tvmxYxoZxBnpVIt8MSZj3+/0WvitUfW -2dCFmU2Umw9Lje4AWkcdEQOsQRivh7dvDDqPys/cA8GiCcjl/YBeyGBCARsaU1q7 -N6a3vLqE6R5sGtRk2tRD/pOLS/IseRYQ1JMLiI+h2IYURpFHmygk71dSTlxCnKr3 -Sewn6EAes6aJInKc9Q0ztFijMDvd1GpUk74aTfOTlPf8hAs/hCBcNANExdqtvArB -As8e5ZTZ845b2EzwnexhF7sUMlQMAimTHpKG9n/v55IFDlndmQguLvqcAFLTxWYp -5KeXRKQOKIETNcX2b2TmQcTVL8w0RSXPQQCWPUouwpaYT05KnJe32x+SMsj/D1Fu -1uwJ ------END CERTIFICATE----- - -# Issuer: CN=TrustCor ECA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority -# Subject: CN=TrustCor ECA-1 O=TrustCor Systems S. de R.L. 
OU=TrustCor Certificate Authority -# Label: "TrustCor ECA-1" -# Serial: 9548242946988625984 -# MD5 Fingerprint: 27:92:23:1d:0a:f5:40:7c:e9:e6:6b:9d:d8:f5:e7:6c -# SHA1 Fingerprint: 58:d1:df:95:95:67:6b:63:c0:f0:5b:1c:17:4d:8b:84:0b:c8:78:bd -# SHA256 Fingerprint: 5a:88:5d:b1:9c:01:d9:12:c5:75:93:88:93:8c:af:bb:df:03:1a:b2:d4:8e:91:ee:15:58:9b:42:97:1d:03:9c ------BEGIN CERTIFICATE----- -MIIEIDCCAwigAwIBAgIJAISCLF8cYtBAMA0GCSqGSIb3DQEBCwUAMIGcMQswCQYD -VQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEgQ2l0eTEk -MCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYDVQQLDB5U -cnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxFzAVBgNVBAMMDlRydXN0Q29y -IEVDQS0xMB4XDTE2MDIwNDEyMzIzM1oXDTI5MTIzMTE3MjgwN1owgZwxCzAJBgNV -BAYTAlBBMQ8wDQYDVQQIDAZQYW5hbWExFDASBgNVBAcMC1BhbmFtYSBDaXR5MSQw -IgYDVQQKDBtUcnVzdENvciBTeXN0ZW1zIFMuIGRlIFIuTC4xJzAlBgNVBAsMHlRy -dXN0Q29yIENlcnRpZmljYXRlIEF1dGhvcml0eTEXMBUGA1UEAwwOVHJ1c3RDb3Ig -RUNBLTEwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDPj+ARtZ+odnbb -3w9U73NjKYKtR8aja+3+XzP4Q1HpGjORMRegdMTUpwHmspI+ap3tDvl0mEDTPwOA -BoJA6LHip1GnHYMma6ve+heRK9jGrB6xnhkB1Zem6g23xFUfJ3zSCNV2HykVh0A5 -3ThFEXXQmqc04L/NyFIduUd+Dbi7xgz2c1cWWn5DkR9VOsZtRASqnKmcp0yJF4Ou -owReUoCLHhIlERnXDH19MURB6tuvsBzvgdAsxZohmz3tQjtQJvLsznFhBmIhVE5/ -wZ0+fyCMgMsq2JdiyIMzkX2woloPV+g7zPIlstR8L+xNxqE6FXrntl019fZISjZF -ZtS6mFjBAgMBAAGjYzBhMB0GA1UdDgQWBBREnkj1zG1I1KBLf/5ZJC+Dl5mahjAf -BgNVHSMEGDAWgBREnkj1zG1I1KBLf/5ZJC+Dl5mahjAPBgNVHRMBAf8EBTADAQH/ -MA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAQEABT41XBVwm8nHc2Fv -civUwo/yQ10CzsSUuZQRg2dd4mdsdXa/uwyqNsatR5Nj3B5+1t4u/ukZMjgDfxT2 -AHMsWbEhBuH7rBiVDKP/mZb3Kyeb1STMHd3BOuCYRLDE5D53sXOpZCz2HAF8P11F -hcCF5yWPldwX8zyfGm6wyuMdKulMY/okYWLW2n62HGz1Ah3UKt1VkOsqEUc8Ll50 -soIipX1TH0XsJ5F95yIW6MBoNtjG8U+ARDL54dHRHareqKucBK+tIA5kmE2la8BI -WJZpTdwHjFGTot+fDz2LYLSCjaoITmJF4PkL0uDgPFveXHEnJcLmA4GLEFPjx1Wi -tJ/X5g== ------END CERTIFICATE----- - # Issuer: CN=SSL.com Root Certification Authority RSA O=SSL Corporation # Subject: CN=SSL.com Root Certification Authority RSA O=SSL Corporation # Label: "SSL.com Root Certification Authority RSA" @@ -4169,126 +2975,6 @@ rYy0UGYwEAYJKwYBBAGCNxUBBAMCAQAwCgYIKoZIzj0EAwMDaAAwZQIwJsdpW9zV Mgj/mkkCtojeFK9dbJlxjRo/i9fgojaGHAeCOnZT/cKi7e97sIBPWA9LUzm9 -----END CERTIFICATE----- -# Issuer: CN=GTS Root R1 O=Google Trust Services LLC -# Subject: CN=GTS Root R1 O=Google Trust Services LLC -# Label: "GTS Root R1" -# Serial: 146587175971765017618439757810265552097 -# MD5 Fingerprint: 82:1a:ef:d4:d2:4a:f2:9f:e2:3d:97:06:14:70:72:85 -# SHA1 Fingerprint: e1:c9:50:e6:ef:22:f8:4c:56:45:72:8b:92:20:60:d7:d5:a7:a3:e8 -# SHA256 Fingerprint: 2a:57:54:71:e3:13:40:bc:21:58:1c:bd:2c:f1:3e:15:84:63:20:3e:ce:94:bc:f9:d3:cc:19:6b:f0:9a:54:72 ------BEGIN CERTIFICATE----- -MIIFWjCCA0KgAwIBAgIQbkepxUtHDA3sM9CJuRz04TANBgkqhkiG9w0BAQwFADBH -MQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExM -QzEUMBIGA1UEAxMLR1RTIFJvb3QgUjEwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIy -MDAwMDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNl -cnZpY2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjEwggIiMA0GCSqGSIb3DQEB -AQUAA4ICDwAwggIKAoICAQC2EQKLHuOhd5s73L+UPreVp0A8of2C+X0yBoJx9vaM -f/vo27xqLpeXo4xL+Sv2sfnOhB2x+cWX3u+58qPpvBKJXqeqUqv4IyfLpLGcY9vX -mX7wCl7raKb0xlpHDU0QM+NOsROjyBhsS+z8CZDfnWQpJSMHobTSPS5g4M/SCYe7 -zUjwTcLCeoiKu7rPWRnWr4+wB7CeMfGCwcDfLqZtbBkOtdh+JhpFAz2weaSUKK0P -fyblqAj+lug8aJRT7oM6iCsVlgmy4HqMLnXWnOunVmSPlk9orj2XwoSPwLxAwAtc -vfaHszVsrBhQf4TgTM2S0yDpM7xSma8ytSmzJSq0SPly4cpk9+aCEI3oncKKiPo4 -Zor8Y/kB+Xj9e1x3+naH+uzfsQ55lVe0vSbv1gHR6xYKu44LtcXFilWr06zqkUsp 
-zBmkMiVOKvFlRNACzqrOSbTqn3yDsEB750Orp2yjj32JgfpMpf/VjsPOS+C12LOO -Rc92wO1AK/1TD7Cn1TsNsYqiA94xrcx36m97PtbfkSIS5r762DL8EGMUUXLeXdYW -k70paDPvOmbsB4om3xPXV2V4J95eSRQAogB/mqghtqmxlbCluQ0WEdrHbEg8QOB+ -DVrNVjzRlwW5y0vtOUucxD/SVRNuJLDWcfr0wbrM7Rv1/oFB2ACYPTrIrnqYNxgF -lQIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV -HQ4EFgQU5K8rJnEaK0gnhS9SZizv8IkTcT4wDQYJKoZIhvcNAQEMBQADggIBADiW -Cu49tJYeX++dnAsznyvgyv3SjgofQXSlfKqE1OXyHuY3UjKcC9FhHb8owbZEKTV1 -d5iyfNm9dKyKaOOpMQkpAWBz40d8U6iQSifvS9efk+eCNs6aaAyC58/UEBZvXw6Z -XPYfcX3v73svfuo21pdwCxXu11xWajOl40k4DLh9+42FpLFZXvRq4d2h9mREruZR -gyFmxhE+885H7pwoHyXa/6xmld01D1zvICxi/ZG6qcz8WpyTgYMpl0p8WnK0OdC3 -d8t5/Wk6kjftbjhlRn7pYL15iJdfOBL07q9bgsiG1eGZbYwE8na6SfZu6W0eX6Dv -J4J2QPim01hcDyxC2kLGe4g0x8HYRZvBPsVhHdljUEn2NIVq4BjFbkerQUIpm/Zg -DdIx02OYI5NaAIFItO/Nis3Jz5nu2Z6qNuFoS3FJFDYoOj0dzpqPJeaAcWErtXvM -+SUWgeExX6GjfhaknBZqlxi9dnKlC54dNuYvoS++cJEPqOba+MSSQGwlfnuzCdyy -F62ARPBopY+Udf90WuioAnwMCeKpSwughQtiue+hMZL77/ZRBIls6Kl0obsXs7X9 -SQ98POyDGCBDTtWTurQ0sR8WNh8M5mQ5Fkzc4P4dyKliPUDqysU0ArSuiYgzNdws -E3PYJ/HQcu51OyLemGhmW/HGY0dVHLqlCFF1pkgl ------END CERTIFICATE----- - -# Issuer: CN=GTS Root R2 O=Google Trust Services LLC -# Subject: CN=GTS Root R2 O=Google Trust Services LLC -# Label: "GTS Root R2" -# Serial: 146587176055767053814479386953112547951 -# MD5 Fingerprint: 44:ed:9a:0e:a4:09:3b:00:f2:ae:4c:a3:c6:61:b0:8b -# SHA1 Fingerprint: d2:73:96:2a:2a:5e:39:9f:73:3f:e1:c7:1e:64:3f:03:38:34:fc:4d -# SHA256 Fingerprint: c4:5d:7b:b0:8e:6d:67:e6:2e:42:35:11:0b:56:4e:5f:78:fd:92:ef:05:8c:84:0a:ea:4e:64:55:d7:58:5c:60 ------BEGIN CERTIFICATE----- -MIIFWjCCA0KgAwIBAgIQbkepxlqz5yDFMJo/aFLybzANBgkqhkiG9w0BAQwFADBH -MQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExM -QzEUMBIGA1UEAxMLR1RTIFJvb3QgUjIwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIy -MDAwMDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNl -cnZpY2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjIwggIiMA0GCSqGSIb3DQEB -AQUAA4ICDwAwggIKAoICAQDO3v2m++zsFDQ8BwZabFn3GTXd98GdVarTzTukk3Lv -CvptnfbwhYBboUhSnznFt+4orO/LdmgUud+tAWyZH8QiHZ/+cnfgLFuv5AS/T3Kg -GjSY6Dlo7JUle3ah5mm5hRm9iYz+re026nO8/4Piy33B0s5Ks40FnotJk9/BW9Bu -XvAuMC6C/Pq8tBcKSOWIm8Wba96wyrQD8Nr0kLhlZPdcTK3ofmZemde4wj7I0BOd -re7kRXuJVfeKH2JShBKzwkCX44ofR5GmdFrS+LFjKBC4swm4VndAoiaYecb+3yXu -PuWgf9RhD1FLPD+M2uFwdNjCaKH5wQzpoeJ/u1U8dgbuak7MkogwTZq9TwtImoS1 -mKPV+3PBV2HdKFZ1E66HjucMUQkQdYhMvI35ezzUIkgfKtzra7tEscszcTJGr61K -8YzodDqs5xoic4DSMPclQsciOzsSrZYuxsN2B6ogtzVJV+mSSeh2FnIxZyuWfoqj -x5RWIr9qS34BIbIjMt/kmkRtWVtd9QCgHJvGeJeNkP+byKq0rxFROV7Z+2et1VsR -nTKaG73VululycslaVNVJ1zgyjbLiGH7HrfQy+4W+9OmTN6SpdTi3/UGVN4unUu0 -kzCqgc7dGtxRcw1PcOnlthYhGXmy5okLdWTK1au8CcEYof/UVKGFPP0UJAOyh9Ok -twIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV -HQ4EFgQUu//KjiOfT5nK2+JopqUVJxce2Q4wDQYJKoZIhvcNAQEMBQADggIBALZp -8KZ3/p7uC4Gt4cCpx/k1HUCCq+YEtN/L9x0Pg/B+E02NjO7jMyLDOfxA325BS0JT -vhaI8dI4XsRomRyYUpOM52jtG2pzegVATX9lO9ZY8c6DR2Dj/5epnGB3GFW1fgiT -z9D2PGcDFWEJ+YF59exTpJ/JjwGLc8R3dtyDovUMSRqodt6Sm2T4syzFJ9MHwAiA -pJiS4wGWAqoC7o87xdFtCjMwc3i5T1QWvwsHoaRc5svJXISPD+AVdyx+Jn7axEvb -pxZ3B7DNdehyQtaVhJ2Gg/LkkM0JR9SLA3DaWsYDQvTtN6LwG1BUSw7YhN4ZKJmB -R64JGz9I0cNv4rBgF/XuIwKl2gBbbZCr7qLpGzvpx0QnRY5rn/WkhLx3+WuXrD5R -RaIRpsyF7gpo8j5QOHokYh4XIDdtak23CZvJ/KRY9bb7nE4Yu5UC56GtmwfuNmsk -0jmGwZODUNKBRqhfYlcsu2xkiAhu7xNUX90txGdj08+JN7+dIPT7eoOboB6BAFDC -5AwiWVIQ7UNWhwD4FFKnHYuTjKJNRn8nxnGbJN7k2oaLDX5rIMHAnuFl2GqjpuiF -izoHCBy69Y9Vmhh1fuXsgWbRIXOhNUQLgD1bnF5vKheW0YMjiGZt5obicDIvUiLn -yOd/xCxgXS/Dr55FBcOEArf9LAhST4Ldo/DUhgkC ------END CERTIFICATE----- - -# Issuer: CN=GTS Root R3 
O=Google Trust Services LLC -# Subject: CN=GTS Root R3 O=Google Trust Services LLC -# Label: "GTS Root R3" -# Serial: 146587176140553309517047991083707763997 -# MD5 Fingerprint: 1a:79:5b:6b:04:52:9c:5d:c7:74:33:1b:25:9a:f9:25 -# SHA1 Fingerprint: 30:d4:24:6f:07:ff:db:91:89:8a:0b:e9:49:66:11:eb:8c:5e:46:e5 -# SHA256 Fingerprint: 15:d5:b8:77:46:19:ea:7d:54:ce:1c:a6:d0:b0:c4:03:e0:37:a9:17:f1:31:e8:a0:4e:1e:6b:7a:71:ba:bc:e5 ------BEGIN CERTIFICATE----- -MIICDDCCAZGgAwIBAgIQbkepx2ypcyRAiQ8DVd2NHTAKBggqhkjOPQQDAzBHMQsw -CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU -MBIGA1UEAxMLR1RTIFJvb3QgUjMwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw -MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp -Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjMwdjAQBgcqhkjOPQIBBgUrgQQA -IgNiAAQfTzOHMymKoYTey8chWEGJ6ladK0uFxh1MJ7x/JlFyb+Kf1qPKzEUURout -736GjOyxfi//qXGdGIRFBEFVbivqJn+7kAHjSxm65FSWRQmx1WyRRK2EE46ajA2A -DDL24CejQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1Ud -DgQWBBTB8Sa6oC2uhYHP0/EqEr24Cmf9vDAKBggqhkjOPQQDAwNpADBmAjEAgFuk -fCPAlaUs3L6JbyO5o91lAFJekazInXJ0glMLfalAvWhgxeG4VDvBNhcl2MG9AjEA -njWSdIUlUfUk7GRSJFClH9voy8l27OyCbvWFGFPouOOaKaqW04MjyaR7YbPMAuhd ------END CERTIFICATE----- - -# Issuer: CN=GTS Root R4 O=Google Trust Services LLC -# Subject: CN=GTS Root R4 O=Google Trust Services LLC -# Label: "GTS Root R4" -# Serial: 146587176229350439916519468929765261721 -# MD5 Fingerprint: 5d:b6:6a:c4:60:17:24:6a:1a:99:a8:4b:ee:5e:b4:26 -# SHA1 Fingerprint: 2a:1d:60:27:d9:4a:b1:0a:1c:4d:91:5c:cd:33:a0:cb:3e:2d:54:cb -# SHA256 Fingerprint: 71:cc:a5:39:1f:9e:79:4b:04:80:25:30:b3:63:e1:21:da:8a:30:43:bb:26:66:2f:ea:4d:ca:7f:c9:51:a4:bd ------BEGIN CERTIFICATE----- -MIICCjCCAZGgAwIBAgIQbkepyIuUtui7OyrYorLBmTAKBggqhkjOPQQDAzBHMQsw -CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU -MBIGA1UEAxMLR1RTIFJvb3QgUjQwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw -MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp -Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjQwdjAQBgcqhkjOPQIBBgUrgQQA -IgNiAATzdHOnaItgrkO4NcWBMHtLSZ37wWHO5t5GvWvVYRg1rkDdc/eJkTBa6zzu -hXyiQHY7qca4R9gq55KRanPpsXI5nymfopjTX15YhmUPoYRlBtHci8nHc8iMai/l -xKvRHYqjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1Ud -DgQWBBSATNbrdP9JNqPV2Py1PsVq8JQdjDAKBggqhkjOPQQDAwNnADBkAjBqUFJ0 -CMRw3J5QdCHojXohw0+WbhXRIjVhLfoIN+4Zba3bssx9BzT1YBkstTTZbyACMANx -sbqjYAuG7ZoIapVon+Kz4ZNkfF6Tpt95LY2F45TPI11xzPKwTdb+mciUqXWi4w== ------END CERTIFICATE----- - # Issuer: CN=UCA Global G2 Root O=UniTrust # Subject: CN=UCA Global G2 Root O=UniTrust # Label: "UCA Global G2 Root" @@ -4600,3 +3286,1242 @@ IQ6SwJAfzyBfyjs4x7dtOvPmRLgOMWuIjnDrnBdSqEGULoe256YSxXXfW8AKbnuk 5F6G+TaU33fD6Q3AOfF5u0aOq0NZJ7cguyPpVkAh7DE9ZapD8j3fcEThuk0mEDuY n/PIjhs4ViFqUZPTkcpG2om3PVODLAgfi49T3f+sHw== -----END CERTIFICATE----- + +# Issuer: CN=Microsoft ECC Root Certificate Authority 2017 O=Microsoft Corporation +# Subject: CN=Microsoft ECC Root Certificate Authority 2017 O=Microsoft Corporation +# Label: "Microsoft ECC Root Certificate Authority 2017" +# Serial: 136839042543790627607696632466672567020 +# MD5 Fingerprint: dd:a1:03:e6:4a:93:10:d1:bf:f0:19:42:cb:fe:ed:67 +# SHA1 Fingerprint: 99:9a:64:c3:7f:f4:7d:9f:ab:95:f1:47:69:89:14:60:ee:c4:c3:c5 +# SHA256 Fingerprint: 35:8d:f3:9d:76:4a:f9:e1:b7:66:e9:c9:72:df:35:2e:e1:5c:fa:c2:27:af:6a:d1:d7:0e:8e:4a:6e:dc:ba:02 +-----BEGIN CERTIFICATE----- +MIICWTCCAd+gAwIBAgIQZvI9r4fei7FK6gxXMQHC7DAKBggqhkjOPQQDAzBlMQsw +CQYDVQQGEwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYD 
+VQQDEy1NaWNyb3NvZnQgRUNDIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIw +MTcwHhcNMTkxMjE4MjMwNjQ1WhcNNDIwNzE4MjMxNjA0WjBlMQswCQYDVQQGEwJV +UzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYDVQQDEy1NaWNy +b3NvZnQgRUNDIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIwMTcwdjAQBgcq +hkjOPQIBBgUrgQQAIgNiAATUvD0CQnVBEyPNgASGAlEvaqiBYgtlzPbKnR5vSmZR +ogPZnZH6thaxjG7efM3beaYvzrvOcS/lpaso7GMEZpn4+vKTEAXhgShC48Zo9OYb +hGBKia/teQ87zvH2RPUBeMCjVDBSMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8E +BTADAQH/MB0GA1UdDgQWBBTIy5lycFIM+Oa+sgRXKSrPQhDtNTAQBgkrBgEEAYI3 +FQEEAwIBADAKBggqhkjOPQQDAwNoADBlAjBY8k3qDPlfXu5gKcs68tvWMoQZP3zV +L8KxzJOuULsJMsbG7X7JNpQS5GiFBqIb0C8CMQCZ6Ra0DvpWSNSkMBaReNtUjGUB +iudQZsIxtzm6uBoiB078a1QWIP8rtedMDE2mT3M= +-----END CERTIFICATE----- + +# Issuer: CN=Microsoft RSA Root Certificate Authority 2017 O=Microsoft Corporation +# Subject: CN=Microsoft RSA Root Certificate Authority 2017 O=Microsoft Corporation +# Label: "Microsoft RSA Root Certificate Authority 2017" +# Serial: 40975477897264996090493496164228220339 +# MD5 Fingerprint: 10:ff:00:ff:cf:c9:f8:c7:7a:c0:ee:35:8e:c9:0f:47 +# SHA1 Fingerprint: 73:a5:e6:4a:3b:ff:83:16:ff:0e:dc:cc:61:8a:90:6e:4e:ae:4d:74 +# SHA256 Fingerprint: c7:41:f7:0f:4b:2a:8d:88:bf:2e:71:c1:41:22:ef:53:ef:10:eb:a0:cf:a5:e6:4c:fa:20:f4:18:85:30:73:e0 +-----BEGIN CERTIFICATE----- +MIIFqDCCA5CgAwIBAgIQHtOXCV/YtLNHcB6qvn9FszANBgkqhkiG9w0BAQwFADBl +MQswCQYDVQQGEwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYw +NAYDVQQDEy1NaWNyb3NvZnQgUlNBIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5 +IDIwMTcwHhcNMTkxMjE4MjI1MTIyWhcNNDIwNzE4MjMwMDIzWjBlMQswCQYDVQQG +EwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYDVQQDEy1N +aWNyb3NvZnQgUlNBIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIwMTcwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKW76UM4wplZEWCpW9R2LBifOZ +Nt9GkMml7Xhqb0eRaPgnZ1AzHaGm++DlQ6OEAlcBXZxIQIJTELy/xztokLaCLeX0 +ZdDMbRnMlfl7rEqUrQ7eS0MdhweSE5CAg2Q1OQT85elss7YfUJQ4ZVBcF0a5toW1 +HLUX6NZFndiyJrDKxHBKrmCk3bPZ7Pw71VdyvD/IybLeS2v4I2wDwAW9lcfNcztm +gGTjGqwu+UcF8ga2m3P1eDNbx6H7JyqhtJqRjJHTOoI+dkC0zVJhUXAoP8XFWvLJ +jEm7FFtNyP9nTUwSlq31/niol4fX/V4ggNyhSyL71Imtus5Hl0dVe49FyGcohJUc +aDDv70ngNXtk55iwlNpNhTs+VcQor1fznhPbRiefHqJeRIOkpcrVE7NLP8TjwuaG +YaRSMLl6IE9vDzhTyzMMEyuP1pq9KsgtsRx9S1HKR9FIJ3Jdh+vVReZIZZ2vUpC6 +W6IYZVcSn2i51BVrlMRpIpj0M+Dt+VGOQVDJNE92kKz8OMHY4Xu54+OU4UZpyw4K +UGsTuqwPN1q3ErWQgR5WrlcihtnJ0tHXUeOrO8ZV/R4O03QK0dqq6mm4lyiPSMQH ++FJDOvTKVTUssKZqwJz58oHhEmrARdlns87/I6KJClTUFLkqqNfs+avNJVgyeY+Q +W5g5xAgGwax/Dj0ApQIDAQABo1QwUjAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/ +BAUwAwEB/zAdBgNVHQ4EFgQUCctZf4aycI8awznjwNnpv7tNsiMwEAYJKwYBBAGC +NxUBBAMCAQAwDQYJKoZIhvcNAQEMBQADggIBAKyvPl3CEZaJjqPnktaXFbgToqZC +LgLNFgVZJ8og6Lq46BrsTaiXVq5lQ7GPAJtSzVXNUzltYkyLDVt8LkS/gxCP81OC +gMNPOsduET/m4xaRhPtthH80dK2Jp86519efhGSSvpWhrQlTM93uCupKUY5vVau6 +tZRGrox/2KJQJWVggEbbMwSubLWYdFQl3JPk+ONVFT24bcMKpBLBaYVu32TxU5nh +SnUgnZUP5NbcA/FZGOhHibJXWpS2qdgXKxdJ5XbLwVaZOjex/2kskZGT4d9Mozd2 +TaGf+G0eHdP67Pv0RR0Tbc/3WeUiJ3IrhvNXuzDtJE3cfVa7o7P4NHmJweDyAmH3 +pvwPuxwXC65B2Xy9J6P9LjrRk5Sxcx0ki69bIImtt2dmefU6xqaWM/5TkshGsRGR +xpl/j8nWZjEgQRCHLQzWwa80mMpkg/sTV9HB8Dx6jKXB/ZUhoHHBk2dxEuqPiApp +GWSZI1b7rCoucL5mxAyE7+WL85MB+GqQk2dLsmijtWKP6T+MejteD+eMuMZ87zf9 +dOLITzNy4ZQ5bb0Sr74MTnB8G2+NszKTc0QWbej09+CVgI+WXTik9KveCjCHk9hN +AHFiRSdLOkKEW39lt2c0Ui2cFmuqqNh7o0JMcccMyj6D5KbvtwEwXlGjefVwaaZB +RA+GsCyRxj3qrg+E +-----END CERTIFICATE----- + +# Issuer: CN=e-Szigno Root CA 2017 O=Microsec Ltd. +# Subject: CN=e-Szigno Root CA 2017 O=Microsec Ltd. 
+# Label: "e-Szigno Root CA 2017" +# Serial: 411379200276854331539784714 +# MD5 Fingerprint: de:1f:f6:9e:84:ae:a7:b4:21:ce:1e:58:7d:d1:84:98 +# SHA1 Fingerprint: 89:d4:83:03:4f:9e:9a:48:80:5f:72:37:d4:a9:a6:ef:cb:7c:1f:d1 +# SHA256 Fingerprint: be:b0:0b:30:83:9b:9b:c3:2c:32:e4:44:79:05:95:06:41:f2:64:21:b1:5e:d0:89:19:8b:51:8a:e2:ea:1b:99 +-----BEGIN CERTIFICATE----- +MIICQDCCAeWgAwIBAgIMAVRI7yH9l1kN9QQKMAoGCCqGSM49BAMCMHExCzAJBgNV +BAYTAkhVMREwDwYDVQQHDAhCdWRhcGVzdDEWMBQGA1UECgwNTWljcm9zZWMgTHRk +LjEXMBUGA1UEYQwOVkFUSFUtMjM1ODQ0OTcxHjAcBgNVBAMMFWUtU3ppZ25vIFJv +b3QgQ0EgMjAxNzAeFw0xNzA4MjIxMjA3MDZaFw00MjA4MjIxMjA3MDZaMHExCzAJ +BgNVBAYTAkhVMREwDwYDVQQHDAhCdWRhcGVzdDEWMBQGA1UECgwNTWljcm9zZWMg +THRkLjEXMBUGA1UEYQwOVkFUSFUtMjM1ODQ0OTcxHjAcBgNVBAMMFWUtU3ppZ25v +IFJvb3QgQ0EgMjAxNzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABJbcPYrYsHtv +xie+RJCxs1YVe45DJH0ahFnuY2iyxl6H0BVIHqiQrb1TotreOpCmYF9oMrWGQd+H +Wyx7xf58etqjYzBhMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G +A1UdDgQWBBSHERUI0arBeAyxr87GyZDvvzAEwDAfBgNVHSMEGDAWgBSHERUI0arB +eAyxr87GyZDvvzAEwDAKBggqhkjOPQQDAgNJADBGAiEAtVfd14pVCzbhhkT61Nlo +jbjcI4qKDdQvfepz7L9NbKgCIQDLpbQS+ue16M9+k/zzNY9vTlp8tLxOsvxyqltZ ++efcMQ== +-----END CERTIFICATE----- + +# Issuer: O=CERTSIGN SA OU=certSIGN ROOT CA G2 +# Subject: O=CERTSIGN SA OU=certSIGN ROOT CA G2 +# Label: "certSIGN Root CA G2" +# Serial: 313609486401300475190 +# MD5 Fingerprint: 8c:f1:75:8a:c6:19:cf:94:b7:f7:65:20:87:c3:97:c7 +# SHA1 Fingerprint: 26:f9:93:b4:ed:3d:28:27:b0:b9:4b:a7:e9:15:1d:a3:8d:92:e5:32 +# SHA256 Fingerprint: 65:7c:fe:2f:a7:3f:aa:38:46:25:71:f3:32:a2:36:3a:46:fc:e7:02:09:51:71:07:02:cd:fb:b6:ee:da:33:05 +-----BEGIN CERTIFICATE----- +MIIFRzCCAy+gAwIBAgIJEQA0tk7GNi02MA0GCSqGSIb3DQEBCwUAMEExCzAJBgNV +BAYTAlJPMRQwEgYDVQQKEwtDRVJUU0lHTiBTQTEcMBoGA1UECxMTY2VydFNJR04g +Uk9PVCBDQSBHMjAeFw0xNzAyMDYwOTI3MzVaFw00MjAyMDYwOTI3MzVaMEExCzAJ +BgNVBAYTAlJPMRQwEgYDVQQKEwtDRVJUU0lHTiBTQTEcMBoGA1UECxMTY2VydFNJ +R04gUk9PVCBDQSBHMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMDF +dRmRfUR0dIf+DjuW3NgBFszuY5HnC2/OOwppGnzC46+CjobXXo9X69MhWf05N0Iw +vlDqtg+piNguLWkh59E3GE59kdUWX2tbAMI5Qw02hVK5U2UPHULlj88F0+7cDBrZ +uIt4ImfkabBoxTzkbFpG583H+u/E7Eu9aqSs/cwoUe+StCmrqzWaTOTECMYmzPhp +n+Sc8CnTXPnGFiWeI8MgwT0PPzhAsP6CRDiqWhqKa2NYOLQV07YRaXseVO6MGiKs +cpc/I1mbySKEwQdPzH/iV8oScLumZfNpdWO9lfsbl83kqK/20U6o2YpxJM02PbyW +xPFsqa7lzw1uKA2wDrXKUXt4FMMgL3/7FFXhEZn91QqhngLjYl/rNUssuHLoPj1P +rCy7Lobio3aP5ZMqz6WryFyNSwb/EkaseMsUBzXgqd+L6a8VTxaJW732jcZZroiF +DsGJ6x9nxUWO/203Nit4ZoORUSs9/1F3dmKh7Gc+PoGD4FapUB8fepmrY7+EF3fx +DTvf95xhszWYijqy7DwaNz9+j5LP2RIUZNoQAhVB/0/E6xyjyfqZ90bp4RjZsbgy +LcsUDFDYg2WD7rlcz8sFWkz6GZdr1l0T08JcVLwyc6B49fFtHsufpaafItzRUZ6C +eWRgKRM+o/1Pcmqr4tTluCRVLERLiohEnMqE0yo7AgMBAAGjQjBAMA8GA1UdEwEB +/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBSCIS1mxteg4BXrzkwJ +d8RgnlRuAzANBgkqhkiG9w0BAQsFAAOCAgEAYN4auOfyYILVAzOBywaK8SJJ6ejq +kX/GM15oGQOGO0MBzwdw5AgeZYWR5hEit/UCI46uuR59H35s5r0l1ZUa8gWmr4UC +b6741jH/JclKyMeKqdmfS0mbEVeZkkMR3rYzpMzXjWR91M08KCy0mpbqTfXERMQl +qiCA2ClV9+BB/AYm/7k29UMUA2Z44RGx2iBfRgB4ACGlHgAoYXhvqAEBj500mv/0 +OJD7uNGzcgbJceaBxXntC6Z58hMLnPddDnskk7RI24Zf3lCGeOdA5jGokHZwYa+c +NywRtYK3qq4kNFtyDGkNzVmf9nGvnAvRCjj5BiKDUyUM/FHE5r7iOZULJK2v0ZXk +ltd0ZGtxTgI8qoXzIKNDOXZbbFD+mpwUHmUUihW9o4JFWklWatKcsWMy5WHgUyIO +pwpJ6st+H6jiYoD2EEVSmAYY3qXNL3+q1Ok+CHLsIwMCPKaq2LxndD0UF/tUSxfj +03k9bWtJySgOLnRQvwzZRjoQhsmnP+mg7H/rpXdYaXHmgwo38oZJar55CJD2AhZk +PuXaTH4MNMn5X7azKFGnpyuqSfqNZSlO42sTp5SjLVFteAxEy9/eCG/Oo2Sr05WE +1LlSVHJ7liXMvGnjSG4N0MedJ5qq+BOS3R7fY581qRY27Iy4g/Q9iY/NtBde17MX +QRBdJ3NghVdJIgc= +-----END CERTIFICATE----- + 
+# Issuer: CN=Trustwave Global Certification Authority O=Trustwave Holdings, Inc. +# Subject: CN=Trustwave Global Certification Authority O=Trustwave Holdings, Inc. +# Label: "Trustwave Global Certification Authority" +# Serial: 1846098327275375458322922162 +# MD5 Fingerprint: f8:1c:18:2d:2f:ba:5f:6d:a1:6c:bc:c7:ab:91:c7:0e +# SHA1 Fingerprint: 2f:8f:36:4f:e1:58:97:44:21:59:87:a5:2a:9a:d0:69:95:26:7f:b5 +# SHA256 Fingerprint: 97:55:20:15:f5:dd:fc:3c:87:88:c0:06:94:45:55:40:88:94:45:00:84:f1:00:86:70:86:bc:1a:2b:b5:8d:c8 +-----BEGIN CERTIFICATE----- +MIIF2jCCA8KgAwIBAgIMBfcOhtpJ80Y1LrqyMA0GCSqGSIb3DQEBCwUAMIGIMQsw +CQYDVQQGEwJVUzERMA8GA1UECAwISWxsaW5vaXMxEDAOBgNVBAcMB0NoaWNhZ28x +ITAfBgNVBAoMGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjExMC8GA1UEAwwoVHJ1 +c3R3YXZlIEdsb2JhbCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0xNzA4MjMx +OTM0MTJaFw00MjA4MjMxOTM0MTJaMIGIMQswCQYDVQQGEwJVUzERMA8GA1UECAwI +SWxsaW5vaXMxEDAOBgNVBAcMB0NoaWNhZ28xITAfBgNVBAoMGFRydXN0d2F2ZSBI +b2xkaW5ncywgSW5jLjExMC8GA1UEAwwoVHJ1c3R3YXZlIEdsb2JhbCBDZXJ0aWZp +Y2F0aW9uIEF1dGhvcml0eTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB +ALldUShLPDeS0YLOvR29zd24q88KPuFd5dyqCblXAj7mY2Hf8g+CY66j96xz0Xzn +swuvCAAJWX/NKSqIk4cXGIDtiLK0thAfLdZfVaITXdHG6wZWiYj+rDKd/VzDBcdu +7oaJuogDnXIhhpCujwOl3J+IKMujkkkP7NAP4m1ET4BqstTnoApTAbqOl5F2brz8 +1Ws25kCI1nsvXwXoLG0R8+eyvpJETNKXpP7ScoFDB5zpET71ixpZfR9oWN0EACyW +80OzfpgZdNmcc9kYvkHHNHnZ9GLCQ7mzJ7Aiy/k9UscwR7PJPrhq4ufogXBeQotP +JqX+OsIgbrv4Fo7NDKm0G2x2EOFYeUY+VM6AqFcJNykbmROPDMjWLBz7BegIlT1l +RtzuzWniTY+HKE40Cz7PFNm73bZQmq131BnW2hqIyE4bJ3XYsgjxroMwuREOzYfw +hI0Vcnyh78zyiGG69Gm7DIwLdVcEuE4qFC49DxweMqZiNu5m4iK4BUBjECLzMx10 +coos9TkpoNPnG4CELcU9402x/RpvumUHO1jsQkUm+9jaJXLE9gCxInm943xZYkqc +BW89zubWR2OZxiRvchLIrH+QtAuRcOi35hYQcRfO3gZPSEF9NUqjifLJS3tBEW1n +twiYTOURGa5CgNz7kAXU+FDKvuStx8KU1xad5hePrzb7AgMBAAGjQjBAMA8GA1Ud +EwEB/wQFMAMBAf8wHQYDVR0OBBYEFJngGWcNYtt2s9o9uFvo/ULSMQ6HMA4GA1Ud +DwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAmHNw4rDT7TnsTGDZqRKGFx6W +0OhUKDtkLSGm+J1WE2pIPU/HPinbbViDVD2HfSMF1OQc3Og4ZYbFdada2zUFvXfe +uyk3QAUHw5RSn8pk3fEbK9xGChACMf1KaA0HZJDmHvUqoai7PF35owgLEQzxPy0Q +lG/+4jSHg9bP5Rs1bdID4bANqKCqRieCNqcVtgimQlRXtpla4gt5kNdXElE1GYhB +aCXUNxeEFfsBctyV3lImIJgm4nb1J2/6ADtKYdkNy1GTKv0WBpanI5ojSP5RvbbE +sLFUzt5sQa0WZ37b/TjNuThOssFgy50X31ieemKyJo90lZvkWx3SD92YHJtZuSPT +MaCm/zjdzyBP6VhWOmfD0faZmZ26NraAL4hHT4a/RDqA5Dccprrql5gR0IRiR2Qe +qu5AvzSxnI9O4fKSTx+O856X3vOmeWqJcU9LJxdI/uz0UA9PSX3MReO9ekDFQdxh +VicGaeVyQYHTtgGJoC86cnn+OjC/QezHYj6RS8fZMXZC+fc8Y+wmjHMMfRod6qh8 +h6jCJ3zhM0EPz8/8AKAigJ5Kp28AsEFFtyLKaEjFQqKu3R3y4G5OBVixwJAWKqQ9 +EEC+j2Jjg6mcgn0tAumDMHzLJ8n9HmYAsC7TIS+OMxZsmO0QqAfWzJPP29FpHOTK +yeC2nOnOcXHebD8WpHk= +-----END CERTIFICATE----- + +# Issuer: CN=Trustwave Global ECC P256 Certification Authority O=Trustwave Holdings, Inc. +# Subject: CN=Trustwave Global ECC P256 Certification Authority O=Trustwave Holdings, Inc. 
+# Label: "Trustwave Global ECC P256 Certification Authority" +# Serial: 4151900041497450638097112925 +# MD5 Fingerprint: 5b:44:e3:8d:5d:36:86:26:e8:0d:05:d2:59:a7:83:54 +# SHA1 Fingerprint: b4:90:82:dd:45:0c:be:8b:5b:b1:66:d3:e2:a4:08:26:cd:ed:42:cf +# SHA256 Fingerprint: 94:5b:bc:82:5e:a5:54:f4:89:d1:fd:51:a7:3d:df:2e:a6:24:ac:70:19:a0:52:05:22:5c:22:a7:8c:cf:a8:b4 +-----BEGIN CERTIFICATE----- +MIICYDCCAgegAwIBAgIMDWpfCD8oXD5Rld9dMAoGCCqGSM49BAMCMIGRMQswCQYD +VQQGEwJVUzERMA8GA1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAf +BgNVBAoTGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3 +YXZlIEdsb2JhbCBFQ0MgUDI1NiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0x +NzA4MjMxOTM1MTBaFw00MjA4MjMxOTM1MTBaMIGRMQswCQYDVQQGEwJVUzERMA8G +A1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAfBgNVBAoTGFRydXN0 +d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3YXZlIEdsb2JhbCBF +Q0MgUDI1NiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTBZMBMGByqGSM49AgEGCCqG +SM49AwEHA0IABH77bOYj43MyCMpg5lOcunSNGLB4kFKA3TjASh3RqMyTpJcGOMoN +FWLGjgEqZZ2q3zSRLoHB5DOSMcT9CTqmP62jQzBBMA8GA1UdEwEB/wQFMAMBAf8w +DwYDVR0PAQH/BAUDAwcGADAdBgNVHQ4EFgQUo0EGrJBt0UrrdaVKEJmzsaGLSvcw +CgYIKoZIzj0EAwIDRwAwRAIgB+ZU2g6gWrKuEZ+Hxbb/ad4lvvigtwjzRM4q3wgh +DDcCIC0mA6AFvWvR9lz4ZcyGbbOcNEhjhAnFjXca4syc4XR7 +-----END CERTIFICATE----- + +# Issuer: CN=Trustwave Global ECC P384 Certification Authority O=Trustwave Holdings, Inc. +# Subject: CN=Trustwave Global ECC P384 Certification Authority O=Trustwave Holdings, Inc. +# Label: "Trustwave Global ECC P384 Certification Authority" +# Serial: 2704997926503831671788816187 +# MD5 Fingerprint: ea:cf:60:c4:3b:b9:15:29:40:a1:97:ed:78:27:93:d6 +# SHA1 Fingerprint: e7:f3:a3:c8:cf:6f:c3:04:2e:6d:0e:67:32:c5:9e:68:95:0d:5e:d2 +# SHA256 Fingerprint: 55:90:38:59:c8:c0:c3:eb:b8:75:9e:ce:4e:25:57:22:5f:f5:75:8b:bd:38:eb:d4:82:76:60:1e:1b:d5:80:97 +-----BEGIN CERTIFICATE----- +MIICnTCCAiSgAwIBAgIMCL2Fl2yZJ6SAaEc7MAoGCCqGSM49BAMDMIGRMQswCQYD +VQQGEwJVUzERMA8GA1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAf +BgNVBAoTGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3 +YXZlIEdsb2JhbCBFQ0MgUDM4NCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0x +NzA4MjMxOTM2NDNaFw00MjA4MjMxOTM2NDNaMIGRMQswCQYDVQQGEwJVUzERMA8G +A1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAfBgNVBAoTGFRydXN0 +d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3YXZlIEdsb2JhbCBF +Q0MgUDM4NCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTB2MBAGByqGSM49AgEGBSuB +BAAiA2IABGvaDXU1CDFHBa5FmVXxERMuSvgQMSOjfoPTfygIOiYaOs+Xgh+AtycJ +j9GOMMQKmw6sWASr9zZ9lCOkmwqKi6vr/TklZvFe/oyujUF5nQlgziip04pt89ZF +1PKYhDhloKNDMEEwDwYDVR0TAQH/BAUwAwEB/zAPBgNVHQ8BAf8EBQMDBwYAMB0G +A1UdDgQWBBRVqYSJ0sEyvRjLbKYHTsjnnb6CkDAKBggqhkjOPQQDAwNnADBkAjA3 +AZKXRRJ+oPM+rRk6ct30UJMDEr5E0k9BpIycnR+j9sKS50gU/k6bpZFXrsY3crsC +MGclCrEMXu6pY5Jv5ZAL/mYiykf9ijH3g/56vxC+GCsej/YpHpRZ744hN8tRmKVu +Sw== +-----END CERTIFICATE----- + +# Issuer: CN=NAVER Global Root Certification Authority O=NAVER BUSINESS PLATFORM Corp. +# Subject: CN=NAVER Global Root Certification Authority O=NAVER BUSINESS PLATFORM Corp. 
+# Label: "NAVER Global Root Certification Authority" +# Serial: 9013692873798656336226253319739695165984492813 +# MD5 Fingerprint: c8:7e:41:f6:25:3b:f5:09:b3:17:e8:46:3d:bf:d0:9b +# SHA1 Fingerprint: 8f:6b:f2:a9:27:4a:da:14:a0:c4:f4:8e:61:27:f9:c0:1e:78:5d:d1 +# SHA256 Fingerprint: 88:f4:38:dc:f8:ff:d1:fa:8f:42:91:15:ff:e5:f8:2a:e1:e0:6e:0c:70:c3:75:fa:ad:71:7b:34:a4:9e:72:65 +-----BEGIN CERTIFICATE----- +MIIFojCCA4qgAwIBAgIUAZQwHqIL3fXFMyqxQ0Rx+NZQTQ0wDQYJKoZIhvcNAQEM +BQAwaTELMAkGA1UEBhMCS1IxJjAkBgNVBAoMHU5BVkVSIEJVU0lORVNTIFBMQVRG +T1JNIENvcnAuMTIwMAYDVQQDDClOQVZFUiBHbG9iYWwgUm9vdCBDZXJ0aWZpY2F0 +aW9uIEF1dGhvcml0eTAeFw0xNzA4MTgwODU4NDJaFw0zNzA4MTgyMzU5NTlaMGkx +CzAJBgNVBAYTAktSMSYwJAYDVQQKDB1OQVZFUiBCVVNJTkVTUyBQTEFURk9STSBD +b3JwLjEyMDAGA1UEAwwpTkFWRVIgR2xvYmFsIFJvb3QgQ2VydGlmaWNhdGlvbiBB +dXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC21PGTXLVA +iQqrDZBbUGOukJR0F0Vy1ntlWilLp1agS7gvQnXp2XskWjFlqxcX0TM62RHcQDaH +38dq6SZeWYp34+hInDEW+j6RscrJo+KfziFTowI2MMtSAuXaMl3Dxeb57hHHi8lE +HoSTGEq0n+USZGnQJoViAbbJAh2+g1G7XNr4rRVqmfeSVPc0W+m/6imBEtRTkZaz +kVrd/pBzKPswRrXKCAfHcXLJZtM0l/aM9BhK4dA9WkW2aacp+yPOiNgSnABIqKYP +szuSjXEOdMWLyEz59JuOuDxp7W87UC9Y7cSw0BwbagzivESq2M0UXZR4Yb8Obtoq +vC8MC3GmsxY/nOb5zJ9TNeIDoKAYv7vxvvTWjIcNQvcGufFt7QSUqP620wbGQGHf +nZ3zVHbOUzoBppJB7ASjjw2i1QnK1sua8e9DXcCrpUHPXFNwcMmIpi3Ua2FzUCaG +YQ5fG8Ir4ozVu53BA0K6lNpfqbDKzE0K70dpAy8i+/Eozr9dUGWokG2zdLAIx6yo +0es+nPxdGoMuK8u180SdOqcXYZaicdNwlhVNt0xz7hlcxVs+Qf6sdWA7G2POAN3a +CJBitOUt7kinaxeZVL6HSuOpXgRM6xBtVNbv8ejyYhbLgGvtPe31HzClrkvJE+2K +AQHJuFFYwGY6sWZLxNUxAmLpdIQM201GLQIDAQABo0IwQDAdBgNVHQ4EFgQU0p+I +36HNLL3s9TsBAZMzJ7LrYEswDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMB +Af8wDQYJKoZIhvcNAQEMBQADggIBADLKgLOdPVQG3dLSLvCkASELZ0jKbY7gyKoN +qo0hV4/GPnrK21HUUrPUloSlWGB/5QuOH/XcChWB5Tu2tyIvCZwTFrFsDDUIbatj +cu3cvuzHV+YwIHHW1xDBE1UBjCpD5EHxzzp6U5LOogMFDTjfArsQLtk70pt6wKGm ++LUx5vR1yblTmXVHIloUFcd4G7ad6Qz4G3bxhYTeodoS76TiEJd6eN4MUZeoIUCL +hr0N8F5OSza7OyAfikJW4Qsav3vQIkMsRIz75Sq0bBwcupTgE34h5prCy8VCZLQe +lHsIJchxzIdFV4XTnyliIoNRlwAYl3dqmJLJfGBs32x9SuRwTMKeuB330DTHD8z7 +p/8Dvq1wkNoL3chtl1+afwkyQf3NosxabUzyqkn+Zvjp2DXrDige7kgvOtB5CTh8 +piKCk5XQA76+AqAF3SAi428diDRgxuYKuQl1C/AH6GmWNcf7I4GOODm4RStDeKLR +LBT/DShycpWbXgnbiUSYqqFJu3FS8r/2/yehNq+4tneI3TqkbZs0kNwUXTC/t+sX +5Ie3cdCh13cV1ELX8vMxmV2b3RZtP+oGI/hGoiLtk/bdmuYqh7GYVPEi92tF4+KO +dh2ajcQGjTa3FPOdVGm3jjzVpG2Tgbet9r1ke8LJaDmgkpzNNIaRkPpkUZ3+/uul +9XXeifdy +-----END CERTIFICATE----- + +# Issuer: CN=AC RAIZ FNMT-RCM SERVIDORES SEGUROS O=FNMT-RCM OU=Ceres +# Subject: CN=AC RAIZ FNMT-RCM SERVIDORES SEGUROS O=FNMT-RCM OU=Ceres +# Label: "AC RAIZ FNMT-RCM SERVIDORES SEGUROS" +# Serial: 131542671362353147877283741781055151509 +# MD5 Fingerprint: 19:36:9c:52:03:2f:d2:d1:bb:23:cc:dd:1e:12:55:bb +# SHA1 Fingerprint: 62:ff:d9:9e:c0:65:0d:03:ce:75:93:d2:ed:3f:2d:32:c9:e3:e5:4a +# SHA256 Fingerprint: 55:41:53:b1:3d:2c:f9:dd:b7:53:bf:be:1a:4e:0a:e0:8d:0a:a4:18:70:58:fe:60:a2:b8:62:b2:e4:b8:7b:cb +-----BEGIN CERTIFICATE----- +MIICbjCCAfOgAwIBAgIQYvYybOXE42hcG2LdnC6dlTAKBggqhkjOPQQDAzB4MQsw +CQYDVQQGEwJFUzERMA8GA1UECgwIRk5NVC1SQ00xDjAMBgNVBAsMBUNlcmVzMRgw +FgYDVQRhDA9WQVRFUy1RMjgyNjAwNEoxLDAqBgNVBAMMI0FDIFJBSVogRk5NVC1S +Q00gU0VSVklET1JFUyBTRUdVUk9TMB4XDTE4MTIyMDA5MzczM1oXDTQzMTIyMDA5 +MzczM1oweDELMAkGA1UEBhMCRVMxETAPBgNVBAoMCEZOTVQtUkNNMQ4wDAYDVQQL +DAVDZXJlczEYMBYGA1UEYQwPVkFURVMtUTI4MjYwMDRKMSwwKgYDVQQDDCNBQyBS +QUlaIEZOTVQtUkNNIFNFUlZJRE9SRVMgU0VHVVJPUzB2MBAGByqGSM49AgEGBSuB +BAAiA2IABPa6V1PIyqvfNkpSIeSX0oNnnvBlUdBeh8dHsVnyV0ebAAKTRBdp20LH 
+sbI6GA60XYyzZl2hNPk2LEnb80b8s0RpRBNm/dfF/a82Tc4DTQdxz69qBdKiQ1oK +Um8BA06Oi6NCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYD +VR0OBBYEFAG5L++/EYZg8k/QQW6rcx/n0m5JMAoGCCqGSM49BAMDA2kAMGYCMQCu +SuMrQMN0EfKVrRYj3k4MGuZdpSRea0R7/DjiT8ucRRcRTBQnJlU5dUoDzBOQn5IC +MQD6SmxgiHPz7riYYqnOK8LZiqZwMR2vsJRM60/G49HzYqc8/5MuB1xJAWdpEgJy +v+c= +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign Root R46 O=GlobalSign nv-sa +# Subject: CN=GlobalSign Root R46 O=GlobalSign nv-sa +# Label: "GlobalSign Root R46" +# Serial: 1552617688466950547958867513931858518042577 +# MD5 Fingerprint: c4:14:30:e4:fa:66:43:94:2a:6a:1b:24:5f:19:d0:ef +# SHA1 Fingerprint: 53:a2:b0:4b:ca:6b:d6:45:e6:39:8a:8e:c4:0d:d2:bf:77:c3:a2:90 +# SHA256 Fingerprint: 4f:a3:12:6d:8d:3a:11:d1:c4:85:5a:4f:80:7c:ba:d6:cf:91:9d:3a:5a:88:b0:3b:ea:2c:63:72:d9:3c:40:c9 +-----BEGIN CERTIFICATE----- +MIIFWjCCA0KgAwIBAgISEdK7udcjGJ5AXwqdLdDfJWfRMA0GCSqGSIb3DQEBDAUA +MEYxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9iYWxTaWduIG52LXNhMRwwGgYD +VQQDExNHbG9iYWxTaWduIFJvb3QgUjQ2MB4XDTE5MDMyMDAwMDAwMFoXDTQ2MDMy +MDAwMDAwMFowRjELMAkGA1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYt +c2ExHDAaBgNVBAMTE0dsb2JhbFNpZ24gUm9vdCBSNDYwggIiMA0GCSqGSIb3DQEB +AQUAA4ICDwAwggIKAoICAQCsrHQy6LNl5brtQyYdpokNRbopiLKkHWPd08EsCVeJ +OaFV6Wc0dwxu5FUdUiXSE2te4R2pt32JMl8Nnp8semNgQB+msLZ4j5lUlghYruQG +vGIFAha/r6gjA7aUD7xubMLL1aa7DOn2wQL7Id5m3RerdELv8HQvJfTqa1VbkNud +316HCkD7rRlr+/fKYIje2sGP1q7Vf9Q8g+7XFkyDRTNrJ9CG0Bwta/OrffGFqfUo +0q3v84RLHIf8E6M6cqJaESvWJ3En7YEtbWaBkoe0G1h6zD8K+kZPTXhc+CtI4wSE +y132tGqzZfxCnlEmIyDLPRT5ge1lFgBPGmSXZgjPjHvjK8Cd+RTyG/FWaha/LIWF +zXg4mutCagI0GIMXTpRW+LaCtfOW3T3zvn8gdz57GSNrLNRyc0NXfeD412lPFzYE ++cCQYDdF3uYM2HSNrpyibXRdQr4G9dlkbgIQrImwTDsHTUB+JMWKmIJ5jqSngiCN +I/onccnfxkF0oE32kRbcRoxfKWMxWXEM2G/CtjJ9++ZdU6Z+Ffy7dXxd7Pj2Fxzs +x2sZy/N78CsHpdlseVR2bJ0cpm4O6XkMqCNqo98bMDGfsVR7/mrLZqrcZdCinkqa +ByFrgY/bxFn63iLABJzjqls2k+g9vXqhnQt2sQvHnf3PmKgGwvgqo6GDoLclcqUC +4wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV +HQ4EFgQUA1yrc4GHqMywptWU4jaWSf8FmSwwDQYJKoZIhvcNAQEMBQADggIBAHx4 +7PYCLLtbfpIrXTncvtgdokIzTfnvpCo7RGkerNlFo048p9gkUbJUHJNOxO97k4Vg +JuoJSOD1u8fpaNK7ajFxzHmuEajwmf3lH7wvqMxX63bEIaZHU1VNaL8FpO7XJqti +2kM3S+LGteWygxk6x9PbTZ4IevPuzz5i+6zoYMzRx6Fcg0XERczzF2sUyQQCPtIk +pnnpHs6i58FZFZ8d4kuaPp92CC1r2LpXFNqD6v6MVenQTqnMdzGxRBF6XLE+0xRF +FRhiJBPSy03OXIPBNvIQtQ6IbbjhVp+J3pZmOUdkLG5NrmJ7v2B0GbhWrJKsFjLt +rWhV/pi60zTe9Mlhww6G9kuEYO4Ne7UyWHmRVSyBQ7N0H3qqJZ4d16GLuc1CLgSk +ZoNNiTW2bKg2SnkheCLQQrzRQDGQob4Ez8pn7fXwgNNgyYMqIgXQBztSvwyeqiv5 +u+YfjyW6hY0XHgL+XVAEV8/+LbzvXMAaq7afJMbfc2hIkCwU9D9SGuTSyxTDYWnP +4vkYxboznxSjBF25cfe1lNj2M8FawTSLfJvdkzrnE6JwYZ+vj+vYxXX4M2bUdGc6 +N3ec592kD3ZDZopD8p/7DEJ4Y9HiD2971KE9dJeFt0g5QdYg/NA6s/rob8SKunE3 +vouXsXgxT7PntgMTzlSdriVZzH81Xwj3QEUxeCp6 +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign Root E46 O=GlobalSign nv-sa +# Subject: CN=GlobalSign Root E46 O=GlobalSign nv-sa +# Label: "GlobalSign Root E46" +# Serial: 1552617690338932563915843282459653771421763 +# MD5 Fingerprint: b5:b8:66:ed:de:08:83:e3:c9:e2:01:34:06:ac:51:6f +# SHA1 Fingerprint: 39:b4:6c:d5:fe:80:06:eb:e2:2f:4a:bb:08:33:a0:af:db:b9:dd:84 +# SHA256 Fingerprint: cb:b9:c4:4d:84:b8:04:3e:10:50:ea:31:a6:9f:51:49:55:d7:bf:d2:e2:c6:b4:93:01:01:9a:d6:1d:9f:50:58 +-----BEGIN CERTIFICATE----- +MIICCzCCAZGgAwIBAgISEdK7ujNu1LzmJGjFDYQdmOhDMAoGCCqGSM49BAMDMEYx +CzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9iYWxTaWduIG52LXNhMRwwGgYDVQQD +ExNHbG9iYWxTaWduIFJvb3QgRTQ2MB4XDTE5MDMyMDAwMDAwMFoXDTQ2MDMyMDAw +MDAwMFowRjELMAkGA1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2Ex 
+HDAaBgNVBAMTE0dsb2JhbFNpZ24gUm9vdCBFNDYwdjAQBgcqhkjOPQIBBgUrgQQA +IgNiAAScDrHPt+ieUnd1NPqlRqetMhkytAepJ8qUuwzSChDH2omwlwxwEwkBjtjq +R+q+soArzfwoDdusvKSGN+1wCAB16pMLey5SnCNoIwZD7JIvU4Tb+0cUB+hflGdd +yXqBPCCjQjBAMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1Ud +DgQWBBQxCpCPtsad0kRLgLWi5h+xEk8blTAKBggqhkjOPQQDAwNoADBlAjEA31SQ +7Zvvi5QCkxeCmb6zniz2C5GMn0oUsfZkvLtoURMMA/cVi4RguYv/Uo7njLwcAjA8 ++RHUjE7AwWHCFUyqqx0LMV87HOIAl0Qx5v5zli/altP+CAezNIm8BZ/3Hobui3A= +-----END CERTIFICATE----- + +# Issuer: CN=GLOBALTRUST 2020 O=e-commerce monitoring GmbH +# Subject: CN=GLOBALTRUST 2020 O=e-commerce monitoring GmbH +# Label: "GLOBALTRUST 2020" +# Serial: 109160994242082918454945253 +# MD5 Fingerprint: 8a:c7:6f:cb:6d:e3:cc:a2:f1:7c:83:fa:0e:78:d7:e8 +# SHA1 Fingerprint: d0:67:c1:13:51:01:0c:aa:d0:c7:6a:65:37:31:16:26:4f:53:71:a2 +# SHA256 Fingerprint: 9a:29:6a:51:82:d1:d4:51:a2:e3:7f:43:9b:74:da:af:a2:67:52:33:29:f9:0f:9a:0d:20:07:c3:34:e2:3c:9a +-----BEGIN CERTIFICATE----- +MIIFgjCCA2qgAwIBAgILWku9WvtPilv6ZeUwDQYJKoZIhvcNAQELBQAwTTELMAkG +A1UEBhMCQVQxIzAhBgNVBAoTGmUtY29tbWVyY2UgbW9uaXRvcmluZyBHbWJIMRkw +FwYDVQQDExBHTE9CQUxUUlVTVCAyMDIwMB4XDTIwMDIxMDAwMDAwMFoXDTQwMDYx +MDAwMDAwMFowTTELMAkGA1UEBhMCQVQxIzAhBgNVBAoTGmUtY29tbWVyY2UgbW9u +aXRvcmluZyBHbWJIMRkwFwYDVQQDExBHTE9CQUxUUlVTVCAyMDIwMIICIjANBgkq +hkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAri5WrRsc7/aVj6B3GyvTY4+ETUWiD59b +RatZe1E0+eyLinjF3WuvvcTfk0Uev5E4C64OFudBc/jbu9G4UeDLgztzOG53ig9Z +YybNpyrOVPu44sB8R85gfD+yc/LAGbaKkoc1DZAoouQVBGM+uq/ufF7MpotQsjj3 +QWPKzv9pj2gOlTblzLmMCcpL3TGQlsjMH/1WljTbjhzqLL6FLmPdqqmV0/0plRPw +yJiT2S0WR5ARg6I6IqIoV6Lr/sCMKKCmfecqQjuCgGOlYx8ZzHyyZqjC0203b+J+ +BlHZRYQfEs4kUmSFC0iAToexIiIwquuuvuAC4EDosEKAA1GqtH6qRNdDYfOiaxaJ +SaSjpCuKAsR49GiKweR6NrFvG5Ybd0mN1MkGco/PU+PcF4UgStyYJ9ORJitHHmkH +r96i5OTUawuzXnzUJIBHKWk7buis/UDr2O1xcSvy6Fgd60GXIsUf1DnQJ4+H4xj0 +4KlGDfV0OoIu0G4skaMxXDtG6nsEEFZegB31pWXogvziB4xiRfUg3kZwhqG8k9Me +dKZssCz3AwyIDMvUclOGvGBG85hqwvG/Q/lwIHfKN0F5VVJjjVsSn8VoxIidrPIw +q7ejMZdnrY8XD2zHc+0klGvIg5rQmjdJBKuxFshsSUktq6HQjJLyQUp5ISXbY9e2 +nKd+Qmn7OmMCAwEAAaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC +AQYwHQYDVR0OBBYEFNwuH9FhN3nkq9XVsxJxaD1qaJwiMB8GA1UdIwQYMBaAFNwu +H9FhN3nkq9XVsxJxaD1qaJwiMA0GCSqGSIb3DQEBCwUAA4ICAQCR8EICaEDuw2jA +VC/f7GLDw56KoDEoqoOOpFaWEhCGVrqXctJUMHytGdUdaG/7FELYjQ7ztdGl4wJC +XtzoRlgHNQIw4Lx0SsFDKv/bGtCwr2zD/cuz9X9tAy5ZVp0tLTWMstZDFyySCstd +6IwPS3BD0IL/qMy/pJTAvoe9iuOTe8aPmxadJ2W8esVCgmxcB9CpwYhgROmYhRZf ++I/KARDOJcP5YBugxZfD0yyIMaK9MOzQ0MAS8cE54+X1+NZK3TTN+2/BT+MAi1bi +kvcoskJ3ciNnxz8RFbLEAwW+uxF7Cr+obuf/WEPPm2eggAe2HcqtbepBEX4tdJP7 +wry+UUTF72glJ4DjyKDUEuzZpTcdN3y0kcra1LGWge9oXHYQSa9+pTeAsRxSvTOB +TI/53WXZFM2KJVj04sWDpQmQ1GwUY7VA3+vA/MRYfg0UFodUJ25W5HCEuGwyEn6C +MUO+1918oa2u1qsgEu8KwxCMSZY13At1XrFP1U80DhEgB3VDRemjEdqso5nCtnkn +4rnvyOL2NSl6dPrFf4IFYqYK6miyeUcGbvJXqBUzxvd4Sj1Ce2t+/vdG6tHrju+I +aFvowdlxfv1k7/9nR4hYJS8+hge9+6jlgqispdNpQ80xiEmEU5LAsTkbOYMBMMTy +qfrQA71yN2BWHzZ8vTmR9W0Nv3vXkg== +-----END CERTIFICATE----- + +# Issuer: CN=ANF Secure Server Root CA O=ANF Autoridad de Certificacion OU=ANF CA Raiz +# Subject: CN=ANF Secure Server Root CA O=ANF Autoridad de Certificacion OU=ANF CA Raiz +# Label: "ANF Secure Server Root CA" +# Serial: 996390341000653745 +# MD5 Fingerprint: 26:a6:44:5a:d9:af:4e:2f:b2:1d:b6:65:b0:4e:e8:96 +# SHA1 Fingerprint: 5b:6e:68:d0:cc:15:b6:a0:5f:1e:c1:5f:ae:02:fc:6b:2f:5d:6f:74 +# SHA256 Fingerprint: fb:8f:ec:75:91:69:b9:10:6b:1e:51:16:44:c6:18:c5:13:04:37:3f:6c:06:43:08:8d:8b:ef:fd:1b:99:75:99 +-----BEGIN CERTIFICATE----- 
+MIIF7zCCA9egAwIBAgIIDdPjvGz5a7EwDQYJKoZIhvcNAQELBQAwgYQxEjAQBgNV +BAUTCUc2MzI4NzUxMDELMAkGA1UEBhMCRVMxJzAlBgNVBAoTHkFORiBBdXRvcmlk +YWQgZGUgQ2VydGlmaWNhY2lvbjEUMBIGA1UECxMLQU5GIENBIFJhaXoxIjAgBgNV +BAMTGUFORiBTZWN1cmUgU2VydmVyIFJvb3QgQ0EwHhcNMTkwOTA0MTAwMDM4WhcN +MzkwODMwMTAwMDM4WjCBhDESMBAGA1UEBRMJRzYzMjg3NTEwMQswCQYDVQQGEwJF +UzEnMCUGA1UEChMeQU5GIEF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uMRQwEgYD +VQQLEwtBTkYgQ0EgUmFpejEiMCAGA1UEAxMZQU5GIFNlY3VyZSBTZXJ2ZXIgUm9v +dCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANvrayvmZFSVgpCj +cqQZAZ2cC4Ffc0m6p6zzBE57lgvsEeBbphzOG9INgxwruJ4dfkUyYA8H6XdYfp9q +yGFOtibBTI3/TO80sh9l2Ll49a2pcbnvT1gdpd50IJeh7WhM3pIXS7yr/2WanvtH +2Vdy8wmhrnZEE26cLUQ5vPnHO6RYPUG9tMJJo8gN0pcvB2VSAKduyK9o7PQUlrZX +H1bDOZ8rbeTzPvY1ZNoMHKGESy9LS+IsJJ1tk0DrtSOOMspvRdOoiXsezx76W0OL +zc2oD2rKDF65nkeP8Nm2CgtYZRczuSPkdxl9y0oukntPLxB3sY0vaJxizOBQ+OyR +p1RMVwnVdmPF6GUe7m1qzwmd+nxPrWAI/VaZDxUse6mAq4xhj0oHdkLePfTdsiQz +W7i1o0TJrH93PB0j7IKppuLIBkwC/qxcmZkLLxCKpvR/1Yd0DVlJRfbwcVw5Kda/ +SiOL9V8BY9KHcyi1Swr1+KuCLH5zJTIdC2MKF4EA/7Z2Xue0sUDKIbvVgFHlSFJn +LNJhiQcND85Cd8BEc5xEUKDbEAotlRyBr+Qc5RQe8TZBAQIvfXOn3kLMTOmJDVb3 +n5HUA8ZsyY/b2BzgQJhdZpmYgG4t/wHFzstGH6wCxkPmrqKEPMVOHj1tyRRM4y5B +u8o5vzY8KhmqQYdOpc5LMnndkEl/AgMBAAGjYzBhMB8GA1UdIwQYMBaAFJxf0Gxj +o1+TypOYCK2Mh6UsXME3MB0GA1UdDgQWBBScX9BsY6Nfk8qTmAitjIelLFzBNzAO +BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQsFAAOC +AgEATh65isagmD9uw2nAalxJUqzLK114OMHVVISfk/CHGT0sZonrDUL8zPB1hT+L +9IBdeeUXZ701guLyPI59WzbLWoAAKfLOKyzxj6ptBZNscsdW699QIyjlRRA96Gej +rw5VD5AJYu9LWaL2U/HANeQvwSS9eS9OICI7/RogsKQOLHDtdD+4E5UGUcjohybK +pFtqFiGS3XNgnhAY3jyB6ugYw3yJ8otQPr0R4hUDqDZ9MwFsSBXXiJCZBMXM5gf0 +vPSQ7RPi6ovDj6MzD8EpTBNO2hVWcXNyglD2mjN8orGoGjR0ZVzO0eurU+AagNjq +OknkJjCb5RyKqKkVMoaZkgoQI1YS4PbOTOK7vtuNknMBZi9iPrJyJ0U27U1W45eZ +/zo1PqVUSlJZS2Db7v54EX9K3BR5YLZrZAPbFYPhor72I5dQ8AkzNqdxliXzuUJ9 +2zg/LFis6ELhDtjTO0wugumDLmsx2d1Hhk9tl5EuT+IocTUW0fJz/iUrB0ckYyfI ++PbZa/wSMVYIwFNCr5zQM378BvAxRAMU8Vjq8moNqRGyg77FGr8H6lnco4g175x2 +MjxNBiLOFeXdntiP2t7SxDnlF4HPOEfrf4htWRvfn0IUrn7PqLBmZdo3r5+qPeoo +tt7VMVgWglvquxl1AnMaykgaIZOQCo6ThKd9OyMYkomgjaw= +-----END CERTIFICATE----- + +# Issuer: CN=Certum EC-384 CA O=Asseco Data Systems S.A. OU=Certum Certification Authority +# Subject: CN=Certum EC-384 CA O=Asseco Data Systems S.A. 
OU=Certum Certification Authority +# Label: "Certum EC-384 CA" +# Serial: 160250656287871593594747141429395092468 +# MD5 Fingerprint: b6:65:b3:96:60:97:12:a1:ec:4e:e1:3d:a3:c6:c9:f1 +# SHA1 Fingerprint: f3:3e:78:3c:ac:df:f4:a2:cc:ac:67:55:69:56:d7:e5:16:3c:e1:ed +# SHA256 Fingerprint: 6b:32:80:85:62:53:18:aa:50:d1:73:c9:8d:8b:da:09:d5:7e:27:41:3d:11:4c:f7:87:a0:f5:d0:6c:03:0c:f6 +-----BEGIN CERTIFICATE----- +MIICZTCCAeugAwIBAgIQeI8nXIESUiClBNAt3bpz9DAKBggqhkjOPQQDAzB0MQsw +CQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBTLkEuMScw +JQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxGTAXBgNVBAMT +EENlcnR1bSBFQy0zODQgQ0EwHhcNMTgwMzI2MDcyNDU0WhcNNDMwMzI2MDcyNDU0 +WjB0MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBT +LkEuMScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxGTAX +BgNVBAMTEENlcnR1bSBFQy0zODQgQ0EwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAATE +KI6rGFtqvm5kN2PkzeyrOvfMobgOgknXhimfoZTy42B4mIF4Bk3y7JoOV2CDn7Tm +Fy8as10CW4kjPMIRBSqniBMY81CE1700LCeJVf/OTOffph8oxPBUw7l8t1Ot68Kj +QjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI0GZnQkdjrzife81r1HfS+8 +EF9LMA4GA1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNoADBlAjADVS2m5hjEfO/J +UG7BJw+ch69u1RsIGL2SKcHvlJF40jocVYli5RsJHrpka/F2tNQCMQC0QoSZ/6vn +nvuRlydd3LBbMHHOXjgaatkl5+r3YZJW+OraNsKHZZYuciUvf9/DE8k= +-----END CERTIFICATE----- + +# Issuer: CN=Certum Trusted Root CA O=Asseco Data Systems S.A. OU=Certum Certification Authority +# Subject: CN=Certum Trusted Root CA O=Asseco Data Systems S.A. OU=Certum Certification Authority +# Label: "Certum Trusted Root CA" +# Serial: 40870380103424195783807378461123655149 +# MD5 Fingerprint: 51:e1:c2:e7:fe:4c:84:af:59:0e:2f:f4:54:6f:ea:29 +# SHA1 Fingerprint: c8:83:44:c0:18:ae:9f:cc:f1:87:b7:8f:22:d1:c5:d7:45:84:ba:e5 +# SHA256 Fingerprint: fe:76:96:57:38:55:77:3e:37:a9:5e:7a:d4:d9:cc:96:c3:01:57:c1:5d:31:76:5b:a9:b1:57:04:e1:ae:78:fd +-----BEGIN CERTIFICATE----- +MIIFwDCCA6igAwIBAgIQHr9ZULjJgDdMBvfrVU+17TANBgkqhkiG9w0BAQ0FADB6 +MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBTLkEu +MScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxHzAdBgNV +BAMTFkNlcnR1bSBUcnVzdGVkIFJvb3QgQ0EwHhcNMTgwMzE2MTIxMDEzWhcNNDMw +MzE2MTIxMDEzWjB6MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEg +U3lzdGVtcyBTLkEuMScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRo +b3JpdHkxHzAdBgNVBAMTFkNlcnR1bSBUcnVzdGVkIFJvb3QgQ0EwggIiMA0GCSqG +SIb3DQEBAQUAA4ICDwAwggIKAoICAQDRLY67tzbqbTeRn06TpwXkKQMlzhyC93yZ +n0EGze2jusDbCSzBfN8pfktlL5On1AFrAygYo9idBcEq2EXxkd7fO9CAAozPOA/q +p1x4EaTByIVcJdPTsuclzxFUl6s1wB52HO8AU5853BSlLCIls3Jy/I2z5T4IHhQq +NwuIPMqw9MjCoa68wb4pZ1Xi/K1ZXP69VyywkI3C7Te2fJmItdUDmj0VDT06qKhF +8JVOJVkdzZhpu9PMMsmN74H+rX2Ju7pgE8pllWeg8xn2A1bUatMn4qGtg/BKEiJ3 +HAVz4hlxQsDsdUaakFjgao4rpUYwBI4Zshfjvqm6f1bxJAPXsiEodg42MEx51UGa +mqi4NboMOvJEGyCI98Ul1z3G4z5D3Yf+xOr1Uz5MZf87Sst4WmsXXw3Hw09Omiqi +7VdNIuJGmj8PkTQkfVXjjJU30xrwCSss0smNtA0Aq2cpKNgB9RkEth2+dv5yXMSF +ytKAQd8FqKPVhJBPC/PgP5sZ0jeJP/J7UhyM9uH3PAeXjA6iWYEMspA90+NZRu0P +qafegGtaqge2Gcu8V/OXIXoMsSt0Puvap2ctTMSYnjYJdmZm/Bo/6khUHL4wvYBQ +v3y1zgD2DGHZ5yQD4OMBgQ692IU0iL2yNqh7XAjlRICMb/gv1SHKHRzQ+8S1h9E6 +Tsd2tTVItQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBSM+xx1 +vALTn04uSNn5YFSqxLNP+jAOBgNVHQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQENBQAD +ggIBAEii1QALLtA/vBzVtVRJHlpr9OTy4EA34MwUe7nJ+jW1dReTagVphZzNTxl4 +WxmB82M+w85bj/UvXgF2Ez8sALnNllI5SW0ETsXpD4YN4fqzX4IS8TrOZgYkNCvo +zMrnadyHncI013nR03e4qllY/p0m+jiGPp2Kh2RX5Rc64vmNueMzeMGQ2Ljdt4NR +5MTMI9UGfOZR0800McD2RrsLrfw9EAUqO0qRJe6M1ISHgCq8CYyqOhNf6DR5UMEQ +GfnTKB7U0VEwKbOukGfWHwpjscWpxkIxYxeU72nLL/qMFH3EQxiJ2fAyQOaA4kZf 
+5ePBAFmo+eggvIksDkc0C+pXwlM2/KfUrzHN/gLldfq5Jwn58/U7yn2fqSLLiMmq +0Uc9NneoWWRrJ8/vJ8HjJLWG965+Mk2weWjROeiQWMODvA8s1pfrzgzhIMfatz7D +P78v3DSk+yshzWePS/Tj6tQ/50+6uaWTRRxmHyH6ZF5v4HaUMst19W7l9o/HuKTM +qJZ9ZPskWkoDbGs4xugDQ5r3V7mzKWmTOPQD8rv7gmsHINFSH5pkAnuYZttcTVoP +0ISVoDwUQwbKytu4QTbaakRnh6+v40URFWkIsr4WOZckbxJF0WddCajJFdr60qZf +E2Efv4WstK2tBZQIgx51F9NxO5NQI1mg7TyRVJ12AMXDuDjb +-----END CERTIFICATE----- + +# Issuer: CN=TunTrust Root CA O=Agence Nationale de Certification Electronique +# Subject: CN=TunTrust Root CA O=Agence Nationale de Certification Electronique +# Label: "TunTrust Root CA" +# Serial: 108534058042236574382096126452369648152337120275 +# MD5 Fingerprint: 85:13:b9:90:5b:36:5c:b6:5e:b8:5a:f8:e0:31:57:b4 +# SHA1 Fingerprint: cf:e9:70:84:0f:e0:73:0f:9d:f6:0c:7f:2c:4b:ee:20:46:34:9c:bb +# SHA256 Fingerprint: 2e:44:10:2a:b5:8c:b8:54:19:45:1c:8e:19:d9:ac:f3:66:2c:af:bc:61:4b:6a:53:96:0a:30:f7:d0:e2:eb:41 +-----BEGIN CERTIFICATE----- +MIIFszCCA5ugAwIBAgIUEwLV4kBMkkaGFmddtLu7sms+/BMwDQYJKoZIhvcNAQEL +BQAwYTELMAkGA1UEBhMCVE4xNzA1BgNVBAoMLkFnZW5jZSBOYXRpb25hbGUgZGUg +Q2VydGlmaWNhdGlvbiBFbGVjdHJvbmlxdWUxGTAXBgNVBAMMEFR1blRydXN0IFJv +b3QgQ0EwHhcNMTkwNDI2MDg1NzU2WhcNNDQwNDI2MDg1NzU2WjBhMQswCQYDVQQG +EwJUTjE3MDUGA1UECgwuQWdlbmNlIE5hdGlvbmFsZSBkZSBDZXJ0aWZpY2F0aW9u +IEVsZWN0cm9uaXF1ZTEZMBcGA1UEAwwQVHVuVHJ1c3QgUm9vdCBDQTCCAiIwDQYJ +KoZIhvcNAQEBBQADggIPADCCAgoCggIBAMPN0/y9BFPdDCA61YguBUtB9YOCfvdZ +n56eY+hz2vYGqU8ftPkLHzmMmiDQfgbU7DTZhrx1W4eI8NLZ1KMKsmwb60ksPqxd +2JQDoOw05TDENX37Jk0bbjBU2PWARZw5rZzJJQRNmpA+TkBuimvNKWfGzC3gdOgF +VwpIUPp6Q9p+7FuaDmJ2/uqdHYVy7BG7NegfJ7/Boce7SBbdVtfMTqDhuazb1YMZ +GoXRlJfXyqNlC/M4+QKu3fZnz8k/9YosRxqZbwUN/dAdgjH8KcwAWJeRTIAAHDOF +li/LQcKLEITDCSSJH7UP2dl3RxiSlGBcx5kDPP73lad9UKGAwqmDrViWVSHbhlnU +r8a83YFuB9tgYv7sEG7aaAH0gxupPqJbI9dkxt/con3YS7qC0lH4Zr8GRuR5KiY2 +eY8fTpkdso8MDhz/yV3A/ZAQprE38806JG60hZC/gLkMjNWb1sjxVj8agIl6qeIb +MlEsPvLfe/ZdeikZjuXIvTZxi11Mwh0/rViizz1wTaZQmCXcI/m4WEEIcb9PuISg +jwBUFfyRbVinljvrS5YnzWuioYasDXxU5mZMZl+QviGaAkYt5IPCgLnPSz7ofzwB +7I9ezX/SKEIBlYrilz0QIX32nRzFNKHsLA4KUiwSVXAkPcvCFDVDXSdOvsC9qnyW +5/yeYa1E0wCXAgMBAAGjYzBhMB0GA1UdDgQWBBQGmpsfU33x9aTI04Y+oXNZtPdE +ITAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFAaamx9TffH1pMjThj6hc1m0 +90QhMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAqgVutt0Vyb+z +xiD2BkewhpMl0425yAA/l/VSJ4hxyXT968pk21vvHl26v9Hr7lxpuhbI87mP0zYu +QEkHDVneixCwSQXi/5E/S7fdAo74gShczNxtr18UnH1YeA32gAm56Q6XKRm4t+v4 +FstVEuTGfbvE7Pi1HE4+Z7/FXxttbUcoqgRYYdZ2vyJ/0Adqp2RT8JeNnYA/u8EH +22Wv5psymsNUk8QcCMNE+3tjEUPRahphanltkE8pjkcFwRJpadbGNjHh/PqAulxP +xOu3Mqz4dWEX1xAZufHSCe96Qp1bWgvUxpVOKs7/B9dPfhgGiPEZtdmYu65xxBzn +dFlY7wyJz4sfdZMaBBSSSFCp61cpABbjNhzI+L/wM9VBD8TMPN3pM0MBkRArHtG5 +Xc0yGYuPjCB31yLEQtyEFpslbei0VXF/sHyz03FJuc9SpAQ/3D2gu68zngowYI7b +nV2UqL1g52KAdoGDDIzMMEZJ4gzSqK/rYXHv5yJiqfdcZGyfFoxnNidF9Ql7v/YQ +CvGwjVRDjAS6oz/v4jXH+XTgbzRB0L9zZVcg+ZtnemZoJE6AZb0QmQZZ8mWvuMZH +u/2QeItBcy6vVR/cO5JyboTT0GFMDcx2V+IthSIVNg3rAZ3r2OvEhJn7wAzMMujj +d9qDRIueVSjAi1jTkD5OGwDxFa2DK5o= +-----END CERTIFICATE----- + +# Issuer: CN=HARICA TLS RSA Root CA 2021 O=Hellenic Academic and Research Institutions CA +# Subject: CN=HARICA TLS RSA Root CA 2021 O=Hellenic Academic and Research Institutions CA +# Label: "HARICA TLS RSA Root CA 2021" +# Serial: 76817823531813593706434026085292783742 +# MD5 Fingerprint: 65:47:9b:58:86:dd:2c:f0:fc:a2:84:1f:1e:96:c4:91 +# SHA1 Fingerprint: 02:2d:05:82:fa:88:ce:14:0c:06:79:de:7f:14:10:e9:45:d7:a5:6d +# SHA256 Fingerprint: d9:5d:0e:8e:da:79:52:5b:f9:be:b1:1b:14:d2:10:0d:32:94:98:5f:0c:62:d9:fa:bd:9c:d9:99:ec:cb:7b:1d +-----BEGIN 
CERTIFICATE----- +MIIFpDCCA4ygAwIBAgIQOcqTHO9D88aOk8f0ZIk4fjANBgkqhkiG9w0BAQsFADBs +MQswCQYDVQQGEwJHUjE3MDUGA1UECgwuSGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl +c2VhcmNoIEluc3RpdHV0aW9ucyBDQTEkMCIGA1UEAwwbSEFSSUNBIFRMUyBSU0Eg +Um9vdCBDQSAyMDIxMB4XDTIxMDIxOTEwNTUzOFoXDTQ1MDIxMzEwNTUzN1owbDEL +MAkGA1UEBhMCR1IxNzA1BgNVBAoMLkhlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNl +YXJjaCBJbnN0aXR1dGlvbnMgQ0ExJDAiBgNVBAMMG0hBUklDQSBUTFMgUlNBIFJv +b3QgQ0EgMjAyMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAIvC569l +mwVnlskNJLnQDmT8zuIkGCyEf3dRywQRNrhe7Wlxp57kJQmXZ8FHws+RFjZiPTgE +4VGC/6zStGndLuwRo0Xua2s7TL+MjaQenRG56Tj5eg4MmOIjHdFOY9TnuEFE+2uv +a9of08WRiFukiZLRgeaMOVig1mlDqa2YUlhu2wr7a89o+uOkXjpFc5gH6l8Cct4M +pbOfrqkdtx2z/IpZ525yZa31MJQjB/OCFks1mJxTuy/K5FrZx40d/JiZ+yykgmvw +Kh+OC19xXFyuQnspiYHLA6OZyoieC0AJQTPb5lh6/a6ZcMBaD9YThnEvdmn8kN3b +LW7R8pv1GmuebxWMevBLKKAiOIAkbDakO/IwkfN4E8/BPzWr8R0RI7VDIp4BkrcY +AuUR0YLbFQDMYTfBKnya4dC6s1BG7oKsnTH4+yPiAwBIcKMJJnkVU2DzOFytOOqB +AGMUuTNe3QvboEUHGjMJ+E20pwKmafTCWQWIZYVWrkvL4N48fS0ayOn7H6NhStYq +E613TBoYm5EPWNgGVMWX+Ko/IIqmhaZ39qb8HOLubpQzKoNQhArlT4b4UEV4AIHr +W2jjJo3Me1xR9BQsQL4aYB16cmEdH2MtiKrOokWQCPxrvrNQKlr9qEgYRtaQQJKQ +CoReaDH46+0N0x3GfZkYVVYnZS6NRcUk7M7jAgMBAAGjQjBAMA8GA1UdEwEB/wQF +MAMBAf8wHQYDVR0OBBYEFApII6ZgpJIKM+qTW8VX6iVNvRLuMA4GA1UdDwEB/wQE +AwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAPpBIqm5iFSVmewzVjIuJndftTgfvnNAU +X15QvWiWkKQUEapobQk1OUAJ2vQJLDSle1mESSmXdMgHHkdt8s4cUCbjnj1AUz/3 +f5Z2EMVGpdAgS1D0NTsY9FVqQRtHBmg8uwkIYtlfVUKqrFOFrJVWNlar5AWMxaja +H6NpvVMPxP/cyuN+8kyIhkdGGvMA9YCRotxDQpSbIPDRzbLrLFPCU3hKTwSUQZqP +JzLB5UkZv/HywouoCjkxKLR9YjYsTewfM7Z+d21+UPCfDtcRj88YxeMn/ibvBZ3P +zzfF0HvaO7AWhAw6k9a+F9sPPg4ZeAnHqQJyIkv3N3a6dcSFA1pj1bF1BcK5vZSt +jBWZp5N99sXzqnTPBIWUmAD04vnKJGW/4GKvyMX6ssmeVkjaef2WdhW+o45WxLM0 +/L5H9MG0qPzVMIho7suuyWPEdr6sOBjhXlzPrjoiUevRi7PzKzMHVIf6tLITe7pT +BGIBnfHAT+7hOtSLIBD6Alfm78ELt5BGnBkpjNxvoEppaZS3JGWg/6w/zgH7IS79 +aPib8qXPMThcFarmlwDB31qlpzmq6YR/PFGoOtmUW4y/Twhx5duoXNTSpv4Ao8YW +xw/ogM4cKGR0GQjTQuPOAF1/sdwTsOEFy9EgqoZ0njnnkf3/W9b3raYvAwtt41dU +63ZTGI0RmLo= +-----END CERTIFICATE----- + +# Issuer: CN=HARICA TLS ECC Root CA 2021 O=Hellenic Academic and Research Institutions CA +# Subject: CN=HARICA TLS ECC Root CA 2021 O=Hellenic Academic and Research Institutions CA +# Label: "HARICA TLS ECC Root CA 2021" +# Serial: 137515985548005187474074462014555733966 +# MD5 Fingerprint: ae:f7:4c:e5:66:35:d1:b7:9b:8c:22:93:74:d3:4b:b0 +# SHA1 Fingerprint: bc:b0:c1:9d:e9:98:92:70:19:38:57:e9:8d:a7:b4:5d:6e:ee:01:48 +# SHA256 Fingerprint: 3f:99:cc:47:4a:cf:ce:4d:fe:d5:87:94:66:5e:47:8d:15:47:73:9f:2e:78:0f:1b:b4:ca:9b:13:30:97:d4:01 +-----BEGIN CERTIFICATE----- +MIICVDCCAdugAwIBAgIQZ3SdjXfYO2rbIvT/WeK/zjAKBggqhkjOPQQDAzBsMQsw +CQYDVQQGEwJHUjE3MDUGA1UECgwuSGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJlc2Vh +cmNoIEluc3RpdHV0aW9ucyBDQTEkMCIGA1UEAwwbSEFSSUNBIFRMUyBFQ0MgUm9v +dCBDQSAyMDIxMB4XDTIxMDIxOTExMDExMFoXDTQ1MDIxMzExMDEwOVowbDELMAkG +A1UEBhMCR1IxNzA1BgNVBAoMLkhlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJj +aCBJbnN0aXR1dGlvbnMgQ0ExJDAiBgNVBAMMG0hBUklDQSBUTFMgRUNDIFJvb3Qg +Q0EgMjAyMTB2MBAGByqGSM49AgEGBSuBBAAiA2IABDgI/rGgltJ6rK9JOtDA4MM7 +KKrxcm1lAEeIhPyaJmuqS7psBAqIXhfyVYf8MLA04jRYVxqEU+kw2anylnTDUR9Y +STHMmE5gEYd103KUkE+bECUqqHgtvpBBWJAVcqeht6NCMEAwDwYDVR0TAQH/BAUw +AwEB/zAdBgNVHQ4EFgQUyRtTgRL+BNUW0aq8mm+3oJUZbsowDgYDVR0PAQH/BAQD +AgGGMAoGCCqGSM49BAMDA2cAMGQCMBHervjcToiwqfAircJRQO9gcS3ujwLEXQNw +SaSS6sUUiHCm0w2wqsosQJz76YJumgIwK0eaB8bRwoF8yguWGEEbo/QwCZ61IygN +nxS2PFOiTAZpffpskcYqSUXm7LcT4Tps +-----END CERTIFICATE----- + +# Issuer: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 +# Subject: 
CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 +# Label: "Autoridad de Certificacion Firmaprofesional CIF A62634068" +# Serial: 1977337328857672817 +# MD5 Fingerprint: 4e:6e:9b:54:4c:ca:b7:fa:48:e4:90:b1:15:4b:1c:a3 +# SHA1 Fingerprint: 0b:be:c2:27:22:49:cb:39:aa:db:35:5c:53:e3:8c:ae:78:ff:b6:fe +# SHA256 Fingerprint: 57:de:05:83:ef:d2:b2:6e:03:61:da:99:da:9d:f4:64:8d:ef:7e:e8:44:1c:3b:72:8a:fa:9b:cd:e0:f9:b2:6a +-----BEGIN CERTIFICATE----- +MIIGFDCCA/ygAwIBAgIIG3Dp0v+ubHEwDQYJKoZIhvcNAQELBQAwUTELMAkGA1UE +BhMCRVMxQjBABgNVBAMMOUF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIEZpcm1h +cHJvZmVzaW9uYWwgQ0lGIEE2MjYzNDA2ODAeFw0xNDA5MjMxNTIyMDdaFw0zNjA1 +MDUxNTIyMDdaMFExCzAJBgNVBAYTAkVTMUIwQAYDVQQDDDlBdXRvcmlkYWQgZGUg +Q2VydGlmaWNhY2lvbiBGaXJtYXByb2Zlc2lvbmFsIENJRiBBNjI2MzQwNjgwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKlmuO6vj78aI14H9M2uDDUtd9 +thDIAl6zQyrET2qyyhxdKJp4ERppWVevtSBC5IsP5t9bpgOSL/UR5GLXMnE42QQM +cas9UX4PB99jBVzpv5RvwSmCwLTaUbDBPLutN0pcyvFLNg4kq7/DhHf9qFD0sefG +L9ItWY16Ck6WaVICqjaY7Pz6FIMMNx/Jkjd/14Et5cS54D40/mf0PmbR0/RAz15i +NA9wBj4gGFrO93IbJWyTdBSTo3OxDqqHECNZXyAFGUftaI6SEspd/NYrspI8IM/h +X68gvqB2f3bl7BqGYTM+53u0P6APjqK5am+5hyZvQWyIplD9amML9ZMWGxmPsu2b +m8mQ9QEM3xk9Dz44I8kvjwzRAv4bVdZO0I08r0+k8/6vKtMFnXkIoctXMbScyJCy +Z/QYFpM6/EfY0XiWMR+6KwxfXZmtY4laJCB22N/9q06mIqqdXuYnin1oKaPnirja +EbsXLZmdEyRG98Xi2J+Of8ePdG1asuhy9azuJBCtLxTa/y2aRnFHvkLfuwHb9H/T +KI8xWVvTyQKmtFLKbpf7Q8UIJm+K9Lv9nyiqDdVF8xM6HdjAeI9BZzwelGSuewvF +6NkBiDkal4ZkQdU7hwxu+g/GvUgUvzlN1J5Bto+WHWOWk9mVBngxaJ43BjuAiUVh +OSPHG0SjFeUc+JIwuwIDAQABo4HvMIHsMB0GA1UdDgQWBBRlzeurNR4APn7VdMAc +tHNHDhpkLzASBgNVHRMBAf8ECDAGAQH/AgEBMIGmBgNVHSAEgZ4wgZswgZgGBFUd +IAAwgY8wLwYIKwYBBQUHAgEWI2h0dHA6Ly93d3cuZmlybWFwcm9mZXNpb25hbC5j +b20vY3BzMFwGCCsGAQUFBwICMFAeTgBQAGEAcwBlAG8AIABkAGUAIABsAGEAIABC +AG8AbgBhAG4AbwB2AGEAIAA0ADcAIABCAGEAcgBjAGUAbABvAG4AYQAgADAAOAAw +ADEANzAOBgNVHQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQELBQADggIBAHSHKAIrdx9m +iWTtj3QuRhy7qPj4Cx2Dtjqn6EWKB7fgPiDL4QjbEwj4KKE1soCzC1HA01aajTNF +Sa9J8OA9B3pFE1r/yJfY0xgsfZb43aJlQ3CTkBW6kN/oGbDbLIpgD7dvlAceHabJ +hfa9NPhAeGIQcDq+fUs5gakQ1JZBu/hfHAsdCPKxsIl68veg4MSPi3i1O1ilI45P +Vf42O+AMt8oqMEEgtIDNrvx2ZnOorm7hfNoD6JQg5iKj0B+QXSBTFCZX2lSX3xZE +EAEeiGaPcjiT3SC3NL7X8e5jjkd5KAb881lFJWAiMxujX6i6KtoaPc1A6ozuBRWV +1aUsIC+nmCjuRfzxuIgALI9C2lHVnOUTaHFFQ4ueCyE8S1wF3BqfmI7avSKecs2t +CsvMo2ebKHTEm9caPARYpoKdrcd7b/+Alun4jWq9GJAd/0kakFI3ky88Al2CdgtR +5xbHV/g4+afNmyJU72OwFW1TZQNKXkqgsqeOSQBZONXH9IBk9W6VULgRfhVwOEqw +f9DEMnDAGf/JOC0ULGb0QkTmVXYbgBVX/8Cnp6o5qtjTcNAuuuuUavpfNIbnYrX9 +ivAwhZTJryQCL2/W3Wf+47BVTwSYT6RBVuKT0Gro1vP7ZeDOdcQxWQzugsgMYDNK +GbqEZycPvEJdvSRUDewdcAZfpLz6IHxV +-----END CERTIFICATE----- + +# Issuer: CN=vTrus ECC Root CA O=iTrusChina Co.,Ltd. +# Subject: CN=vTrus ECC Root CA O=iTrusChina Co.,Ltd. 
+# Label: "vTrus ECC Root CA" +# Serial: 630369271402956006249506845124680065938238527194 +# MD5 Fingerprint: de:4b:c1:f5:52:8c:9b:43:e1:3e:8f:55:54:17:8d:85 +# SHA1 Fingerprint: f6:9c:db:b0:fc:f6:02:13:b6:52:32:a6:a3:91:3f:16:70:da:c3:e1 +# SHA256 Fingerprint: 30:fb:ba:2c:32:23:8e:2a:98:54:7a:f9:79:31:e5:50:42:8b:9b:3f:1c:8e:eb:66:33:dc:fa:86:c5:b2:7d:d3 +-----BEGIN CERTIFICATE----- +MIICDzCCAZWgAwIBAgIUbmq8WapTvpg5Z6LSa6Q75m0c1towCgYIKoZIzj0EAwMw +RzELMAkGA1UEBhMCQ04xHDAaBgNVBAoTE2lUcnVzQ2hpbmEgQ28uLEx0ZC4xGjAY +BgNVBAMTEXZUcnVzIEVDQyBSb290IENBMB4XDTE4MDczMTA3MjY0NFoXDTQzMDcz +MTA3MjY0NFowRzELMAkGA1UEBhMCQ04xHDAaBgNVBAoTE2lUcnVzQ2hpbmEgQ28u +LEx0ZC4xGjAYBgNVBAMTEXZUcnVzIEVDQyBSb290IENBMHYwEAYHKoZIzj0CAQYF +K4EEACIDYgAEZVBKrox5lkqqHAjDo6LN/llWQXf9JpRCux3NCNtzslt188+cToL0 +v/hhJoVs1oVbcnDS/dtitN9Ti72xRFhiQgnH+n9bEOf+QP3A2MMrMudwpremIFUd +e4BdS49nTPEQo0IwQDAdBgNVHQ4EFgQUmDnNvtiyjPeyq+GtJK97fKHbH88wDwYD +VR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwCgYIKoZIzj0EAwMDaAAwZQIw +V53dVvHH4+m4SVBrm2nDb+zDfSXkV5UTQJtS0zvzQBm8JsctBp61ezaf9SXUY2sA +AjEA6dPGnlaaKsyh2j/IZivTWJwghfqrkYpwcBE4YGQLYgmRWAD5Tfs0aNoJrSEG +GJTO +-----END CERTIFICATE----- + +# Issuer: CN=vTrus Root CA O=iTrusChina Co.,Ltd. +# Subject: CN=vTrus Root CA O=iTrusChina Co.,Ltd. +# Label: "vTrus Root CA" +# Serial: 387574501246983434957692974888460947164905180485 +# MD5 Fingerprint: b8:c9:37:df:fa:6b:31:84:64:c5:ea:11:6a:1b:75:fc +# SHA1 Fingerprint: 84:1a:69:fb:f5:cd:1a:25:34:13:3d:e3:f8:fc:b8:99:d0:c9:14:b7 +# SHA256 Fingerprint: 8a:71:de:65:59:33:6f:42:6c:26:e5:38:80:d0:0d:88:a1:8d:a4:c6:a9:1f:0d:cb:61:94:e2:06:c5:c9:63:87 +-----BEGIN CERTIFICATE----- +MIIFVjCCAz6gAwIBAgIUQ+NxE9izWRRdt86M/TX9b7wFjUUwDQYJKoZIhvcNAQEL +BQAwQzELMAkGA1UEBhMCQ04xHDAaBgNVBAoTE2lUcnVzQ2hpbmEgQ28uLEx0ZC4x +FjAUBgNVBAMTDXZUcnVzIFJvb3QgQ0EwHhcNMTgwNzMxMDcyNDA1WhcNNDMwNzMx +MDcyNDA1WjBDMQswCQYDVQQGEwJDTjEcMBoGA1UEChMTaVRydXNDaGluYSBDby4s +THRkLjEWMBQGA1UEAxMNdlRydXMgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEBBQAD +ggIPADCCAgoCggIBAL1VfGHTuB0EYgWgrmy3cLRB6ksDXhA/kFocizuwZotsSKYc +IrrVQJLuM7IjWcmOvFjai57QGfIvWcaMY1q6n6MLsLOaXLoRuBLpDLvPbmyAhykU +AyyNJJrIZIO1aqwTLDPxn9wsYTwaP3BVm60AUn/PBLn+NvqcwBauYv6WTEN+VRS+ +GrPSbcKvdmaVayqwlHeFXgQPYh1jdfdr58tbmnDsPmcF8P4HCIDPKNsFxhQnL4Z9 +8Cfe/+Z+M0jnCx5Y0ScrUw5XSmXX+6KAYPxMvDVTAWqXcoKv8R1w6Jz1717CbMdH +flqUhSZNO7rrTOiwCcJlwp2dCZtOtZcFrPUGoPc2BX70kLJrxLT5ZOrpGgrIDajt +J8nU57O5q4IikCc9Kuh8kO+8T/3iCiSn3mUkpF3qwHYw03dQ+A0Em5Q2AXPKBlim +0zvc+gRGE1WKyURHuFE5Gi7oNOJ5y1lKCn+8pu8fA2dqWSslYpPZUxlmPCdiKYZN +pGvu/9ROutW04o5IWgAZCfEF2c6Rsffr6TlP9m8EQ5pV9T4FFL2/s1m02I4zhKOQ +UqqzApVg+QxMaPnu1RcN+HFXtSXkKe5lXa/R7jwXC1pDxaWG6iSe4gUH3DRCEpHW +OXSuTEGC2/KmSNGzm/MzqvOmwMVO9fSddmPmAsYiS8GVP1BkLFTltvA8Kc9XAgMB +AAGjQjBAMB0GA1UdDgQWBBRUYnBj8XWEQ1iO0RYgscasGrz2iTAPBgNVHRMBAf8E +BTADAQH/MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAKbqSSaet +8PFww+SX8J+pJdVrnjT+5hpk9jprUrIQeBqfTNqK2uwcN1LgQkv7bHbKJAs5EhWd +nxEt/Hlk3ODg9d3gV8mlsnZwUKT+twpw1aA08XXXTUm6EdGz2OyC/+sOxL9kLX1j +bhd47F18iMjrjld22VkE+rxSH0Ws8HqA7Oxvdq6R2xCOBNyS36D25q5J08FsEhvM +Kar5CKXiNxTKsbhm7xqC5PD48acWabfbqWE8n/Uxy+QARsIvdLGx14HuqCaVvIiv +TDUHKgLKeBRtRytAVunLKmChZwOgzoy8sHJnxDHO2zTlJQNgJXtxmOTAGytfdELS +S8VZCAeHvsXDf+eW2eHcKJfWjwXj9ZtOyh1QRwVTsMo554WgicEFOwE30z9J4nfr +I8iIZjs9OXYhRvHsXyO466JmdXTBQPfYaJqT4i2pLr0cox7IdMakLXogqzu4sEb9 +b91fUlV1YvCXoHzXOP0l382gmxDPi7g4Xl7FtKYCNqEeXxzP4padKar9mK5S4fNB +UvupLnKWnyfjqnN9+BojZns7q2WwMgFLFT49ok8MKzWixtlnEjUwzXYuFrOZnk1P +Ti07NEPhmg4NpGaXutIcSkwsKouLgU9xGqndXHt7CMUADTdA43x7VF8vhV929ven +sBxXVsFy6K2ir40zSbofitzmdHxghm+Hl3s= +-----END CERTIFICATE----- + +# 
Issuer: CN=ISRG Root X2 O=Internet Security Research Group +# Subject: CN=ISRG Root X2 O=Internet Security Research Group +# Label: "ISRG Root X2" +# Serial: 87493402998870891108772069816698636114 +# MD5 Fingerprint: d3:9e:c4:1e:23:3c:a6:df:cf:a3:7e:6d:e0:14:e6:e5 +# SHA1 Fingerprint: bd:b1:b9:3c:d5:97:8d:45:c6:26:14:55:f8:db:95:c7:5a:d1:53:af +# SHA256 Fingerprint: 69:72:9b:8e:15:a8:6e:fc:17:7a:57:af:b7:17:1d:fc:64:ad:d2:8c:2f:ca:8c:f1:50:7e:34:45:3c:cb:14:70 +-----BEGIN CERTIFICATE----- +MIICGzCCAaGgAwIBAgIQQdKd0XLq7qeAwSxs6S+HUjAKBggqhkjOPQQDAzBPMQsw +CQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJuZXQgU2VjdXJpdHkgUmVzZWFyY2gg +R3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBYMjAeFw0yMDA5MDQwMDAwMDBaFw00 +MDA5MTcxNjAwMDBaME8xCzAJBgNVBAYTAlVTMSkwJwYDVQQKEyBJbnRlcm5ldCBT +ZWN1cml0eSBSZXNlYXJjaCBHcm91cDEVMBMGA1UEAxMMSVNSRyBSb290IFgyMHYw +EAYHKoZIzj0CAQYFK4EEACIDYgAEzZvVn4CDCuwJSvMWSj5cz3es3mcFDR0HttwW ++1qLFNvicWDEukWVEYmO6gbf9yoWHKS5xcUy4APgHoIYOIvXRdgKam7mAHf7AlF9 +ItgKbppbd9/w+kHsOdx1ymgHDB/qo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0T +AQH/BAUwAwEB/zAdBgNVHQ4EFgQUfEKWrt5LSDv6kviejM9ti6lyN5UwCgYIKoZI +zj0EAwMDaAAwZQIwe3lORlCEwkSHRhtFcP9Ymd70/aTSVaYgLXTWNLxBo1BfASdW +tL4ndQavEi51mI38AjEAi/V3bNTIZargCyzuFJ0nN6T5U6VR5CmD1/iQMVtCnwr1 +/q4AaOeMSQ+2b1tbFfLn +-----END CERTIFICATE----- + +# Issuer: CN=HiPKI Root CA - G1 O=Chunghwa Telecom Co., Ltd. +# Subject: CN=HiPKI Root CA - G1 O=Chunghwa Telecom Co., Ltd. +# Label: "HiPKI Root CA - G1" +# Serial: 60966262342023497858655262305426234976 +# MD5 Fingerprint: 69:45:df:16:65:4b:e8:68:9a:8f:76:5f:ff:80:9e:d3 +# SHA1 Fingerprint: 6a:92:e4:a8:ee:1b:ec:96:45:37:e3:29:57:49:cd:96:e3:e5:d2:60 +# SHA256 Fingerprint: f0:15:ce:3c:c2:39:bf:ef:06:4b:e9:f1:d2:c4:17:e1:a0:26:4a:0a:94:be:1f:0c:8d:12:18:64:eb:69:49:cc +-----BEGIN CERTIFICATE----- +MIIFajCCA1KgAwIBAgIQLd2szmKXlKFD6LDNdmpeYDANBgkqhkiG9w0BAQsFADBP +MQswCQYDVQQGEwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0 +ZC4xGzAZBgNVBAMMEkhpUEtJIFJvb3QgQ0EgLSBHMTAeFw0xOTAyMjIwOTQ2MDRa +Fw0zNzEyMzExNTU5NTlaME8xCzAJBgNVBAYTAlRXMSMwIQYDVQQKDBpDaHVuZ2h3 +YSBUZWxlY29tIENvLiwgTHRkLjEbMBkGA1UEAwwSSGlQS0kgUm9vdCBDQSAtIEcx +MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA9B5/UnMyDHPkvRN0o9Qw +qNCuS9i233VHZvR85zkEHmpwINJaR3JnVfSl6J3VHiGh8Ge6zCFovkRTv4354twv +Vcg3Px+kwJyz5HdcoEb+d/oaoDjq7Zpy3iu9lFc6uux55199QmQ5eiY29yTw1S+6 +lZgRZq2XNdZ1AYDgr/SEYYwNHl98h5ZeQa/rh+r4XfEuiAU+TCK72h8q3VJGZDnz +Qs7ZngyzsHeXZJzA9KMuH5UHsBffMNsAGJZMoYFL3QRtU6M9/Aes1MU3guvklQgZ +KILSQjqj2FPseYlgSGDIcpJQ3AOPgz+yQlda22rpEZfdhSi8MEyr48KxRURHH+CK +FgeW0iEPU8DtqX7UTuybCeyvQqww1r/REEXgphaypcXTT3OUM3ECoWqj1jOXTyFj +HluP2cFeRXF3D4FdXyGarYPM+l7WjSNfGz1BryB1ZlpK9p/7qxj3ccC2HTHsOyDr +y+K49a6SsvfhhEvyovKTmiKe0xRvNlS9H15ZFblzqMF8b3ti6RZsR1pl8w4Rm0bZ +/W3c1pzAtH2lsN0/Vm+h+fbkEkj9Bn8SV7apI09bA8PgcSojt/ewsTu8mL3WmKgM +a/aOEmem8rJY5AIJEzypuxC00jBF8ez3ABHfZfjcK0NVvxaXxA/VLGGEqnKG/uY6 +fsI/fe78LxQ+5oXdUG+3Se0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAdBgNV +HQ4EFgQU8ncX+l6o/vY9cdVouslGDDjYr7AwDgYDVR0PAQH/BAQDAgGGMA0GCSqG +SIb3DQEBCwUAA4ICAQBQUfB13HAE4/+qddRxosuej6ip0691x1TPOhwEmSKsxBHi +7zNKpiMdDg1H2DfHb680f0+BazVP6XKlMeJ45/dOlBhbQH3PayFUhuaVevvGyuqc +SE5XCV0vrPSltJczWNWseanMX/mF+lLFjfiRFOs6DRfQUsJ748JzjkZ4Bjgs6Fza +ZsT0pPBWGTMpWmWSBUdGSquEwx4noR8RkpkndZMPvDY7l1ePJlsMu5wP1G4wB9Tc +XzZoZjmDlicmisjEOf6aIW/Vcobpf2Lll07QJNBAsNB1CI69aO4I1258EHBGG3zg +iLKecoaZAeO/n0kZtCW+VmWuF2PlHt/o/0elv+EmBYTksMCv5wiZqAxeJoBF1Pho +L5aPruJKHJwWDBNvOIf2u8g0X5IDUXlwpt/L9ZlNec1OvFefQ05rLisY+GpzjLrF +Ne85akEez3GoorKGB1s6yeHvP2UEgEcyRHCVTjFnanRbEEV16rCf0OY1/k6fi8wr 
+kkVbbiVghUbN0aqwdmaTd5a+g744tiROJgvM7XpWGuDpWsZkrUx6AEhEL7lAuxM+ +vhV4nYWBSipX3tUZQ9rbyltHhoMLP7YNdnhzeSJesYAfz77RP1YQmCuVh6EfnWQU +YDksswBVLuT1sw5XxJFBAJw/6KXf6vb/yPCtbVKoF6ubYfwSUTXkJf2vqmqGOQ== +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4 +# Label: "GlobalSign ECC Root CA - R4" +# Serial: 159662223612894884239637590694 +# MD5 Fingerprint: 26:29:f8:6d:e1:88:bf:a2:65:7f:aa:c4:cd:0f:7f:fc +# SHA1 Fingerprint: 6b:a0:b0:98:e1:71:ef:5a:ad:fe:48:15:80:77:10:f4:bd:6f:0b:28 +# SHA256 Fingerprint: b0:85:d7:0b:96:4f:19:1a:73:e4:af:0d:54:ae:7a:0e:07:aa:fd:af:9b:71:dd:08:62:13:8a:b7:32:5a:24:a2 +-----BEGIN CERTIFICATE----- +MIIB3DCCAYOgAwIBAgINAgPlfvU/k/2lCSGypjAKBggqhkjOPQQDAjBQMSQwIgYD +VQQLExtHbG9iYWxTaWduIEVDQyBSb290IENBIC0gUjQxEzARBgNVBAoTCkdsb2Jh +bFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMTIxMTEzMDAwMDAwWhcNMzgw +MTE5MDMxNDA3WjBQMSQwIgYDVQQLExtHbG9iYWxTaWduIEVDQyBSb290IENBIC0g +UjQxEzARBgNVBAoTCkdsb2JhbFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wWTAT +BgcqhkjOPQIBBggqhkjOPQMBBwNCAAS4xnnTj2wlDp8uORkcA6SumuU5BwkWymOx +uYb4ilfBV85C+nOh92VC/x7BALJucw7/xyHlGKSq2XE/qNS5zowdo0IwQDAOBgNV +HQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUVLB7rUW44kB/ ++wpu+74zyTyjhNUwCgYIKoZIzj0EAwIDRwAwRAIgIk90crlgr/HmnKAWBVBfw147 +bmF0774BxL4YSFlhgjICICadVGNA3jdgUM/I2O2dgq43mLyjj0xMqTQrbO/7lZsm +-----END CERTIFICATE----- + +# Issuer: CN=GTS Root R1 O=Google Trust Services LLC +# Subject: CN=GTS Root R1 O=Google Trust Services LLC +# Label: "GTS Root R1" +# Serial: 159662320309726417404178440727 +# MD5 Fingerprint: 05:fe:d0:bf:71:a8:a3:76:63:da:01:e0:d8:52:dc:40 +# SHA1 Fingerprint: e5:8c:1c:c4:91:3b:38:63:4b:e9:10:6e:e3:ad:8e:6b:9d:d9:81:4a +# SHA256 Fingerprint: d9:47:43:2a:bd:e7:b7:fa:90:fc:2e:6b:59:10:1b:12:80:e0:e1:c7:e4:e4:0f:a3:c6:88:7f:ff:57:a7:f4:cf +-----BEGIN CERTIFICATE----- +MIIFVzCCAz+gAwIBAgINAgPlk28xsBNJiGuiFzANBgkqhkiG9w0BAQwFADBHMQsw +CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU +MBIGA1UEAxMLR1RTIFJvb3QgUjEwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw +MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp +Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjEwggIiMA0GCSqGSIb3DQEBAQUA +A4ICDwAwggIKAoICAQC2EQKLHuOhd5s73L+UPreVp0A8of2C+X0yBoJx9vaMf/vo +27xqLpeXo4xL+Sv2sfnOhB2x+cWX3u+58qPpvBKJXqeqUqv4IyfLpLGcY9vXmX7w +Cl7raKb0xlpHDU0QM+NOsROjyBhsS+z8CZDfnWQpJSMHobTSPS5g4M/SCYe7zUjw +TcLCeoiKu7rPWRnWr4+wB7CeMfGCwcDfLqZtbBkOtdh+JhpFAz2weaSUKK0Pfybl +qAj+lug8aJRT7oM6iCsVlgmy4HqMLnXWnOunVmSPlk9orj2XwoSPwLxAwAtcvfaH +szVsrBhQf4TgTM2S0yDpM7xSma8ytSmzJSq0SPly4cpk9+aCEI3oncKKiPo4Zor8 +Y/kB+Xj9e1x3+naH+uzfsQ55lVe0vSbv1gHR6xYKu44LtcXFilWr06zqkUspzBmk +MiVOKvFlRNACzqrOSbTqn3yDsEB750Orp2yjj32JgfpMpf/VjsPOS+C12LOORc92 +wO1AK/1TD7Cn1TsNsYqiA94xrcx36m97PtbfkSIS5r762DL8EGMUUXLeXdYWk70p +aDPvOmbsB4om3xPXV2V4J95eSRQAogB/mqghtqmxlbCluQ0WEdrHbEg8QOB+DVrN +VjzRlwW5y0vtOUucxD/SVRNuJLDWcfr0wbrM7Rv1/oFB2ACYPTrIrnqYNxgFlQID +AQABo0IwQDAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4E +FgQU5K8rJnEaK0gnhS9SZizv8IkTcT4wDQYJKoZIhvcNAQEMBQADggIBAJ+qQibb +C5u+/x6Wki4+omVKapi6Ist9wTrYggoGxval3sBOh2Z5ofmmWJyq+bXmYOfg6LEe +QkEzCzc9zolwFcq1JKjPa7XSQCGYzyI0zzvFIoTgxQ6KfF2I5DUkzps+GlQebtuy +h6f88/qBVRRiClmpIgUxPoLW7ttXNLwzldMXG+gnoot7TiYaelpkttGsN/H9oPM4 +7HLwEXWdyzRSjeZ2axfG34arJ45JK3VmgRAhpuo+9K4l/3wV3s6MJT/KYnAK9y8J +ZgfIPxz88NtFMN9iiMG1D53Dn0reWVlHxYciNuaCp+0KueIHoI17eko8cdLiA6Ef +MgfdG+RCzgwARWGAtQsgWSl4vflVy2PFPEz0tv/bal8xa5meLMFrUKTX5hgUvYU/ 
+Z6tGn6D/Qqc6f1zLXbBwHSs09dR2CQzreExZBfMzQsNhFRAbd03OIozUhfJFfbdT +6u9AWpQKXCBfTkBdYiJ23//OYb2MI3jSNwLgjt7RETeJ9r/tSQdirpLsQBqvFAnZ +0E6yove+7u7Y/9waLd64NnHi/Hm3lCXRSHNboTXns5lndcEZOitHTtNCjv0xyBZm +2tIMPNuzjsmhDYAPexZ3FL//2wmUspO8IFgV6dtxQ/PeEMMA3KgqlbbC1j+Qa3bb +bP6MvPJwNQzcmRk13NfIRmPVNnGuV/u3gm3c +-----END CERTIFICATE----- + +# Issuer: CN=GTS Root R2 O=Google Trust Services LLC +# Subject: CN=GTS Root R2 O=Google Trust Services LLC +# Label: "GTS Root R2" +# Serial: 159662449406622349769042896298 +# MD5 Fingerprint: 1e:39:c0:53:e6:1e:29:82:0b:ca:52:55:36:5d:57:dc +# SHA1 Fingerprint: 9a:44:49:76:32:db:de:fa:d0:bc:fb:5a:7b:17:bd:9e:56:09:24:94 +# SHA256 Fingerprint: 8d:25:cd:97:22:9d:bf:70:35:6b:da:4e:b3:cc:73:40:31:e2:4c:f0:0f:af:cf:d3:2d:c7:6e:b5:84:1c:7e:a8 +-----BEGIN CERTIFICATE----- +MIIFVzCCAz+gAwIBAgINAgPlrsWNBCUaqxElqjANBgkqhkiG9w0BAQwFADBHMQsw +CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU +MBIGA1UEAxMLR1RTIFJvb3QgUjIwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw +MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp +Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjIwggIiMA0GCSqGSIb3DQEBAQUA +A4ICDwAwggIKAoICAQDO3v2m++zsFDQ8BwZabFn3GTXd98GdVarTzTukk3LvCvpt +nfbwhYBboUhSnznFt+4orO/LdmgUud+tAWyZH8QiHZ/+cnfgLFuv5AS/T3KgGjSY +6Dlo7JUle3ah5mm5hRm9iYz+re026nO8/4Piy33B0s5Ks40FnotJk9/BW9BuXvAu +MC6C/Pq8tBcKSOWIm8Wba96wyrQD8Nr0kLhlZPdcTK3ofmZemde4wj7I0BOdre7k +RXuJVfeKH2JShBKzwkCX44ofR5GmdFrS+LFjKBC4swm4VndAoiaYecb+3yXuPuWg +f9RhD1FLPD+M2uFwdNjCaKH5wQzpoeJ/u1U8dgbuak7MkogwTZq9TwtImoS1mKPV ++3PBV2HdKFZ1E66HjucMUQkQdYhMvI35ezzUIkgfKtzra7tEscszcTJGr61K8Yzo +dDqs5xoic4DSMPclQsciOzsSrZYuxsN2B6ogtzVJV+mSSeh2FnIxZyuWfoqjx5RW +Ir9qS34BIbIjMt/kmkRtWVtd9QCgHJvGeJeNkP+byKq0rxFROV7Z+2et1VsRnTKa +G73VululycslaVNVJ1zgyjbLiGH7HrfQy+4W+9OmTN6SpdTi3/UGVN4unUu0kzCq +gc7dGtxRcw1PcOnlthYhGXmy5okLdWTK1au8CcEYof/UVKGFPP0UJAOyh9OktwID +AQABo0IwQDAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4E +FgQUu//KjiOfT5nK2+JopqUVJxce2Q4wDQYJKoZIhvcNAQEMBQADggIBAB/Kzt3H +vqGf2SdMC9wXmBFqiN495nFWcrKeGk6c1SuYJF2ba3uwM4IJvd8lRuqYnrYb/oM8 +0mJhwQTtzuDFycgTE1XnqGOtjHsB/ncw4c5omwX4Eu55MaBBRTUoCnGkJE+M3DyC +B19m3H0Q/gxhswWV7uGugQ+o+MePTagjAiZrHYNSVc61LwDKgEDg4XSsYPWHgJ2u +NmSRXbBoGOqKYcl3qJfEycel/FVL8/B/uWU9J2jQzGv6U53hkRrJXRqWbTKH7QMg +yALOWr7Z6v2yTcQvG99fevX4i8buMTolUVVnjWQye+mew4K6Ki3pHrTgSAai/Gev +HyICc/sgCq+dVEuhzf9gR7A/Xe8bVr2XIZYtCtFenTgCR2y59PYjJbigapordwj6 +xLEokCZYCDzifqrXPW+6MYgKBesntaFJ7qBFVHvmJ2WZICGoo7z7GJa7Um8M7YNR +TOlZ4iBgxcJlkoKM8xAfDoqXvneCbT+PHV28SSe9zE8P4c52hgQjxcCMElv924Sg +JPFI/2R80L5cFtHvma3AH/vLrrw4IgYmZNralw4/KBVEqE8AyvCazM90arQ+POuV +7LXTWtiBmelDGDfrs7vRWGJB82bSj6p4lVQgw1oudCvV0b4YacCs1aTPObpRhANl +6WLAYv7YTVWW4tAR+kg0Eeye7QUd5MjWHYbL +-----END CERTIFICATE----- + +# Issuer: CN=GTS Root R3 O=Google Trust Services LLC +# Subject: CN=GTS Root R3 O=Google Trust Services LLC +# Label: "GTS Root R3" +# Serial: 159662495401136852707857743206 +# MD5 Fingerprint: 3e:e7:9d:58:02:94:46:51:94:e5:e0:22:4a:8b:e7:73 +# SHA1 Fingerprint: ed:e5:71:80:2b:c8:92:b9:5b:83:3c:d2:32:68:3f:09:cd:a0:1e:46 +# SHA256 Fingerprint: 34:d8:a7:3e:e2:08:d9:bc:db:0d:95:65:20:93:4b:4e:40:e6:94:82:59:6e:8b:6f:73:c8:42:6b:01:0a:6f:48 +-----BEGIN CERTIFICATE----- +MIICCTCCAY6gAwIBAgINAgPluILrIPglJ209ZjAKBggqhkjOPQQDAzBHMQswCQYD +VQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEUMBIG +A1UEAxMLR1RTIFJvb3QgUjMwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAwMDAw +WjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2Vz +IExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjMwdjAQBgcqhkjOPQIBBgUrgQQAIgNi 
+AAQfTzOHMymKoYTey8chWEGJ6ladK0uFxh1MJ7x/JlFyb+Kf1qPKzEUURout736G +jOyxfi//qXGdGIRFBEFVbivqJn+7kAHjSxm65FSWRQmx1WyRRK2EE46ajA2ADDL2 +4CejQjBAMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW +BBTB8Sa6oC2uhYHP0/EqEr24Cmf9vDAKBggqhkjOPQQDAwNpADBmAjEA9uEglRR7 +VKOQFhG/hMjqb2sXnh5GmCCbn9MN2azTL818+FsuVbu/3ZL3pAzcMeGiAjEA/Jdm +ZuVDFhOD3cffL74UOO0BzrEXGhF16b0DjyZ+hOXJYKaV11RZt+cRLInUue4X +-----END CERTIFICATE----- + +# Issuer: CN=GTS Root R4 O=Google Trust Services LLC +# Subject: CN=GTS Root R4 O=Google Trust Services LLC +# Label: "GTS Root R4" +# Serial: 159662532700760215368942768210 +# MD5 Fingerprint: 43:96:83:77:19:4d:76:b3:9d:65:52:e4:1d:22:a5:e8 +# SHA1 Fingerprint: 77:d3:03:67:b5:e0:0c:15:f6:0c:38:61:df:7c:e1:3b:92:46:4d:47 +# SHA256 Fingerprint: 34:9d:fa:40:58:c5:e2:63:12:3b:39:8a:e7:95:57:3c:4e:13:13:c8:3f:e6:8f:93:55:6c:d5:e8:03:1b:3c:7d +-----BEGIN CERTIFICATE----- +MIICCTCCAY6gAwIBAgINAgPlwGjvYxqccpBQUjAKBggqhkjOPQQDAzBHMQswCQYD +VQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEUMBIG +A1UEAxMLR1RTIFJvb3QgUjQwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAwMDAw +WjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2Vz +IExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjQwdjAQBgcqhkjOPQIBBgUrgQQAIgNi +AATzdHOnaItgrkO4NcWBMHtLSZ37wWHO5t5GvWvVYRg1rkDdc/eJkTBa6zzuhXyi +QHY7qca4R9gq55KRanPpsXI5nymfopjTX15YhmUPoYRlBtHci8nHc8iMai/lxKvR +HYqjQjBAMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW +BBSATNbrdP9JNqPV2Py1PsVq8JQdjDAKBggqhkjOPQQDAwNpADBmAjEA6ED/g94D +9J+uHXqnLrmvT/aDHQ4thQEd0dlq7A/Cr8deVl5c1RxYIigL9zC2L7F8AjEA8GE8 +p/SgguMh1YQdc4acLa/KNJvxn7kjNuK8YAOdgLOaVsjh4rsUecrNIdSUtUlD +-----END CERTIFICATE----- + +# Issuer: CN=Telia Root CA v2 O=Telia Finland Oyj +# Subject: CN=Telia Root CA v2 O=Telia Finland Oyj +# Label: "Telia Root CA v2" +# Serial: 7288924052977061235122729490515358 +# MD5 Fingerprint: 0e:8f:ac:aa:82:df:85:b1:f4:dc:10:1c:fc:99:d9:48 +# SHA1 Fingerprint: b9:99:cd:d1:73:50:8a:c4:47:05:08:9c:8c:88:fb:be:a0:2b:40:cd +# SHA256 Fingerprint: 24:2b:69:74:2f:cb:1e:5b:2a:bf:98:89:8b:94:57:21:87:54:4e:5b:4d:99:11:78:65:73:62:1f:6a:74:b8:2c +-----BEGIN CERTIFICATE----- +MIIFdDCCA1ygAwIBAgIPAWdfJ9b+euPkrL4JWwWeMA0GCSqGSIb3DQEBCwUAMEQx +CzAJBgNVBAYTAkZJMRowGAYDVQQKDBFUZWxpYSBGaW5sYW5kIE95ajEZMBcGA1UE +AwwQVGVsaWEgUm9vdCBDQSB2MjAeFw0xODExMjkxMTU1NTRaFw00MzExMjkxMTU1 +NTRaMEQxCzAJBgNVBAYTAkZJMRowGAYDVQQKDBFUZWxpYSBGaW5sYW5kIE95ajEZ +MBcGA1UEAwwQVGVsaWEgUm9vdCBDQSB2MjCCAiIwDQYJKoZIhvcNAQEBBQADggIP +ADCCAgoCggIBALLQPwe84nvQa5n44ndp586dpAO8gm2h/oFlH0wnrI4AuhZ76zBq +AMCzdGh+sq/H1WKzej9Qyow2RCRj0jbpDIX2Q3bVTKFgcmfiKDOlyzG4OiIjNLh9 +vVYiQJ3q9HsDrWj8soFPmNB06o3lfc1jw6P23pLCWBnglrvFxKk9pXSW/q/5iaq9 +lRdU2HhE8Qx3FZLgmEKnpNaqIJLNwaCzlrI6hEKNfdWV5Nbb6WLEWLN5xYzTNTOD +n3WhUidhOPFZPY5Q4L15POdslv5e2QJltI5c0BE0312/UqeBAMN/mUWZFdUXyApT +7GPzmX3MaRKGwhfwAZ6/hLzRUssbkmbOpFPlob/E2wnW5olWK8jjfN7j/4nlNW4o +6GwLI1GpJQXrSPjdscr6bAhR77cYbETKJuFzxokGgeWKrLDiKca5JLNrRBH0pUPC +TEPlcDaMtjNXepUugqD0XBCzYYP2AgWGLnwtbNwDRm41k9V6lS/eINhbfpSQBGq6 +WT0EBXWdN6IOLj3rwaRSg/7Qa9RmjtzG6RJOHSpXqhC8fF6CfaamyfItufUXJ63R +DolUK5X6wK0dmBR4M0KGCqlztft0DbcbMBnEWg4cJ7faGND/isgFuvGqHKI3t+ZI +pEYslOqodmJHixBTB0hXbOKSTbauBcvcwUpej6w9GU7C7WB1K9vBykLVAgMBAAGj +YzBhMB8GA1UdIwQYMBaAFHKs5DN5qkWH9v2sHZ7Wxy+G2CQ5MB0GA1UdDgQWBBRy +rOQzeapFh/b9rB2e1scvhtgkOTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUw +AwEB/zANBgkqhkiG9w0BAQsFAAOCAgEAoDtZpwmUPjaE0n4vOaWWl/oRrfxn83EJ +8rKJhGdEr7nv7ZbsnGTbMjBvZ5qsfl+yqwE2foH65IRe0qw24GtixX1LDoJt0nZi +0f6X+J8wfBj5tFJ3gh1229MdqfDBmgC9bXXYfef6xzijnHDoRnkDry5023X4blMM 
+A8iZGok1GTzTyVR8qPAs5m4HeW9q4ebqkYJpCh3DflminmtGFZhb069GHWLIzoBS +SRE/yQQSwxN8PzuKlts8oB4KtItUsiRnDe+Cy748fdHif64W1lZYudogsYMVoe+K +TTJvQS8TUoKU1xrBeKJR3Stwbbca+few4GeXVtt8YVMJAygCQMez2P2ccGrGKMOF +6eLtGpOg3kuYooQ+BXcBlj37tCAPnHICehIv1aO6UXivKitEZU61/Qrowc15h2Er +3oBXRb9n8ZuRXqWk7FlIEA04x7D6w0RtBPV4UBySllva9bguulvP5fBqnUsvWHMt +Ty3EHD70sz+rFQ47GUGKpMFXEmZxTPpT41frYpUJnlTd0cI8Vzy9OK2YZLe4A5pT +VmBds9hCG1xLEooc6+t9xnppxyd/pPiL8uSUZodL6ZQHCRJ5irLrdATczvREWeAW +ysUsWNc8e89ihmpQfTU2Zqf7N+cox9jQraVplI/owd8k+BsHMYeB2F326CjYSlKA +rBPuUBQemMc= +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST BR Root CA 1 2020 O=D-Trust GmbH +# Subject: CN=D-TRUST BR Root CA 1 2020 O=D-Trust GmbH +# Label: "D-TRUST BR Root CA 1 2020" +# Serial: 165870826978392376648679885835942448534 +# MD5 Fingerprint: b5:aa:4b:d5:ed:f7:e3:55:2e:8f:72:0a:f3:75:b8:ed +# SHA1 Fingerprint: 1f:5b:98:f0:e3:b5:f7:74:3c:ed:e6:b0:36:7d:32:cd:f4:09:41:67 +# SHA256 Fingerprint: e5:9a:aa:81:60:09:c2:2b:ff:5b:25:ba:d3:7d:f3:06:f0:49:79:7c:1f:81:d8:5a:b0:89:e6:57:bd:8f:00:44 +-----BEGIN CERTIFICATE----- +MIIC2zCCAmCgAwIBAgIQfMmPK4TX3+oPyWWa00tNljAKBggqhkjOPQQDAzBIMQsw +CQYDVQQGEwJERTEVMBMGA1UEChMMRC1UcnVzdCBHbWJIMSIwIAYDVQQDExlELVRS +VVNUIEJSIFJvb3QgQ0EgMSAyMDIwMB4XDTIwMDIxMTA5NDUwMFoXDTM1MDIxMTA5 +NDQ1OVowSDELMAkGA1UEBhMCREUxFTATBgNVBAoTDEQtVHJ1c3QgR21iSDEiMCAG +A1UEAxMZRC1UUlVTVCBCUiBSb290IENBIDEgMjAyMDB2MBAGByqGSM49AgEGBSuB +BAAiA2IABMbLxyjR+4T1mu9CFCDhQ2tuda38KwOE1HaTJddZO0Flax7mNCq7dPYS +zuht56vkPE4/RAiLzRZxy7+SmfSk1zxQVFKQhYN4lGdnoxwJGT11NIXe7WB9xwy0 +QVK5buXuQqOCAQ0wggEJMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFHOREKv/ +VbNafAkl1bK6CKBrqx9tMA4GA1UdDwEB/wQEAwIBBjCBxgYDVR0fBIG+MIG7MD6g +PKA6hjhodHRwOi8vY3JsLmQtdHJ1c3QubmV0L2NybC9kLXRydXN0X2JyX3Jvb3Rf +Y2FfMV8yMDIwLmNybDB5oHegdYZzbGRhcDovL2RpcmVjdG9yeS5kLXRydXN0Lm5l +dC9DTj1ELVRSVVNUJTIwQlIlMjBSb290JTIwQ0ElMjAxJTIwMjAyMCxPPUQtVHJ1 +c3QlMjBHbWJILEM9REU/Y2VydGlmaWNhdGVyZXZvY2F0aW9ubGlzdDAKBggqhkjO +PQQDAwNpADBmAjEAlJAtE/rhY/hhY+ithXhUkZy4kzg+GkHaQBZTQgjKL47xPoFW +wKrY7RjEsK70PvomAjEA8yjixtsrmfu3Ubgko6SUeho/5jbiA1czijDLgsfWFBHV +dWNbFJWcHwHP2NVypw87 +-----END CERTIFICATE----- + +# Issuer: CN=D-TRUST EV Root CA 1 2020 O=D-Trust GmbH +# Subject: CN=D-TRUST EV Root CA 1 2020 O=D-Trust GmbH +# Label: "D-TRUST EV Root CA 1 2020" +# Serial: 126288379621884218666039612629459926992 +# MD5 Fingerprint: 8c:2d:9d:70:9f:48:99:11:06:11:fb:e9:cb:30:c0:6e +# SHA1 Fingerprint: 61:db:8c:21:59:69:03:90:d8:7c:9c:12:86:54:cf:9d:3d:f4:dd:07 +# SHA256 Fingerprint: 08:17:0d:1a:a3:64:53:90:1a:2f:95:92:45:e3:47:db:0c:8d:37:ab:aa:bc:56:b8:1a:a1:00:dc:95:89:70:db +-----BEGIN CERTIFICATE----- +MIIC2zCCAmCgAwIBAgIQXwJB13qHfEwDo6yWjfv/0DAKBggqhkjOPQQDAzBIMQsw +CQYDVQQGEwJERTEVMBMGA1UEChMMRC1UcnVzdCBHbWJIMSIwIAYDVQQDExlELVRS +VVNUIEVWIFJvb3QgQ0EgMSAyMDIwMB4XDTIwMDIxMTEwMDAwMFoXDTM1MDIxMTA5 +NTk1OVowSDELMAkGA1UEBhMCREUxFTATBgNVBAoTDEQtVHJ1c3QgR21iSDEiMCAG +A1UEAxMZRC1UUlVTVCBFViBSb290IENBIDEgMjAyMDB2MBAGByqGSM49AgEGBSuB +BAAiA2IABPEL3YZDIBnfl4XoIkqbz52Yv7QFJsnL46bSj8WeeHsxiamJrSc8ZRCC +/N/DnU7wMyPE0jL1HLDfMxddxfCxivnvubcUyilKwg+pf3VlSSowZ/Rk99Yad9rD +wpdhQntJraOCAQ0wggEJMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFH8QARY3 +OqQo5FD4pPfsazK2/umLMA4GA1UdDwEB/wQEAwIBBjCBxgYDVR0fBIG+MIG7MD6g +PKA6hjhodHRwOi8vY3JsLmQtdHJ1c3QubmV0L2NybC9kLXRydXN0X2V2X3Jvb3Rf +Y2FfMV8yMDIwLmNybDB5oHegdYZzbGRhcDovL2RpcmVjdG9yeS5kLXRydXN0Lm5l +dC9DTj1ELVRSVVNUJTIwRVYlMjBSb290JTIwQ0ElMjAxJTIwMjAyMCxPPUQtVHJ1 +c3QlMjBHbWJILEM9REU/Y2VydGlmaWNhdGVyZXZvY2F0aW9ubGlzdDAKBggqhkjO +PQQDAwNpADBmAjEAyjzGKnXCXnViOTYAYFqLwZOZzNnbQTs7h5kXO9XMT8oi96CA 
+y/m0sRtW9XLS/BnRAjEAkfcwkz8QRitxpNA7RJvAKQIFskF3UfN5Wp6OFKBOQtJb +gfM0agPnIjhQW+0ZT0MW +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert TLS ECC P384 Root G5 O=DigiCert, Inc. +# Subject: CN=DigiCert TLS ECC P384 Root G5 O=DigiCert, Inc. +# Label: "DigiCert TLS ECC P384 Root G5" +# Serial: 13129116028163249804115411775095713523 +# MD5 Fingerprint: d3:71:04:6a:43:1c:db:a6:59:e1:a8:a3:aa:c5:71:ed +# SHA1 Fingerprint: 17:f3:de:5e:9f:0f:19:e9:8e:f6:1f:32:26:6e:20:c4:07:ae:30:ee +# SHA256 Fingerprint: 01:8e:13:f0:77:25:32:cf:80:9b:d1:b1:72:81:86:72:83:fc:48:c6:e1:3b:e9:c6:98:12:85:4a:49:0c:1b:05 +-----BEGIN CERTIFICATE----- +MIICGTCCAZ+gAwIBAgIQCeCTZaz32ci5PhwLBCou8zAKBggqhkjOPQQDAzBOMQsw +CQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQsIEluYy4xJjAkBgNVBAMTHURp +Z2lDZXJ0IFRMUyBFQ0MgUDM4NCBSb290IEc1MB4XDTIxMDExNTAwMDAwMFoXDTQ2 +MDExNDIzNTk1OVowTjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDkRpZ2lDZXJ0LCBJ +bmMuMSYwJAYDVQQDEx1EaWdpQ2VydCBUTFMgRUNDIFAzODQgUm9vdCBHNTB2MBAG +ByqGSM49AgEGBSuBBAAiA2IABMFEoc8Rl1Ca3iOCNQfN0MsYndLxf3c1TzvdlHJS +7cI7+Oz6e2tYIOyZrsn8aLN1udsJ7MgT9U7GCh1mMEy7H0cKPGEQQil8pQgO4CLp +0zVozptjn4S1mU1YoI71VOeVyaNCMEAwHQYDVR0OBBYEFMFRRVBZqz7nLFr6ICIS +B4CIfBFqMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MAoGCCqGSM49 +BAMDA2gAMGUCMQCJao1H5+z8blUD2WdsJk6Dxv3J+ysTvLd6jLRl0mlpYxNjOyZQ +LgGheQaRnUi/wr4CMEfDFXuxoJGZSZOoPHzoRgaLLPIxAJSdYsiJvRmEFOml+wG4 +DXZDjC5Ty3zfDBeWUA== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert TLS RSA4096 Root G5 O=DigiCert, Inc. +# Subject: CN=DigiCert TLS RSA4096 Root G5 O=DigiCert, Inc. +# Label: "DigiCert TLS RSA4096 Root G5" +# Serial: 11930366277458970227240571539258396554 +# MD5 Fingerprint: ac:fe:f7:34:96:a9:f2:b3:b4:12:4b:e4:27:41:6f:e1 +# SHA1 Fingerprint: a7:88:49:dc:5d:7c:75:8c:8c:de:39:98:56:b3:aa:d0:b2:a5:71:35 +# SHA256 Fingerprint: 37:1a:00:dc:05:33:b3:72:1a:7e:eb:40:e8:41:9e:70:79:9d:2b:0a:0f:2c:1d:80:69:31:65:f7:ce:c4:ad:75 +-----BEGIN CERTIFICATE----- +MIIFZjCCA06gAwIBAgIQCPm0eKj6ftpqMzeJ3nzPijANBgkqhkiG9w0BAQwFADBN +MQswCQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQsIEluYy4xJTAjBgNVBAMT +HERpZ2lDZXJ0IFRMUyBSU0E0MDk2IFJvb3QgRzUwHhcNMjEwMTE1MDAwMDAwWhcN +NDYwMTE0MjM1OTU5WjBNMQswCQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQs +IEluYy4xJTAjBgNVBAMTHERpZ2lDZXJ0IFRMUyBSU0E0MDk2IFJvb3QgRzUwggIi +MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCz0PTJeRGd/fxmgefM1eS87IE+ +ajWOLrfn3q/5B03PMJ3qCQuZvWxX2hhKuHisOjmopkisLnLlvevxGs3npAOpPxG0 +2C+JFvuUAT27L/gTBaF4HI4o4EXgg/RZG5Wzrn4DReW+wkL+7vI8toUTmDKdFqgp +wgscONyfMXdcvyej/Cestyu9dJsXLfKB2l2w4SMXPohKEiPQ6s+d3gMXsUJKoBZM +pG2T6T867jp8nVid9E6P/DsjyG244gXazOvswzH016cpVIDPRFtMbzCe88zdH5RD +nU1/cHAN1DrRN/BsnZvAFJNY781BOHW8EwOVfH/jXOnVDdXifBBiqmvwPXbzP6Po +sMH976pXTayGpxi0KcEsDr9kvimM2AItzVwv8n/vFfQMFawKsPHTDU9qTXeXAaDx +Zre3zu/O7Oyldcqs4+Fj97ihBMi8ez9dLRYiVu1ISf6nL3kwJZu6ay0/nTvEF+cd +Lvvyz6b84xQslpghjLSR6Rlgg/IwKwZzUNWYOwbpx4oMYIwo+FKbbuH2TbsGJJvX +KyY//SovcfXWJL5/MZ4PbeiPT02jP/816t9JXkGPhvnxd3lLG7SjXi/7RgLQZhNe +XoVPzthwiHvOAbWWl9fNff2C+MIkwcoBOU+NosEUQB+cZtUMCUbW8tDRSHZWOkPL +tgoRObqME2wGtZ7P6wIDAQABo0IwQDAdBgNVHQ4EFgQUUTMc7TZArxfTJc1paPKv +TiM+s0EwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcN +AQEMBQADggIBAGCmr1tfV9qJ20tQqcQjNSH/0GEwhJG3PxDPJY7Jv0Y02cEhJhxw +GXIeo8mH/qlDZJY6yFMECrZBu8RHANmfGBg7sg7zNOok992vIGCukihfNudd5N7H +PNtQOa27PShNlnx2xlv0wdsUpasZYgcYQF+Xkdycx6u1UQ3maVNVzDl92sURVXLF +O4uJ+DQtpBflF+aZfTCIITfNMBc9uPK8qHWgQ9w+iUuQrm0D4ByjoJYJu32jtyoQ +REtGBzRj7TG5BO6jm5qu5jF49OokYTurWGT/u4cnYiWB39yhL/btp/96j1EuMPik +AdKFOV8BmZZvWltwGUb+hmA+rYAQCd05JS9Yf7vSdPD3Rh9GOUrYU9DzLjtxpdRv 
+/PNn5AeP3SYZ4Y1b+qOTEZvpyDrDVWiakuFSdjjo4bq9+0/V77PnSIMx8IIh47a+ +p6tv75/fTM8BuGJqIz3nCU2AG3swpMPdB380vqQmsvZB6Akd4yCYqjdP//fx4ilw +MUc/dNAUFvohigLVigmUdy7yWSiLfFCSCmZ4OIN1xLVaqBHG5cGdZlXPU8Sv13WF +qUITVuwhd4GTWgzqltlJyqEI8pc7bZsEGCREjnwB8twl2F6GmrE52/WRMmrRpnCK +ovfepEWFJqgejF0pW8hL2JpqA15w8oVPbEtoL8pU9ozaMv7Da4M/OMZ+ +-----END CERTIFICATE----- + +# Issuer: CN=Certainly Root R1 O=Certainly +# Subject: CN=Certainly Root R1 O=Certainly +# Label: "Certainly Root R1" +# Serial: 188833316161142517227353805653483829216 +# MD5 Fingerprint: 07:70:d4:3e:82:87:a0:fa:33:36:13:f4:fa:33:e7:12 +# SHA1 Fingerprint: a0:50:ee:0f:28:71:f4:27:b2:12:6d:6f:50:96:25:ba:cc:86:42:af +# SHA256 Fingerprint: 77:b8:2c:d8:64:4c:43:05:f7:ac:c5:cb:15:6b:45:67:50:04:03:3d:51:c6:0c:62:02:a8:e0:c3:34:67:d3:a0 +-----BEGIN CERTIFICATE----- +MIIFRzCCAy+gAwIBAgIRAI4P+UuQcWhlM1T01EQ5t+AwDQYJKoZIhvcNAQELBQAw +PTELMAkGA1UEBhMCVVMxEjAQBgNVBAoTCUNlcnRhaW5seTEaMBgGA1UEAxMRQ2Vy +dGFpbmx5IFJvb3QgUjEwHhcNMjEwNDAxMDAwMDAwWhcNNDYwNDAxMDAwMDAwWjA9 +MQswCQYDVQQGEwJVUzESMBAGA1UEChMJQ2VydGFpbmx5MRowGAYDVQQDExFDZXJ0 +YWlubHkgUm9vdCBSMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANA2 +1B/q3avk0bbm+yLA3RMNansiExyXPGhjZjKcA7WNpIGD2ngwEc/csiu+kr+O5MQT +vqRoTNoCaBZ0vrLdBORrKt03H2As2/X3oXyVtwxwhi7xOu9S98zTm/mLvg7fMbed +aFySpvXl8wo0tf97ouSHocavFwDvA5HtqRxOcT3Si2yJ9HiG5mpJoM610rCrm/b0 +1C7jcvk2xusVtyWMOvwlDbMicyF0yEqWYZL1LwsYpfSt4u5BvQF5+paMjRcCMLT5 +r3gajLQ2EBAHBXDQ9DGQilHFhiZ5shGIXsXwClTNSaa/ApzSRKft43jvRl5tcdF5 +cBxGX1HpyTfcX35pe0HfNEXgO4T0oYoKNp43zGJS4YkNKPl6I7ENPT2a/Z2B7yyQ +wHtETrtJ4A5KVpK8y7XdeReJkd5hiXSSqOMyhb5OhaRLWcsrxXiOcVTQAjeZjOVJ +6uBUcqQRBi8LjMFbvrWhsFNunLhgkR9Za/kt9JQKl7XsxXYDVBtlUrpMklZRNaBA +2CnbrlJ2Oy0wQJuK0EJWtLeIAaSHO1OWzaMWj/Nmqhexx2DgwUMFDO6bW2BvBlyH +Wyf5QBGenDPBt+U1VwV/J84XIIwc/PH72jEpSe31C4SnT8H2TsIonPru4K8H+zMR +eiFPCyEQtkA6qyI6BJyLm4SGcprSp6XEtHWRqSsjAgMBAAGjQjBAMA4GA1UdDwEB +/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTgqj8ljZ9EXME66C6u +d0yEPmcM9DANBgkqhkiG9w0BAQsFAAOCAgEAuVevuBLaV4OPaAszHQNTVfSVcOQr +PbA56/qJYv331hgELyE03fFo8NWWWt7CgKPBjcZq91l3rhVkz1t5BXdm6ozTaw3d +8VkswTOlMIAVRQdFGjEitpIAq5lNOo93r6kiyi9jyhXWx8bwPWz8HA2YEGGeEaIi +1wrykXprOQ4vMMM2SZ/g6Q8CRFA3lFV96p/2O7qUpUzpvD5RtOjKkjZUbVwlKNrd +rRT90+7iIgXr0PK3aBLXWopBGsaSpVo7Y0VPv+E6dyIvXL9G+VoDhRNCX8reU9di +taY1BMJH/5n9hN9czulegChB8n3nHpDYT3Y+gjwN/KUD+nsa2UUeYNrEjvn8K8l7 +lcUq/6qJ34IxD3L/DCfXCh5WAFAeDJDBlrXYFIW7pw0WwfgHJBu6haEaBQmAupVj +yTrsJZ9/nbqkRxWbRHDxakvWOF5D8xh+UG7pWijmZeZ3Gzr9Hb4DJqPb1OG7fpYn +Kx3upPvaJVQTA945xsMfTZDsjxtK0hzthZU4UHlG1sGQUDGpXJpuHfUzVounmdLy +yCwzk5Iwx06MZTMQZBf9JBeW0Y3COmor6xOLRPIh80oat3df1+2IpHLlOR+Vnb5n +wXARPbv0+Em34yaXOp/SX3z7wJl8OSngex2/DaeP0ik0biQVy96QXr8axGbqwua6 +OV+KmalBWQewLK8= +-----END CERTIFICATE----- + +# Issuer: CN=Certainly Root E1 O=Certainly +# Subject: CN=Certainly Root E1 O=Certainly +# Label: "Certainly Root E1" +# Serial: 8168531406727139161245376702891150584 +# MD5 Fingerprint: 0a:9e:ca:cd:3e:52:50:c6:36:f3:4b:a3:ed:a7:53:e9 +# SHA1 Fingerprint: f9:e1:6d:dc:01:89:cf:d5:82:45:63:3e:c5:37:7d:c2:eb:93:6f:2b +# SHA256 Fingerprint: b4:58:5f:22:e4:ac:75:6a:4e:86:12:a1:36:1c:5d:9d:03:1a:93:fd:84:fe:bb:77:8f:a3:06:8b:0f:c4:2d:c2 +-----BEGIN CERTIFICATE----- +MIIB9zCCAX2gAwIBAgIQBiUzsUcDMydc+Y2aub/M+DAKBggqhkjOPQQDAzA9MQsw +CQYDVQQGEwJVUzESMBAGA1UEChMJQ2VydGFpbmx5MRowGAYDVQQDExFDZXJ0YWlu +bHkgUm9vdCBFMTAeFw0yMTA0MDEwMDAwMDBaFw00NjA0MDEwMDAwMDBaMD0xCzAJ +BgNVBAYTAlVTMRIwEAYDVQQKEwlDZXJ0YWlubHkxGjAYBgNVBAMTEUNlcnRhaW5s +eSBSb290IEUxMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAE3m/4fxzf7flHh4axpMCK 
++IKXgOqPyEpeKn2IaKcBYhSRJHpcnqMXfYqGITQYUBsQ3tA3SybHGWCA6TS9YBk2 +QNYphwk8kXr2vBMj3VlOBF7PyAIcGFPBMdjaIOlEjeR2o0IwQDAOBgNVHQ8BAf8E +BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU8ygYy2R17ikq6+2uI1g4 +hevIIgcwCgYIKoZIzj0EAwMDaAAwZQIxALGOWiDDshliTd6wT99u0nCK8Z9+aozm +ut6Dacpps6kFtZaSF4fC0urQe87YQVt8rgIwRt7qy12a7DLCZRawTDBcMPPaTnOG +BtjOiQRINzf43TNRnXCve1XYAS59BWQOhriR +-----END CERTIFICATE----- + +# Issuer: CN=E-Tugra Global Root CA RSA v3 O=E-Tugra EBG A.S. OU=E-Tugra Trust Center +# Subject: CN=E-Tugra Global Root CA RSA v3 O=E-Tugra EBG A.S. OU=E-Tugra Trust Center +# Label: "E-Tugra Global Root CA RSA v3" +# Serial: 75951268308633135324246244059508261641472512052 +# MD5 Fingerprint: 22:be:10:f6:c2:f8:03:88:73:5f:33:29:47:28:47:a4 +# SHA1 Fingerprint: e9:a8:5d:22:14:52:1c:5b:aa:0a:b4:be:24:6a:23:8a:c9:ba:e2:a9 +# SHA256 Fingerprint: ef:66:b0:b1:0a:3c:db:9f:2e:36:48:c7:6b:d2:af:18:ea:d2:bf:e6:f1:17:65:5e:28:c4:06:0d:a1:a3:f4:c2 +-----BEGIN CERTIFICATE----- +MIIF8zCCA9ugAwIBAgIUDU3FzRYilZYIfrgLfxUGNPt5EDQwDQYJKoZIhvcNAQEL +BQAwgYAxCzAJBgNVBAYTAlRSMQ8wDQYDVQQHEwZBbmthcmExGTAXBgNVBAoTEEUt +VHVncmEgRUJHIEEuUy4xHTAbBgNVBAsTFEUtVHVncmEgVHJ1c3QgQ2VudGVyMSYw +JAYDVQQDEx1FLVR1Z3JhIEdsb2JhbCBSb290IENBIFJTQSB2MzAeFw0yMDAzMTgw +OTA3MTdaFw00NTAzMTIwOTA3MTdaMIGAMQswCQYDVQQGEwJUUjEPMA0GA1UEBxMG +QW5rYXJhMRkwFwYDVQQKExBFLVR1Z3JhIEVCRyBBLlMuMR0wGwYDVQQLExRFLVR1 +Z3JhIFRydXN0IENlbnRlcjEmMCQGA1UEAxMdRS1UdWdyYSBHbG9iYWwgUm9vdCBD +QSBSU0EgdjMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCiZvCJt3J7 +7gnJY9LTQ91ew6aEOErxjYG7FL1H6EAX8z3DeEVypi6Q3po61CBxyryfHUuXCscx +uj7X/iWpKo429NEvx7epXTPcMHD4QGxLsqYxYdE0PD0xesevxKenhOGXpOhL9hd8 +7jwH7eKKV9y2+/hDJVDqJ4GohryPUkqWOmAalrv9c/SF/YP9f4RtNGx/ardLAQO/ +rWm31zLZ9Vdq6YaCPqVmMbMWPcLzJmAy01IesGykNz709a/r4d+ABs8qQedmCeFL +l+d3vSFtKbZnwy1+7dZ5ZdHPOrbRsV5WYVB6Ws5OUDGAA5hH5+QYfERaxqSzO8bG +wzrwbMOLyKSRBfP12baqBqG3q+Sx6iEUXIOk/P+2UNOMEiaZdnDpwA+mdPy70Bt4 +znKS4iicvObpCdg604nmvi533wEKb5b25Y08TVJ2Glbhc34XrD2tbKNSEhhw5oBO +M/J+JjKsBY04pOZ2PJ8QaQ5tndLBeSBrW88zjdGUdjXnXVXHt6woq0bM5zshtQoK +5EpZ3IE1S0SVEgpnpaH/WwAH0sDM+T/8nzPyAPiMbIedBi3x7+PmBvrFZhNb/FAH +nnGGstpvdDDPk1Po3CLW3iAfYY2jLqN4MpBs3KwytQXk9TwzDdbgh3cXTJ2w2Amo +DVf3RIXwyAS+XF1a4xeOVGNpf0l0ZAWMowIDAQABo2MwYTAPBgNVHRMBAf8EBTAD +AQH/MB8GA1UdIwQYMBaAFLK0ruYt9ybVqnUtdkvAG1Mh0EjvMB0GA1UdDgQWBBSy +tK7mLfcm1ap1LXZLwBtTIdBI7zAOBgNVHQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQEL +BQADggIBAImocn+M684uGMQQgC0QDP/7FM0E4BQ8Tpr7nym/Ip5XuYJzEmMmtcyQ +6dIqKe6cLcwsmb5FJ+Sxce3kOJUxQfJ9emN438o2Fi+CiJ+8EUdPdk3ILY7r3y18 +Tjvarvbj2l0Upq7ohUSdBm6O++96SmotKygY/r+QLHUWnw/qln0F7psTpURs+APQ +3SPh/QMSEgj0GDSz4DcLdxEBSL9htLX4GdnLTeqjjO/98Aa1bZL0SmFQhO3sSdPk +vmjmLuMxC1QLGpLWgti2omU8ZgT5Vdps+9u1FGZNlIM7zR6mK7L+d0CGq+ffCsn9 +9t2HVhjYsCxVYJb6CH5SkPVLpi6HfMsg2wY+oF0Dd32iPBMbKaITVaA9FCKvb7jQ +mhty3QUBjYZgv6Rn7rWlDdF/5horYmbDB7rnoEgcOMPpRfunf/ztAmgayncSd6YA +VSgU7NbHEqIbZULpkejLPoeJVF3Zr52XnGnnCv8PWniLYypMfUeUP95L6VPQMPHF +9p5J3zugkaOj/s1YzOrfr28oO6Bpm4/srK4rVJ2bBLFHIK+WEj5jlB0E5y67hscM +moi/dkfv97ALl2bSRM9gUgfh1SxKOidhd8rXj+eHDjD/DLsE4mHDosiXYY60MGo8 +bcIHX0pzLz/5FooBZu+6kcpSV3uu1OYP3Qt6f4ueJiDPO++BcYNZ +-----END CERTIFICATE----- + +# Issuer: CN=E-Tugra Global Root CA ECC v3 O=E-Tugra EBG A.S. OU=E-Tugra Trust Center +# Subject: CN=E-Tugra Global Root CA ECC v3 O=E-Tugra EBG A.S. 
OU=E-Tugra Trust Center +# Label: "E-Tugra Global Root CA ECC v3" +# Serial: 218504919822255052842371958738296604628416471745 +# MD5 Fingerprint: 46:bc:81:bb:f1:b5:1e:f7:4b:96:bc:14:e2:e7:27:64 +# SHA1 Fingerprint: 8a:2f:af:57:53:b1:b0:e6:a1:04:ec:5b:6a:69:71:6d:f6:1c:e2:84 +# SHA256 Fingerprint: 87:3f:46:85:fa:7f:56:36:25:25:2e:6d:36:bc:d7:f1:6f:c2:49:51:f2:64:e4:7e:1b:95:4f:49:08:cd:ca:13 +-----BEGIN CERTIFICATE----- +MIICpTCCAiqgAwIBAgIUJkYZdzHhT28oNt45UYbm1JeIIsEwCgYIKoZIzj0EAwMw +gYAxCzAJBgNVBAYTAlRSMQ8wDQYDVQQHEwZBbmthcmExGTAXBgNVBAoTEEUtVHVn +cmEgRUJHIEEuUy4xHTAbBgNVBAsTFEUtVHVncmEgVHJ1c3QgQ2VudGVyMSYwJAYD +VQQDEx1FLVR1Z3JhIEdsb2JhbCBSb290IENBIEVDQyB2MzAeFw0yMDAzMTgwOTQ2 +NThaFw00NTAzMTIwOTQ2NThaMIGAMQswCQYDVQQGEwJUUjEPMA0GA1UEBxMGQW5r +YXJhMRkwFwYDVQQKExBFLVR1Z3JhIEVCRyBBLlMuMR0wGwYDVQQLExRFLVR1Z3Jh +IFRydXN0IENlbnRlcjEmMCQGA1UEAxMdRS1UdWdyYSBHbG9iYWwgUm9vdCBDQSBF +Q0MgdjMwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAASOmCm/xxAeJ9urA8woLNheSBkQ +KczLWYHMjLiSF4mDKpL2w6QdTGLVn9agRtwcvHbB40fQWxPa56WzZkjnIZpKT4YK +fWzqTTKACrJ6CZtpS5iB4i7sAnCWH/31Rs7K3IKjYzBhMA8GA1UdEwEB/wQFMAMB +Af8wHwYDVR0jBBgwFoAU/4Ixcj75xGZsrTie0bBRiKWQzPUwHQYDVR0OBBYEFP+C +MXI++cRmbK04ntGwUYilkMz1MA4GA1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNp +ADBmAjEA5gVYaWHlLcoNy/EZCL3W/VGSGn5jVASQkZo1kTmZ+gepZpO6yGjUij/6 +7W4WAie3AjEA3VoXK3YdZUKWpqxdinlW2Iob35reX8dQj7FbcQwm32pAAOwzkSFx +vmjkI6TZraE3 +-----END CERTIFICATE----- + +# Issuer: CN=Security Communication RootCA3 O=SECOM Trust Systems CO.,LTD. +# Subject: CN=Security Communication RootCA3 O=SECOM Trust Systems CO.,LTD. +# Label: "Security Communication RootCA3" +# Serial: 16247922307909811815 +# MD5 Fingerprint: 1c:9a:16:ff:9e:5c:e0:4d:8a:14:01:f4:35:5d:29:26 +# SHA1 Fingerprint: c3:03:c8:22:74:92:e5:61:a2:9c:5f:79:91:2b:1e:44:13:91:30:3a +# SHA256 Fingerprint: 24:a5:5c:2a:b0:51:44:2d:06:17:76:65:41:23:9a:4a:d0:32:d7:c5:51:75:aa:34:ff:de:2f:bc:4f:5c:52:94 +-----BEGIN CERTIFICATE----- +MIIFfzCCA2egAwIBAgIJAOF8N0D9G/5nMA0GCSqGSIb3DQEBDAUAMF0xCzAJBgNV +BAYTAkpQMSUwIwYDVQQKExxTRUNPTSBUcnVzdCBTeXN0ZW1zIENPLixMVEQuMScw +JQYDVQQDEx5TZWN1cml0eSBDb21tdW5pY2F0aW9uIFJvb3RDQTMwHhcNMTYwNjE2 +MDYxNzE2WhcNMzgwMTE4MDYxNzE2WjBdMQswCQYDVQQGEwJKUDElMCMGA1UEChMc +U0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEnMCUGA1UEAxMeU2VjdXJpdHkg +Q29tbXVuaWNhdGlvbiBSb290Q0EzMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC +CgKCAgEA48lySfcw3gl8qUCBWNO0Ot26YQ+TUG5pPDXC7ltzkBtnTCHsXzW7OT4r +CmDvu20rhvtxosis5FaU+cmvsXLUIKx00rgVrVH+hXShuRD+BYD5UpOzQD11EKzA +lrenfna84xtSGc4RHwsENPXY9Wk8d/Nk9A2qhd7gCVAEF5aEt8iKvE1y/By7z/MG +TfmfZPd+pmaGNXHIEYBMwXFAWB6+oHP2/D5Q4eAvJj1+XCO1eXDe+uDRpdYMQXF7 +9+qMHIjH7Iv10S9VlkZ8WjtYO/u62C21Jdp6Ts9EriGmnpjKIG58u4iFW/vAEGK7 +8vknR+/RiTlDxN/e4UG/VHMgly1s2vPUB6PmudhvrvyMGS7TZ2crldtYXLVqAvO4 +g160a75BflcJdURQVc1aEWEhCmHCqYj9E7wtiS/NYeCVvsq1e+F7NGcLH7YMx3we +GVPKp7FKFSBWFHA9K4IsD50VHUeAR/94mQ4xr28+j+2GaR57GIgUssL8gjMunEst ++3A7caoreyYn8xrC3PsXuKHqy6C0rtOUfnrQq8PsOC0RLoi/1D+tEjtCrI8Cbn3M +0V9hvqG8OmpI6iZVIhZdXw3/JzOfGAN0iltSIEdrRU0id4xVJ/CvHozJgyJUt5rQ +T9nO/NkuHJYosQLTA70lUhw0Zk8jq/R3gpYd0VcwCBEF/VfR2ccCAwEAAaNCMEAw +HQYDVR0OBBYEFGQUfPxYchamCik0FW8qy7z8r6irMA4GA1UdDwEB/wQEAwIBBjAP +BgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3DQEBDAUAA4ICAQDcAiMI4u8hOscNtybS +YpOnpSNyByCCYN8Y11StaSWSntkUz5m5UoHPrmyKO1o5yGwBQ8IibQLwYs1OY0PA +FNr0Y/Dq9HHuTofjcan0yVflLl8cebsjqodEV+m9NU1Bu0soo5iyG9kLFwfl9+qd +9XbXv8S2gVj/yP9kaWJ5rW4OH3/uHWnlt3Jxs/6lATWUVCvAUm2PVcTJ0rjLyjQI +UYWg9by0F1jqClx6vWPGOi//lkkZhOpn2ASxYfQAW0q3nHE3GYV5v4GwxxMOdnE+ +OoAGrgYWp421wsTL/0ClXI2lyTrtcoHKXJg80jQDdwj98ClZXSEIx2C/pHF7uNke 
+gr4Jr2VvKKu/S7XuPghHJ6APbw+LP6yVGPO5DtxnVW5inkYO0QR4ynKudtml+LLf +iAlhi+8kTtFZP1rUPcmTPCtk9YENFpb3ksP+MW/oKjJ0DvRMmEoYDjBU1cXrvMUV +nuiZIesnKwkK2/HmcBhWuwzkvvnoEKQTkrgc4NtnHVMDpCKn3F2SEDzq//wbEBrD +2NCcnWXL0CsnMQMeNuE9dnUM/0Umud1RvCPHX9jYhxBAEg09ODfnRDwYwFMJZI// +1ZqmfHAuc1Uh6N//g7kdPjIe1qZ9LPFm6Vwdp6POXiUyK+OVrCoHzrQoeIY8Laad +TdJ0MN1kURXbg4NR16/9M51NZg== +-----END CERTIFICATE----- + +# Issuer: CN=Security Communication ECC RootCA1 O=SECOM Trust Systems CO.,LTD. +# Subject: CN=Security Communication ECC RootCA1 O=SECOM Trust Systems CO.,LTD. +# Label: "Security Communication ECC RootCA1" +# Serial: 15446673492073852651 +# MD5 Fingerprint: 7e:43:b0:92:68:ec:05:43:4c:98:ab:5d:35:2e:7e:86 +# SHA1 Fingerprint: b8:0e:26:a9:bf:d2:b2:3b:c0:ef:46:c9:ba:c7:bb:f6:1d:0d:41:41 +# SHA256 Fingerprint: e7:4f:bd:a5:5b:d5:64:c4:73:a3:6b:44:1a:a7:99:c8:a6:8e:07:74:40:e8:28:8b:9f:a1:e5:0e:4b:ba:ca:11 +-----BEGIN CERTIFICATE----- +MIICODCCAb6gAwIBAgIJANZdm7N4gS7rMAoGCCqGSM49BAMDMGExCzAJBgNVBAYT +AkpQMSUwIwYDVQQKExxTRUNPTSBUcnVzdCBTeXN0ZW1zIENPLixMVEQuMSswKQYD +VQQDEyJTZWN1cml0eSBDb21tdW5pY2F0aW9uIEVDQyBSb290Q0ExMB4XDTE2MDYx +NjA1MTUyOFoXDTM4MDExODA1MTUyOFowYTELMAkGA1UEBhMCSlAxJTAjBgNVBAoT +HFNFQ09NIFRydXN0IFN5c3RlbXMgQ08uLExURC4xKzApBgNVBAMTIlNlY3VyaXR5 +IENvbW11bmljYXRpb24gRUNDIFJvb3RDQTEwdjAQBgcqhkjOPQIBBgUrgQQAIgNi +AASkpW9gAwPDvTH00xecK4R1rOX9PVdu12O/5gSJko6BnOPpR27KkBLIE+Cnnfdl +dB9sELLo5OnvbYUymUSxXv3MdhDYW72ixvnWQuRXdtyQwjWpS4g8EkdtXP9JTxpK +ULGjQjBAMB0GA1UdDgQWBBSGHOf+LaVKiwj+KBH6vqNm+GBZLzAOBgNVHQ8BAf8E +BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjAVXUI9/Lbu +9zuxNuie9sRGKEkz0FhDKmMpzE2xtHqiuQ04pV1IKv3LsnNdo4gIxwwCMQDAqy0O +be0YottT6SXbVQjgUMzfRGEWgqtJsLKB7HOHeLRMsmIbEvoWTSVLY70eN9k= +-----END CERTIFICATE----- diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/certifi/core.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/certifi/core.py old mode 100755 new mode 100644 index 7271acf4..de028981 --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/certifi/core.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/certifi/core.py @@ -1,15 +1,108 @@ -# -*- coding: utf-8 -*- - """ certifi.py ~~~~~~~~~~ -This module returns the installation location of cacert.pem. +This module returns the installation location of cacert.pem or its contents. """ -import os +import sys + + +if sys.version_info >= (3, 11): + + from importlib.resources import as_file, files + + _CACERT_CTX = None + _CACERT_PATH = None + + def where() -> str: + # This is slightly terrible, but we want to delay extracting the file + # in cases where we're inside of a zipimport situation until someone + # actually calls where(), but we don't want to re-extract the file + # on every call of where(), so we'll do it once then store it in a + # global variable. + global _CACERT_CTX + global _CACERT_PATH + if _CACERT_PATH is None: + # This is slightly janky, the importlib.resources API wants you to + # manage the cleanup of this file, so it doesn't actually return a + # path, it returns a context manager that will give you the path + # when you enter it and will do any cleanup when you leave it. In + # the common case of not needing a temporary file, it will just + # return the file system location and the __exit__() is a no-op. + # + # We also have to hold onto the actual context manager, because + # it will do the cleanup whenever it gets garbage collected, so + # we will also store that at the global level as well. 
+ _CACERT_CTX = as_file(files("certifi").joinpath("cacert.pem")) + _CACERT_PATH = str(_CACERT_CTX.__enter__()) + + return _CACERT_PATH + + def contents() -> str: + return files("certifi").joinpath("cacert.pem").read_text(encoding="ascii") + +elif sys.version_info >= (3, 7): + + from importlib.resources import path as get_path, read_text + + _CACERT_CTX = None + _CACERT_PATH = None + + def where() -> str: + # This is slightly terrible, but we want to delay extracting the + # file in cases where we're inside of a zipimport situation until + # someone actually calls where(), but we don't want to re-extract + # the file on every call of where(), so we'll do it once then store + # it in a global variable. + global _CACERT_CTX + global _CACERT_PATH + if _CACERT_PATH is None: + # This is slightly janky, the importlib.resources API wants you + # to manage the cleanup of this file, so it doesn't actually + # return a path, it returns a context manager that will give + # you the path when you enter it and will do any cleanup when + # you leave it. In the common case of not needing a temporary + # file, it will just return the file system location and the + # __exit__() is a no-op. + # + # We also have to hold onto the actual context manager, because + # it will do the cleanup whenever it gets garbage collected, so + # we will also store that at the global level as well. + _CACERT_CTX = get_path("certifi", "cacert.pem") + _CACERT_PATH = str(_CACERT_CTX.__enter__()) + + return _CACERT_PATH + + def contents() -> str: + return read_text("certifi", "cacert.pem", encoding="ascii") + +else: + import os + import types + from typing import Union + + Package = Union[types.ModuleType, str] + Resource = Union[str, "os.PathLike"] + + # This fallback will work for Python versions prior to 3.7 that lack the + # importlib.resources module but relies on the existing `where` function + # so won't address issues with environments like PyOxidizer that don't set + # __file__ on modules. + def read_text( + package: Package, + resource: Resource, + encoding: str = 'utf-8', + errors: str = 'strict' + ) -> str: + with open(where(), encoding=encoding) as data: + return data.read() + # If we don't have importlib.resources, then we will just do the old logic + # of assuming we're on the filesystem and munge the path directly. 
+ def where() -> str: + f = os.path.dirname(__file__) -def where(): - f = os.path.dirname(__file__) + return os.path.join(f, "cacert.pem") - return os.path.join(f, 'cacert.pem') + def contents() -> str: + return read_text("certifi", "cacert.pem", encoding="ascii") diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/certifi/py.typed b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/certifi/py.typed new file mode 100644 index 00000000..e69de29b diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/charset_normalizer-2.0.12.dist-info/INSTALLER b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/charset_normalizer-2.0.12.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/charset_normalizer-2.0.12.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/charset_normalizer-2.0.12.dist-info/LICENSE b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/charset_normalizer-2.0.12.dist-info/LICENSE new file mode 100644 index 00000000..ad82355b --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/charset_normalizer-2.0.12.dist-info/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2019 TAHRI Ahmed R. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. \ No newline at end of file diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/charset_normalizer-2.0.12.dist-info/METADATA b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/charset_normalizer-2.0.12.dist-info/METADATA new file mode 100644 index 00000000..1b04ed4c --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/charset_normalizer-2.0.12.dist-info/METADATA @@ -0,0 +1,269 @@ +Metadata-Version: 2.1 +Name: charset-normalizer +Version: 2.0.12 +Summary: The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet. 
+Home-page: https://github.com/ousret/charset_normalizer +Author: Ahmed TAHRI @Ousret +Author-email: ahmed.tahri@cloudnursery.dev +License: MIT +Project-URL: Bug Reports, https://github.com/Ousret/charset_normalizer/issues +Project-URL: Documentation, https://charset-normalizer.readthedocs.io/en/latest +Keywords: encoding,i18n,txt,text,charset,charset-detector,normalization,unicode,chardet +Platform: UNKNOWN +Classifier: License :: OSI Approved :: MIT License +Classifier: Intended Audience :: Developers +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Topic :: Text Processing :: Linguistic +Classifier: Topic :: Utilities +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Typing :: Typed +Requires-Python: >=3.5.0 +Description-Content-Type: text/markdown +License-File: LICENSE +Provides-Extra: unicode_backport +Requires-Dist: unicodedata2 ; extra == 'unicode_backport' + + +

+Charset Detection, for Everyone 👋
+
+The Real First Universal Charset Detector
+
+> A library that helps you read text from an unknown charset encoding. Motivated by `chardet`,
+> I'm trying to resolve the issue by taking a new approach.
+> All IANA character set names for which the Python core library provides codecs are supported.
+
+>>>>> 👉 Try Me Online Now, Then Adopt Me 👈 <<<<<

+
+This project offers you an alternative to **Universal Charset Encoding Detector**, also known as **Chardet**.
+
+| Feature | [Chardet](https://github.com/chardet/chardet) | Charset Normalizer | [cChardet](https://github.com/PyYoshi/cChardet) |
+| ------------- | :-------------: | :------------------: | :------------------: |
+| `Fast` | ❌ | ✅ | ✅ |
+| `Universal**` | ❌ | ✅ | ❌ |
+| `Reliable` **without** distinguishable standards | ❌ | ✅ | ✅ |
+| `Reliable` **with** distinguishable standards | ✅ | ✅ | ✅ |
+| `Free & Open` | ✅ | ✅ | ✅ |
+| `License` | LGPL-2.1 | MIT | MPL-1.1 |
+| `Native Python` | ✅ | ✅ | ❌ |
+| `Detect spoken language` | ❌ | ✅ | N/A |
+| `Supported Encoding` | 30 | :tada: [93](https://charset-normalizer.readthedocs.io/en/latest/user/support.html#supported-encodings) | 40 |

+*\*\* : They clearly use encoding-specific code, even if it covers most of the encodings in use.*
+
+Did you get here because of the logs? See [https://charset-normalizer.readthedocs.io/en/latest/user/miscellaneous.html](https://charset-normalizer.readthedocs.io/en/latest/user/miscellaneous.html)
+
+## ⭐ Your support
+
+*Fork, test-it, star-it, submit your ideas! We do listen.*
+
+## ⚡ Performance
+
+This package offers better performance than its counterpart Chardet. Here are some numbers.
+
+| Package | Accuracy | Mean per file (ms) | File per sec (est) |
+| ------------- | :-------------: | :------------------: | :------------------: |
+| [chardet](https://github.com/chardet/chardet) | 92 % | 220 ms | 5 file/sec |
+| charset-normalizer | **98 %** | **40 ms** | 25 file/sec |
+
+| Package | 99th percentile | 95th percentile | 50th percentile |
+| ------------- | :-------------: | :------------------: | :------------------: |
+| [chardet](https://github.com/chardet/chardet) | 1115 ms | 300 ms | 27 ms |
+| charset-normalizer | 460 ms | 240 ms | 18 ms |
+
+Chardet's performance on larger files (1MB+) is very poor. Expect a huge difference on large payloads.
+
+> Stats are generated using 400+ files using default parameters. For more details on the files used, see the GHA workflows.
+> And yes, these results might change at any time. The dataset can be updated to include more files.
+> The actual delays depend heavily on your CPU capabilities. The factors should remain the same.
+
+[cchardet](https://github.com/PyYoshi/cChardet) is a non-native (cpp binding) and unmaintained faster alternative with
+better accuracy than chardet but lower than this package. If speed is the most important factor, you should try it.
+
+## ✨ Installation
+
+Using PyPI for the latest stable release:
+```sh
+pip install charset-normalizer -U
+```
+
+If you want a more up-to-date `unicodedata` than the one available in your Python setup:
+```sh
+pip install charset-normalizer[unicode_backport] -U
+```
+
+## 🚀 Basic Usage
+
+### CLI
+This package comes with a CLI.
+
+```
+usage: normalizer [-h] [-v] [-a] [-n] [-m] [-r] [-f] [-t THRESHOLD]
+                  file [file ...]
+
+The Real First Universal Charset Detector. Discover originating encoding used
+on text file. Normalize text to unicode.
+
+positional arguments:
+  files                 File(s) to be analysed
+
+optional arguments:
+  -h, --help            show this help message and exit
+  -v, --verbose         Display complementary information about file if any.
+                        Stdout will contain logs about the detection process.
+  -a, --with-alternative
+                        Output complementary possibilities if any. Top-level
+                        JSON WILL be a list.
+  -n, --normalize       Permit to normalize input file. If not set, program
+                        does not write anything.
+  -m, --minimal         Only output the charset detected to STDOUT. Disabling
+                        JSON output.
+  -r, --replace         Replace file when trying to normalize it instead of
+                        creating a new one.
+  -f, --force           Replace file without asking if you are sure, use this
+                        flag with caution.
+  -t THRESHOLD, --threshold THRESHOLD
+                        Define a custom maximum amount of chaos allowed in
+                        decoded content. 0. <= chaos <= 1.
+  --version             Show version information and exit.
+```
+
+```bash
+normalizer ./data/sample.1.fr.srt
+```
+
+:tada: Since version 1.4.0 the CLI produces an easily usable stdout result in JSON format.
+
+```json
+{
+    "path": "/home/default/projects/charset_normalizer/data/sample.1.fr.srt",
+    "encoding": "cp1252",
+    "encoding_aliases": [
+        "1252",
+        "windows_1252"
+    ],
+    "alternative_encodings": [
+        "cp1254",
+        "cp1256",
+        "cp1258",
+        "iso8859_14",
+        "iso8859_15",
+        "iso8859_16",
+        "iso8859_3",
+        "iso8859_9",
+        "latin_1",
+        "mbcs"
+    ],
+    "language": "French",
+    "alphabets": [
+        "Basic Latin",
+        "Latin-1 Supplement"
+    ],
+    "has_sig_or_bom": false,
+    "chaos": 0.149,
+    "coherence": 97.152,
+    "unicode_path": null,
+    "is_preferred": true
+}
+```
+
+### Python
+*Just print out normalized text*
+```python
+from charset_normalizer import from_path
+
+results = from_path('./my_subtitle.srt')
+
+print(str(results.best()))
+```
+
+*Normalize any text file*
+```python
+from charset_normalizer import normalize
+try:
+    normalize('./my_subtitle.srt')  # should write to disk my_subtitle-***.srt
+except IOError as e:
+    print('Sadly, we are unable to perform charset normalization.', str(e))
+```
+
+*Upgrade your code without effort*
+```python
+from charset_normalizer import detect
+```
+
+The above code will behave the same as **chardet**. We ensure that we offer the best (reasonable) BC result possible.
+
+See the docs for advanced usage: [readthedocs.io](https://charset-normalizer.readthedocs.io/en/latest/)
+
+## 😇 Why
+
+When I started using Chardet, I noticed that it did not suit my expectations, and I wanted to propose a
+reliable alternative using a completely different method. Also! I never back down on a good challenge!
+
+I **don't care** about the **originating charset** encoding, because **two different tables** can
+produce **two identical rendered strings.**
+What I want is to get readable text, the best I can.
+
+In a way, **I'm brute forcing text decoding.** How cool is that? 😎
+
+Don't confuse the **ftfy** package with charset-normalizer or chardet. ftfy's goal is to repair unicode strings, whereas charset-normalizer converts a raw file in an unknown encoding to unicode.
+
+## 🍰 How
+
+ - Discard all charset encoding tables that could not fit the binary content.
+ - Measure the chaos, or the mess, once opened (by chunks) with a corresponding charset encoding.
+ - Extract the matches with the lowest mess detected.
+ - Additionally, we measure coherence / probe for a language.
+
+**Wait a minute**, what is chaos/mess and coherence according to **YOU?**
+
+*Chaos:* I opened hundreds of text files, **written by humans**, with the wrong encoding table. **I observed**, then
+**I established** some ground rules about **what is obvious** when **it seems like** a mess.
+ I know that my interpretation of what is chaotic is very subjective; feel free to contribute in order to
+ improve or rewrite it.
+
+*Coherence:* For each language on earth, we have computed ranked letter appearance occurrences (the best we can). So I thought
+that intel is worth something here. I use those records against decoded text to check if I can detect intelligent design.
+
+## ⚡ Known limitations
+
+ - Language detection is unreliable when text contains two or more languages sharing identical letters (e.g. HTML (English tags) + Turkish content (sharing Latin characters)).
+ - Every charset detector heavily depends on sufficient content. In common cases, do not bother running detection on very tiny content.
+
+## 👤 Contributing
+
+Contributions, issues and feature requests are very much welcome.
+Feel free to check [issues page](https://github.com/ousret/charset_normalizer/issues) if you want to contribute. + +## 📝 License + +Copyright © 2019 [Ahmed TAHRI @Ousret](https://github.com/Ousret).
+This project is [MIT](https://github.com/Ousret/charset_normalizer/blob/master/LICENSE) licensed. + +Characters frequencies used in this project © 2012 [Denny Vrandečić](http://simia.net/letters/) + + diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/charset_normalizer-2.0.12.dist-info/RECORD b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/charset_normalizer-2.0.12.dist-info/RECORD new file mode 100644 index 00000000..d5a8e21c --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/charset_normalizer-2.0.12.dist-info/RECORD @@ -0,0 +1,21 @@ +../../bin/normalizer,sha256=_6VNRnw7MyVCL-CINQ044FaHcpLHwQVgd-_D0cWq4KI,244 +charset_normalizer-2.0.12.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +charset_normalizer-2.0.12.dist-info/LICENSE,sha256=6zGgxaT7Cbik4yBV0lweX5w1iidS_vPNcgIT0cz-4kE,1070 +charset_normalizer-2.0.12.dist-info/METADATA,sha256=eX-U3s7nb6wcvXZFyM1mdBf1yz4I0msVBgNvLEscAbo,11713 +charset_normalizer-2.0.12.dist-info/RECORD,, +charset_normalizer-2.0.12.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92 +charset_normalizer-2.0.12.dist-info/entry_points.txt,sha256=5AJq_EPtGGUwJPgQLnBZfbVr-FYCIwT0xP7dIEZO3NI,77 +charset_normalizer-2.0.12.dist-info/top_level.txt,sha256=7ASyzePr8_xuZWJsnqJjIBtyV8vhEo0wBCv1MPRRi3Q,19 +charset_normalizer/__init__.py,sha256=x2A2OW29MBcqdxsvy6t1wzkUlH3ma0guxL6ZCfS8J94,1790 +charset_normalizer/api.py,sha256=r__Wz85F5pYOkRwEY5imXY_pCZ2Nil1DkdaAJY7T5o0,20303 +charset_normalizer/assets/__init__.py,sha256=FPnfk8limZRb8ZIUQcTvPEcbuM1eqOdWGw0vbWGycDs,25485 +charset_normalizer/cd.py,sha256=a9Kzzd9tHl_W08ExbCFMmRJqdo2k7EBQ8Z_3y9DmYsg,11076 +charset_normalizer/cli/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +charset_normalizer/cli/normalizer.py,sha256=LkeFIRc1l28rOgXpEby695x0bcKQv4D8z9FmA3Z2c3A,9364 +charset_normalizer/constant.py,sha256=51u_RS10I1vYVpBao__xHqf--HHNrR6me1A1se5r5Y0,19449 +charset_normalizer/legacy.py,sha256=XKeZOts_HdYQU_Jb3C9ZfOjY2CiUL132k9_nXer8gig,3384 +charset_normalizer/md.py,sha256=WEwnu2MyIiMeEaorRduqcTxGjIBclWIG3i-9_UL6LLs,18191 +charset_normalizer/models.py,sha256=XrGpVxfonhcilIWC1WeiP3-ZORGEe_RG3sgrfPLl9qM,13303 +charset_normalizer/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +charset_normalizer/utils.py,sha256=AWSL0z1B42IwdLfjX4ZMASA9cTUsTp0PweCdW98SI-4,9308 +charset_normalizer/version.py,sha256=uxO2cT0YIavQv4dQlNGmHPIOOwOa-exspxXi3IR7dck,80 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/charset_normalizer-2.0.12.dist-info/WHEEL b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/charset_normalizer-2.0.12.dist-info/WHEEL new file mode 100644 index 00000000..becc9a66 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/charset_normalizer-2.0.12.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.1) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/charset_normalizer-2.0.12.dist-info/entry_points.txt b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/charset_normalizer-2.0.12.dist-info/entry_points.txt new file mode 100644 index 00000000..a67f60bc --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/charset_normalizer-2.0.12.dist-info/entry_points.txt @@ -0,0 +1,3 @@ +[console_scripts] +normalizer = charset_normalizer.cli.normalizer:cli_detect + diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/charset_normalizer-2.0.12.dist-info/top_level.txt 
b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/charset_normalizer-2.0.12.dist-info/top_level.txt new file mode 100644 index 00000000..66958f0a --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/charset_normalizer-2.0.12.dist-info/top_level.txt @@ -0,0 +1 @@ +charset_normalizer diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/charset_normalizer/__init__.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/charset_normalizer/__init__.py new file mode 100644 index 00000000..1aea851a --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/charset_normalizer/__init__.py @@ -0,0 +1,56 @@ +# -*- coding: utf_8 -*- +""" +Charset-Normalizer +~~~~~~~~~~~~~~ +The Real First Universal Charset Detector. +A library that helps you read text from an unknown charset encoding. +Motivated by chardet, This package is trying to resolve the issue by taking a new approach. +All IANA character set names for which the Python core library provides codecs are supported. + +Basic usage: + >>> from charset_normalizer import from_bytes + >>> results = from_bytes('Bсеки човек има право на образование. Oбразованието!'.encode('utf_8')) + >>> best_guess = results.best() + >>> str(best_guess) + 'Bсеки човек има право на образование. Oбразованието!' + +Others methods and usages are available - see the full documentation +at . +:copyright: (c) 2021 by Ahmed TAHRI +:license: MIT, see LICENSE for more details. +""" +import logging + +from .api import from_bytes, from_fp, from_path, normalize +from .legacy import ( + CharsetDetector, + CharsetDoctor, + CharsetNormalizerMatch, + CharsetNormalizerMatches, + detect, +) +from .models import CharsetMatch, CharsetMatches +from .utils import set_logging_handler +from .version import VERSION, __version__ + +__all__ = ( + "from_fp", + "from_path", + "from_bytes", + "normalize", + "detect", + "CharsetMatch", + "CharsetMatches", + "CharsetNormalizerMatch", + "CharsetNormalizerMatches", + "CharsetDetector", + "CharsetDoctor", + "__version__", + "VERSION", + "set_logging_handler", +) + +# Attach a NullHandler to the top level logger by default +# https://docs.python.org/3.3/howto/logging.html#configuring-logging-for-a-library + +logging.getLogger("charset_normalizer").addHandler(logging.NullHandler()) diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/charset_normalizer/api.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/charset_normalizer/api.py new file mode 100644 index 00000000..bdc8ed98 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/charset_normalizer/api.py @@ -0,0 +1,608 @@ +import logging +from os.path import basename, splitext +from typing import BinaryIO, List, Optional, Set + +try: + from os import PathLike +except ImportError: # pragma: no cover + PathLike = str # type: ignore + +from .cd import ( + coherence_ratio, + encoding_languages, + mb_encoding_languages, + merge_coherence_ratios, +) +from .constant import IANA_SUPPORTED, TOO_BIG_SEQUENCE, TOO_SMALL_SEQUENCE, TRACE +from .md import mess_ratio +from .models import CharsetMatch, CharsetMatches +from .utils import ( + any_specified_encoding, + iana_name, + identify_sig_or_bom, + is_cp_similar, + is_multi_byte_encoding, + should_strip_sig_or_bom, +) + +# Will most likely be controversial +# logging.addLevelName(TRACE, "TRACE") +logger = logging.getLogger("charset_normalizer") +explain_handler = logging.StreamHandler() +explain_handler.setFormatter( + logging.Formatter("%(asctime)s | %(levelname)s | %(message)s") +) + + +def 
from_bytes( + sequences: bytes, + steps: int = 5, + chunk_size: int = 512, + threshold: float = 0.2, + cp_isolation: List[str] = None, + cp_exclusion: List[str] = None, + preemptive_behaviour: bool = True, + explain: bool = False, +) -> CharsetMatches: + """ + Given a raw bytes sequence, return the best possibles charset usable to render str objects. + If there is no results, it is a strong indicator that the source is binary/not text. + By default, the process will extract 5 blocs of 512o each to assess the mess and coherence of a given sequence. + And will give up a particular code page after 20% of measured mess. Those criteria are customizable at will. + + The preemptive behavior DOES NOT replace the traditional detection workflow, it prioritize a particular code page + but never take it for granted. Can improve the performance. + + You may want to focus your attention to some code page or/and not others, use cp_isolation and cp_exclusion for that + purpose. + + This function will strip the SIG in the payload/sequence every time except on UTF-16, UTF-32. + By default the library does not setup any handler other than the NullHandler, if you choose to set the 'explain' + toggle to True it will alter the logger configuration to add a StreamHandler that is suitable for debugging. + Custom logging format and handler can be set manually. + """ + + if not isinstance(sequences, (bytearray, bytes)): + raise TypeError( + "Expected object of type bytes or bytearray, got: {0}".format( + type(sequences) + ) + ) + + if explain: + previous_logger_level = logger.level # type: int + logger.addHandler(explain_handler) + logger.setLevel(TRACE) + + length = len(sequences) # type: int + + if length == 0: + logger.debug("Encoding detection on empty bytes, assuming utf_8 intention.") + if explain: + logger.removeHandler(explain_handler) + logger.setLevel(previous_logger_level or logging.WARNING) + return CharsetMatches([CharsetMatch(sequences, "utf_8", 0.0, False, [], "")]) + + if cp_isolation is not None: + logger.log( + TRACE, + "cp_isolation is set. use this flag for debugging purpose. " + "limited list of encoding allowed : %s.", + ", ".join(cp_isolation), + ) + cp_isolation = [iana_name(cp, False) for cp in cp_isolation] + else: + cp_isolation = [] + + if cp_exclusion is not None: + logger.log( + TRACE, + "cp_exclusion is set. use this flag for debugging purpose. 
" + "limited list of encoding excluded : %s.", + ", ".join(cp_exclusion), + ) + cp_exclusion = [iana_name(cp, False) for cp in cp_exclusion] + else: + cp_exclusion = [] + + if length <= (chunk_size * steps): + logger.log( + TRACE, + "override steps (%i) and chunk_size (%i) as content does not fit (%i byte(s) given) parameters.", + steps, + chunk_size, + length, + ) + steps = 1 + chunk_size = length + + if steps > 1 and length / steps < chunk_size: + chunk_size = int(length / steps) + + is_too_small_sequence = len(sequences) < TOO_SMALL_SEQUENCE # type: bool + is_too_large_sequence = len(sequences) >= TOO_BIG_SEQUENCE # type: bool + + if is_too_small_sequence: + logger.log( + TRACE, + "Trying to detect encoding from a tiny portion of ({}) byte(s).".format( + length + ), + ) + elif is_too_large_sequence: + logger.log( + TRACE, + "Using lazy str decoding because the payload is quite large, ({}) byte(s).".format( + length + ), + ) + + prioritized_encodings = [] # type: List[str] + + specified_encoding = ( + any_specified_encoding(sequences) if preemptive_behaviour else None + ) # type: Optional[str] + + if specified_encoding is not None: + prioritized_encodings.append(specified_encoding) + logger.log( + TRACE, + "Detected declarative mark in sequence. Priority +1 given for %s.", + specified_encoding, + ) + + tested = set() # type: Set[str] + tested_but_hard_failure = [] # type: List[str] + tested_but_soft_failure = [] # type: List[str] + + fallback_ascii = None # type: Optional[CharsetMatch] + fallback_u8 = None # type: Optional[CharsetMatch] + fallback_specified = None # type: Optional[CharsetMatch] + + results = CharsetMatches() # type: CharsetMatches + + sig_encoding, sig_payload = identify_sig_or_bom(sequences) + + if sig_encoding is not None: + prioritized_encodings.append(sig_encoding) + logger.log( + TRACE, + "Detected a SIG or BOM mark on first %i byte(s). Priority +1 given for %s.", + len(sig_payload), + sig_encoding, + ) + + prioritized_encodings.append("ascii") + + if "utf_8" not in prioritized_encodings: + prioritized_encodings.append("utf_8") + + for encoding_iana in prioritized_encodings + IANA_SUPPORTED: + + if cp_isolation and encoding_iana not in cp_isolation: + continue + + if cp_exclusion and encoding_iana in cp_exclusion: + continue + + if encoding_iana in tested: + continue + + tested.add(encoding_iana) + + decoded_payload = None # type: Optional[str] + bom_or_sig_available = sig_encoding == encoding_iana # type: bool + strip_sig_or_bom = bom_or_sig_available and should_strip_sig_or_bom( + encoding_iana + ) # type: bool + + if encoding_iana in {"utf_16", "utf_32"} and not bom_or_sig_available: + logger.log( + TRACE, + "Encoding %s wont be tested as-is because it require a BOM. 
Will try some sub-encoder LE/BE.", + encoding_iana, + ) + continue + + try: + is_multi_byte_decoder = is_multi_byte_encoding(encoding_iana) # type: bool + except (ModuleNotFoundError, ImportError): + logger.log( + TRACE, + "Encoding %s does not provide an IncrementalDecoder", + encoding_iana, + ) + continue + + try: + if is_too_large_sequence and is_multi_byte_decoder is False: + str( + sequences[: int(50e4)] + if strip_sig_or_bom is False + else sequences[len(sig_payload) : int(50e4)], + encoding=encoding_iana, + ) + else: + decoded_payload = str( + sequences + if strip_sig_or_bom is False + else sequences[len(sig_payload) :], + encoding=encoding_iana, + ) + except (UnicodeDecodeError, LookupError) as e: + if not isinstance(e, LookupError): + logger.log( + TRACE, + "Code page %s does not fit given bytes sequence at ALL. %s", + encoding_iana, + str(e), + ) + tested_but_hard_failure.append(encoding_iana) + continue + + similar_soft_failure_test = False # type: bool + + for encoding_soft_failed in tested_but_soft_failure: + if is_cp_similar(encoding_iana, encoding_soft_failed): + similar_soft_failure_test = True + break + + if similar_soft_failure_test: + logger.log( + TRACE, + "%s is deemed too similar to code page %s and was consider unsuited already. Continuing!", + encoding_iana, + encoding_soft_failed, + ) + continue + + r_ = range( + 0 if not bom_or_sig_available else len(sig_payload), + length, + int(length / steps), + ) + + multi_byte_bonus = ( + is_multi_byte_decoder + and decoded_payload is not None + and len(decoded_payload) < length + ) # type: bool + + if multi_byte_bonus: + logger.log( + TRACE, + "Code page %s is a multi byte encoding table and it appear that at least one character " + "was encoded using n-bytes.", + encoding_iana, + ) + + max_chunk_gave_up = int(len(r_) / 4) # type: int + + max_chunk_gave_up = max(max_chunk_gave_up, 2) + early_stop_count = 0 # type: int + lazy_str_hard_failure = False + + md_chunks = [] # type: List[str] + md_ratios = [] + + for i in r_: + if i + chunk_size > length + 8: + continue + + cut_sequence = sequences[i : i + chunk_size] + + if bom_or_sig_available and strip_sig_or_bom is False: + cut_sequence = sig_payload + cut_sequence + + try: + chunk = cut_sequence.decode( + encoding_iana, + errors="ignore" if is_multi_byte_decoder else "strict", + ) # type: str + except UnicodeDecodeError as e: # Lazy str loading may have missed something there + logger.log( + TRACE, + "LazyStr Loading: After MD chunk decode, code page %s does not fit given bytes sequence at ALL. %s", + encoding_iana, + str(e), + ) + early_stop_count = max_chunk_gave_up + lazy_str_hard_failure = True + break + + # multi-byte bad cutting detector and adjustment + # not the cleanest way to perform that fix but clever enough for now. 
+ if is_multi_byte_decoder and i > 0 and sequences[i] >= 0x80: + + chunk_partial_size_chk = min(chunk_size, 16) # type: int + + if ( + decoded_payload + and chunk[:chunk_partial_size_chk] not in decoded_payload + ): + for j in range(i, i - 4, -1): + cut_sequence = sequences[j : i + chunk_size] + + if bom_or_sig_available and strip_sig_or_bom is False: + cut_sequence = sig_payload + cut_sequence + + chunk = cut_sequence.decode(encoding_iana, errors="ignore") + + if chunk[:chunk_partial_size_chk] in decoded_payload: + break + + md_chunks.append(chunk) + + md_ratios.append(mess_ratio(chunk, threshold)) + + if md_ratios[-1] >= threshold: + early_stop_count += 1 + + if (early_stop_count >= max_chunk_gave_up) or ( + bom_or_sig_available and strip_sig_or_bom is False + ): + break + + # We might want to check the sequence again with the whole content + # Only if initial MD tests passes + if ( + not lazy_str_hard_failure + and is_too_large_sequence + and not is_multi_byte_decoder + ): + try: + sequences[int(50e3) :].decode(encoding_iana, errors="strict") + except UnicodeDecodeError as e: + logger.log( + TRACE, + "LazyStr Loading: After final lookup, code page %s does not fit given bytes sequence at ALL. %s", + encoding_iana, + str(e), + ) + tested_but_hard_failure.append(encoding_iana) + continue + + mean_mess_ratio = ( + sum(md_ratios) / len(md_ratios) if md_ratios else 0.0 + ) # type: float + if mean_mess_ratio >= threshold or early_stop_count >= max_chunk_gave_up: + tested_but_soft_failure.append(encoding_iana) + logger.log( + TRACE, + "%s was excluded because of initial chaos probing. Gave up %i time(s). " + "Computed mean chaos is %f %%.", + encoding_iana, + early_stop_count, + round(mean_mess_ratio * 100, ndigits=3), + ) + # Preparing those fallbacks in case we got nothing. + if ( + encoding_iana in ["ascii", "utf_8", specified_encoding] + and not lazy_str_hard_failure + ): + fallback_entry = CharsetMatch( + sequences, encoding_iana, threshold, False, [], decoded_payload + ) + if encoding_iana == specified_encoding: + fallback_specified = fallback_entry + elif encoding_iana == "ascii": + fallback_ascii = fallback_entry + else: + fallback_u8 = fallback_entry + continue + + logger.log( + TRACE, + "%s passed initial chaos probing. Mean measured chaos is %f %%", + encoding_iana, + round(mean_mess_ratio * 100, ndigits=3), + ) + + if not is_multi_byte_decoder: + target_languages = encoding_languages(encoding_iana) # type: List[str] + else: + target_languages = mb_encoding_languages(encoding_iana) + + if target_languages: + logger.log( + TRACE, + "{} should target any language(s) of {}".format( + encoding_iana, str(target_languages) + ), + ) + + cd_ratios = [] + + # We shall skip the CD when its about ASCII + # Most of the time its not relevant to run "language-detection" on it. 
+ if encoding_iana != "ascii": + for chunk in md_chunks: + chunk_languages = coherence_ratio( + chunk, 0.1, ",".join(target_languages) if target_languages else None + ) + + cd_ratios.append(chunk_languages) + + cd_ratios_merged = merge_coherence_ratios(cd_ratios) + + if cd_ratios_merged: + logger.log( + TRACE, + "We detected language {} using {}".format( + cd_ratios_merged, encoding_iana + ), + ) + + results.append( + CharsetMatch( + sequences, + encoding_iana, + mean_mess_ratio, + bom_or_sig_available, + cd_ratios_merged, + decoded_payload, + ) + ) + + if ( + encoding_iana in [specified_encoding, "ascii", "utf_8"] + and mean_mess_ratio < 0.1 + ): + logger.debug( + "Encoding detection: %s is most likely the one.", encoding_iana + ) + if explain: + logger.removeHandler(explain_handler) + logger.setLevel(previous_logger_level) + return CharsetMatches([results[encoding_iana]]) + + if encoding_iana == sig_encoding: + logger.debug( + "Encoding detection: %s is most likely the one as we detected a BOM or SIG within " + "the beginning of the sequence.", + encoding_iana, + ) + if explain: + logger.removeHandler(explain_handler) + logger.setLevel(previous_logger_level) + return CharsetMatches([results[encoding_iana]]) + + if len(results) == 0: + if fallback_u8 or fallback_ascii or fallback_specified: + logger.log( + TRACE, + "Nothing got out of the detection process. Using ASCII/UTF-8/Specified fallback.", + ) + + if fallback_specified: + logger.debug( + "Encoding detection: %s will be used as a fallback match", + fallback_specified.encoding, + ) + results.append(fallback_specified) + elif ( + (fallback_u8 and fallback_ascii is None) + or ( + fallback_u8 + and fallback_ascii + and fallback_u8.fingerprint != fallback_ascii.fingerprint + ) + or (fallback_u8 is not None) + ): + logger.debug("Encoding detection: utf_8 will be used as a fallback match") + results.append(fallback_u8) + elif fallback_ascii: + logger.debug("Encoding detection: ascii will be used as a fallback match") + results.append(fallback_ascii) + + if results: + logger.debug( + "Encoding detection: Found %s as plausible (best-candidate) for content. With %i alternatives.", + results.best().encoding, # type: ignore + len(results) - 1, + ) + else: + logger.debug("Encoding detection: Unable to determine any suitable charset.") + + if explain: + logger.removeHandler(explain_handler) + logger.setLevel(previous_logger_level) + + return results + + +def from_fp( + fp: BinaryIO, + steps: int = 5, + chunk_size: int = 512, + threshold: float = 0.20, + cp_isolation: List[str] = None, + cp_exclusion: List[str] = None, + preemptive_behaviour: bool = True, + explain: bool = False, +) -> CharsetMatches: + """ + Same thing than the function from_bytes but using a file pointer that is already ready. + Will not close the file pointer. + """ + return from_bytes( + fp.read(), + steps, + chunk_size, + threshold, + cp_isolation, + cp_exclusion, + preemptive_behaviour, + explain, + ) + + +def from_path( + path: PathLike, + steps: int = 5, + chunk_size: int = 512, + threshold: float = 0.20, + cp_isolation: List[str] = None, + cp_exclusion: List[str] = None, + preemptive_behaviour: bool = True, + explain: bool = False, +) -> CharsetMatches: + """ + Same thing than the function from_bytes but with one extra step. Opening and reading given file path in binary mode. + Can raise IOError. 
+ """ + with open(path, "rb") as fp: + return from_fp( + fp, + steps, + chunk_size, + threshold, + cp_isolation, + cp_exclusion, + preemptive_behaviour, + explain, + ) + + +def normalize( + path: PathLike, + steps: int = 5, + chunk_size: int = 512, + threshold: float = 0.20, + cp_isolation: List[str] = None, + cp_exclusion: List[str] = None, + preemptive_behaviour: bool = True, +) -> CharsetMatch: + """ + Take a (text-based) file path and try to create another file next to it, this time using UTF-8. + """ + results = from_path( + path, + steps, + chunk_size, + threshold, + cp_isolation, + cp_exclusion, + preemptive_behaviour, + ) + + filename = basename(path) + target_extensions = list(splitext(filename)) + + if len(results) == 0: + raise IOError( + 'Unable to normalize "{}", no encoding charset seems to fit.'.format( + filename + ) + ) + + result = results.best() + + target_extensions[0] += "-" + result.encoding # type: ignore + + with open( + "{}".format(str(path).replace(filename, "".join(target_extensions))), "wb" + ) as fp: + fp.write(result.output()) # type: ignore + + return result # type: ignore diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/charset_normalizer/assets/__init__.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/charset_normalizer/assets/__init__.py new file mode 100644 index 00000000..b2e56ff3 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/charset_normalizer/assets/__init__.py @@ -0,0 +1,1244 @@ +# -*- coding: utf_8 -*- +from collections import OrderedDict + +FREQUENCIES = OrderedDict( + [ + ( + "English", + [ + "e", + "a", + "t", + "i", + "o", + "n", + "s", + "r", + "h", + "l", + "d", + "c", + "u", + "m", + "f", + "p", + "g", + "w", + "y", + "b", + "v", + "k", + "x", + "j", + "z", + "q", + ], + ), + ( + "German", + [ + "e", + "n", + "i", + "r", + "s", + "t", + "a", + "d", + "h", + "u", + "l", + "g", + "o", + "c", + "m", + "b", + "f", + "k", + "w", + "z", + "p", + "v", + "ü", + "ä", + "ö", + "j", + ], + ), + ( + "French", + [ + "e", + "a", + "s", + "n", + "i", + "t", + "r", + "l", + "u", + "o", + "d", + "c", + "p", + "m", + "é", + "v", + "g", + "f", + "b", + "h", + "q", + "à", + "x", + "è", + "y", + "j", + ], + ), + ( + "Dutch", + [ + "e", + "n", + "a", + "i", + "r", + "t", + "o", + "d", + "s", + "l", + "g", + "h", + "v", + "m", + "u", + "k", + "c", + "p", + "b", + "w", + "j", + "z", + "f", + "y", + "x", + "ë", + ], + ), + ( + "Italian", + [ + "e", + "i", + "a", + "o", + "n", + "l", + "t", + "r", + "s", + "c", + "d", + "u", + "p", + "m", + "g", + "v", + "f", + "b", + "z", + "h", + "q", + "è", + "à", + "k", + "y", + "ò", + ], + ), + ( + "Polish", + [ + "a", + "i", + "o", + "e", + "n", + "r", + "z", + "w", + "s", + "c", + "t", + "k", + "y", + "d", + "p", + "m", + "u", + "l", + "j", + "ł", + "g", + "b", + "h", + "ą", + "ę", + "ó", + ], + ), + ( + "Spanish", + [ + "e", + "a", + "o", + "n", + "s", + "r", + "i", + "l", + "d", + "t", + "c", + "u", + "m", + "p", + "b", + "g", + "v", + "f", + "y", + "ó", + "h", + "q", + "í", + "j", + "z", + "á", + ], + ), + ( + "Russian", + [ + "о", + "а", + "е", + "и", + "н", + "с", + "т", + "р", + "в", + "л", + "к", + "м", + "д", + "п", + "у", + "г", + "я", + "ы", + "з", + "б", + "й", + "ь", + "ч", + "х", + "ж", + "ц", + ], + ), + ( + "Japanese", + [ + "の", + "に", + "る", + "た", + "は", + "ー", + "と", + "し", + "を", + "で", + "て", + "が", + "い", + "ン", + "れ", + "な", + "年", + "ス", + "っ", + "ル", + "か", + "ら", + "あ", + "さ", + "も", + "り", + ], + ), + ( + "Portuguese", + [ + "a", + "e", + "o", + "s", + "i", + 
"r", + "d", + "n", + "t", + "m", + "u", + "c", + "l", + "p", + "g", + "v", + "b", + "f", + "h", + "ã", + "q", + "é", + "ç", + "á", + "z", + "í", + ], + ), + ( + "Swedish", + [ + "e", + "a", + "n", + "r", + "t", + "s", + "i", + "l", + "d", + "o", + "m", + "k", + "g", + "v", + "h", + "f", + "u", + "p", + "ä", + "c", + "b", + "ö", + "å", + "y", + "j", + "x", + ], + ), + ( + "Chinese", + [ + "的", + "一", + "是", + "不", + "了", + "在", + "人", + "有", + "我", + "他", + "这", + "个", + "们", + "中", + "来", + "上", + "大", + "为", + "和", + "国", + "地", + "到", + "以", + "说", + "时", + "要", + "就", + "出", + "会", + ], + ), + ( + "Ukrainian", + [ + "о", + "а", + "н", + "і", + "и", + "р", + "в", + "т", + "е", + "с", + "к", + "л", + "у", + "д", + "м", + "п", + "з", + "я", + "ь", + "б", + "г", + "й", + "ч", + "х", + "ц", + "ї", + ], + ), + ( + "Norwegian", + [ + "e", + "r", + "n", + "t", + "a", + "s", + "i", + "o", + "l", + "d", + "g", + "k", + "m", + "v", + "f", + "p", + "u", + "b", + "h", + "å", + "y", + "j", + "ø", + "c", + "æ", + "w", + ], + ), + ( + "Finnish", + [ + "a", + "i", + "n", + "t", + "e", + "s", + "l", + "o", + "u", + "k", + "ä", + "m", + "r", + "v", + "j", + "h", + "p", + "y", + "d", + "ö", + "g", + "c", + "b", + "f", + "w", + "z", + ], + ), + ( + "Vietnamese", + [ + "n", + "h", + "t", + "i", + "c", + "g", + "a", + "o", + "u", + "m", + "l", + "r", + "à", + "đ", + "s", + "e", + "v", + "p", + "b", + "y", + "ư", + "d", + "á", + "k", + "ộ", + "ế", + ], + ), + ( + "Czech", + [ + "o", + "e", + "a", + "n", + "t", + "s", + "i", + "l", + "v", + "r", + "k", + "d", + "u", + "m", + "p", + "í", + "c", + "h", + "z", + "á", + "y", + "j", + "b", + "ě", + "é", + "ř", + ], + ), + ( + "Hungarian", + [ + "e", + "a", + "t", + "l", + "s", + "n", + "k", + "r", + "i", + "o", + "z", + "á", + "é", + "g", + "m", + "b", + "y", + "v", + "d", + "h", + "u", + "p", + "j", + "ö", + "f", + "c", + ], + ), + ( + "Korean", + [ + "이", + "다", + "에", + "의", + "는", + "로", + "하", + "을", + "가", + "고", + "지", + "서", + "한", + "은", + "기", + "으", + "년", + "대", + "사", + "시", + "를", + "리", + "도", + "인", + "스", + "일", + ], + ), + ( + "Indonesian", + [ + "a", + "n", + "e", + "i", + "r", + "t", + "u", + "s", + "d", + "k", + "m", + "l", + "g", + "p", + "b", + "o", + "h", + "y", + "j", + "c", + "w", + "f", + "v", + "z", + "x", + "q", + ], + ), + ( + "Turkish", + [ + "a", + "e", + "i", + "n", + "r", + "l", + "ı", + "k", + "d", + "t", + "s", + "m", + "y", + "u", + "o", + "b", + "ü", + "ş", + "v", + "g", + "z", + "h", + "c", + "p", + "ç", + "ğ", + ], + ), + ( + "Romanian", + [ + "e", + "i", + "a", + "r", + "n", + "t", + "u", + "l", + "o", + "c", + "s", + "d", + "p", + "m", + "ă", + "f", + "v", + "î", + "g", + "b", + "ș", + "ț", + "z", + "h", + "â", + "j", + ], + ), + ( + "Farsi", + [ + "ا", + "ی", + "ر", + "د", + "ن", + "ه", + "و", + "م", + "ت", + "ب", + "س", + "ل", + "ک", + "ش", + "ز", + "ف", + "گ", + "ع", + "خ", + "ق", + "ج", + "آ", + "پ", + "ح", + "ط", + "ص", + ], + ), + ( + "Arabic", + [ + "ا", + "ل", + "ي", + "م", + "و", + "ن", + "ر", + "ت", + "ب", + "ة", + "ع", + "د", + "س", + "ف", + "ه", + "ك", + "ق", + "أ", + "ح", + "ج", + "ش", + "ط", + "ص", + "ى", + "خ", + "إ", + ], + ), + ( + "Danish", + [ + "e", + "r", + "n", + "t", + "a", + "i", + "s", + "d", + "l", + "o", + "g", + "m", + "k", + "f", + "v", + "u", + "b", + "h", + "p", + "å", + "y", + "ø", + "æ", + "c", + "j", + "w", + ], + ), + ( + "Serbian", + [ + "а", + "и", + "о", + "е", + "н", + "р", + "с", + "у", + "т", + "к", + "ј", + "в", + "д", + "м", + "п", + "л", + "г", + "з", + "б", + "a", + "i", + "e", + 
"o", + "n", + "ц", + "ш", + ], + ), + ( + "Lithuanian", + [ + "i", + "a", + "s", + "o", + "r", + "e", + "t", + "n", + "u", + "k", + "m", + "l", + "p", + "v", + "d", + "j", + "g", + "ė", + "b", + "y", + "ų", + "š", + "ž", + "c", + "ą", + "į", + ], + ), + ( + "Slovene", + [ + "e", + "a", + "i", + "o", + "n", + "r", + "s", + "l", + "t", + "j", + "v", + "k", + "d", + "p", + "m", + "u", + "z", + "b", + "g", + "h", + "č", + "c", + "š", + "ž", + "f", + "y", + ], + ), + ( + "Slovak", + [ + "o", + "a", + "e", + "n", + "i", + "r", + "v", + "t", + "s", + "l", + "k", + "d", + "m", + "p", + "u", + "c", + "h", + "j", + "b", + "z", + "á", + "y", + "ý", + "í", + "č", + "é", + ], + ), + ( + "Hebrew", + [ + "י", + "ו", + "ה", + "ל", + "ר", + "ב", + "ת", + "מ", + "א", + "ש", + "נ", + "ע", + "ם", + "ד", + "ק", + "ח", + "פ", + "ס", + "כ", + "ג", + "ט", + "צ", + "ן", + "ז", + "ך", + ], + ), + ( + "Bulgarian", + [ + "а", + "и", + "о", + "е", + "н", + "т", + "р", + "с", + "в", + "л", + "к", + "д", + "п", + "м", + "з", + "г", + "я", + "ъ", + "у", + "б", + "ч", + "ц", + "й", + "ж", + "щ", + "х", + ], + ), + ( + "Croatian", + [ + "a", + "i", + "o", + "e", + "n", + "r", + "j", + "s", + "t", + "u", + "k", + "l", + "v", + "d", + "m", + "p", + "g", + "z", + "b", + "c", + "č", + "h", + "š", + "ž", + "ć", + "f", + ], + ), + ( + "Hindi", + [ + "क", + "र", + "स", + "न", + "त", + "म", + "ह", + "प", + "य", + "ल", + "व", + "ज", + "द", + "ग", + "ब", + "श", + "ट", + "अ", + "ए", + "थ", + "भ", + "ड", + "च", + "ध", + "ष", + "इ", + ], + ), + ( + "Estonian", + [ + "a", + "i", + "e", + "s", + "t", + "l", + "u", + "n", + "o", + "k", + "r", + "d", + "m", + "v", + "g", + "p", + "j", + "h", + "ä", + "b", + "õ", + "ü", + "f", + "c", + "ö", + "y", + ], + ), + ( + "Simple English", + [ + "e", + "a", + "t", + "i", + "o", + "n", + "s", + "r", + "h", + "l", + "d", + "c", + "m", + "u", + "f", + "p", + "g", + "w", + "b", + "y", + "v", + "k", + "j", + "x", + "z", + "q", + ], + ), + ( + "Thai", + [ + "า", + "น", + "ร", + "อ", + "ก", + "เ", + "ง", + "ม", + "ย", + "ล", + "ว", + "ด", + "ท", + "ส", + "ต", + "ะ", + "ป", + "บ", + "ค", + "ห", + "แ", + "จ", + "พ", + "ช", + "ข", + "ใ", + ], + ), + ( + "Greek", + [ + "α", + "τ", + "ο", + "ι", + "ε", + "ν", + "ρ", + "σ", + "κ", + "η", + "π", + "ς", + "υ", + "μ", + "λ", + "ί", + "ό", + "ά", + "γ", + "έ", + "δ", + "ή", + "ω", + "χ", + "θ", + "ύ", + ], + ), + ( + "Tamil", + [ + "க", + "த", + "ப", + "ட", + "ர", + "ம", + "ல", + "ன", + "வ", + "ற", + "ய", + "ள", + "ச", + "ந", + "இ", + "ண", + "அ", + "ஆ", + "ழ", + "ங", + "எ", + "உ", + "ஒ", + "ஸ", + ], + ), + ( + "Classical Chinese", + [ + "之", + "年", + "為", + "也", + "以", + "一", + "人", + "其", + "者", + "國", + "有", + "二", + "十", + "於", + "曰", + "三", + "不", + "大", + "而", + "子", + "中", + "五", + "四", + ], + ), + ( + "Kazakh", + [ + "а", + "ы", + "е", + "н", + "т", + "р", + "л", + "і", + "д", + "с", + "м", + "қ", + "к", + "о", + "б", + "и", + "у", + "ғ", + "ж", + "ң", + "з", + "ш", + "й", + "п", + "г", + "ө", + ], + ), + ] +) diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/charset_normalizer/cd.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/charset_normalizer/cd.py new file mode 100644 index 00000000..8429a0eb --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/charset_normalizer/cd.py @@ -0,0 +1,340 @@ +import importlib +from codecs import IncrementalDecoder +from collections import Counter, OrderedDict +from functools import lru_cache +from typing import Dict, List, Optional, Tuple + +from .assets import FREQUENCIES +from .constant 
import KO_NAMES, LANGUAGE_SUPPORTED_COUNT, TOO_SMALL_SEQUENCE, ZH_NAMES +from .md import is_suspiciously_successive_range +from .models import CoherenceMatches +from .utils import ( + is_accentuated, + is_latin, + is_multi_byte_encoding, + is_unicode_range_secondary, + unicode_range, +) + + +def encoding_unicode_range(iana_name: str) -> List[str]: + """ + Return associated unicode ranges in a single byte code page. + """ + if is_multi_byte_encoding(iana_name): + raise IOError("Function not supported on multi-byte code page") + + decoder = importlib.import_module("encodings.{}".format(iana_name)).IncrementalDecoder # type: ignore + + p = decoder(errors="ignore") # type: IncrementalDecoder + seen_ranges = {} # type: Dict[str, int] + character_count = 0 # type: int + + for i in range(0x40, 0xFF): + chunk = p.decode(bytes([i])) # type: str + + if chunk: + character_range = unicode_range(chunk) # type: Optional[str] + + if character_range is None: + continue + + if is_unicode_range_secondary(character_range) is False: + if character_range not in seen_ranges: + seen_ranges[character_range] = 0 + seen_ranges[character_range] += 1 + character_count += 1 + + return sorted( + [ + character_range + for character_range in seen_ranges + if seen_ranges[character_range] / character_count >= 0.15 + ] + ) + + +def unicode_range_languages(primary_range: str) -> List[str]: + """ + Return inferred languages used with a unicode range. + """ + languages = [] # type: List[str] + + for language, characters in FREQUENCIES.items(): + for character in characters: + if unicode_range(character) == primary_range: + languages.append(language) + break + + return languages + + +@lru_cache() +def encoding_languages(iana_name: str) -> List[str]: + """ + Single-byte encoding language association. Some code page are heavily linked to particular language(s). + This function does the correspondence. + """ + unicode_ranges = encoding_unicode_range(iana_name) # type: List[str] + primary_range = None # type: Optional[str] + + for specified_range in unicode_ranges: + if "Latin" not in specified_range: + primary_range = specified_range + break + + if primary_range is None: + return ["Latin Based"] + + return unicode_range_languages(primary_range) + + +@lru_cache() +def mb_encoding_languages(iana_name: str) -> List[str]: + """ + Multi-byte encoding language association. Some code page are heavily linked to particular language(s). + This function does the correspondence. + """ + if ( + iana_name.startswith("shift_") + or iana_name.startswith("iso2022_jp") + or iana_name.startswith("euc_j") + or iana_name == "cp932" + ): + return ["Japanese"] + if iana_name.startswith("gb") or iana_name in ZH_NAMES: + return ["Chinese", "Classical Chinese"] + if iana_name.startswith("iso2022_kr") or iana_name in KO_NAMES: + return ["Korean"] + + return [] + + +@lru_cache(maxsize=LANGUAGE_SUPPORTED_COUNT) +def get_target_features(language: str) -> Tuple[bool, bool]: + """ + Determine main aspects from a supported language if it contains accents and if is pure Latin. 
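+
+    Illustrative doctest (outcomes follow from the frequency tables above;
+    a sketch, not part of the upstream test suite):
+    >>> get_target_features("Hebrew")
+    (False, False)
+    >>> get_target_features("Slovak")
+    (True, True)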
+ """ + target_have_accents = False # type: bool + target_pure_latin = True # type: bool + + for character in FREQUENCIES[language]: + if not target_have_accents and is_accentuated(character): + target_have_accents = True + if target_pure_latin and is_latin(character) is False: + target_pure_latin = False + + return target_have_accents, target_pure_latin + + +def alphabet_languages( + characters: List[str], ignore_non_latin: bool = False +) -> List[str]: + """ + Return associated languages associated to given characters. + """ + languages = [] # type: List[Tuple[str, float]] + + source_have_accents = any(is_accentuated(character) for character in characters) + + for language, language_characters in FREQUENCIES.items(): + + target_have_accents, target_pure_latin = get_target_features(language) + + if ignore_non_latin and target_pure_latin is False: + continue + + if target_have_accents is False and source_have_accents: + continue + + character_count = len(language_characters) # type: int + + character_match_count = len( + [c for c in language_characters if c in characters] + ) # type: int + + ratio = character_match_count / character_count # type: float + + if ratio >= 0.2: + languages.append((language, ratio)) + + languages = sorted(languages, key=lambda x: x[1], reverse=True) + + return [compatible_language[0] for compatible_language in languages] + + +def characters_popularity_compare( + language: str, ordered_characters: List[str] +) -> float: + """ + Determine if a ordered characters list (by occurrence from most appearance to rarest) match a particular language. + The result is a ratio between 0. (absolutely no correspondence) and 1. (near perfect fit). + Beware that is function is not strict on the match in order to ease the detection. (Meaning close match is 1.) + """ + if language not in FREQUENCIES: + raise ValueError("{} not available".format(language)) + + character_approved_count = 0 # type: int + + for character in ordered_characters: + if character not in FREQUENCIES[language]: + continue + + characters_before_source = FREQUENCIES[language][ + 0 : FREQUENCIES[language].index(character) + ] # type: List[str] + characters_after_source = FREQUENCIES[language][ + FREQUENCIES[language].index(character) : + ] # type: List[str] + + characters_before = ordered_characters[ + 0 : ordered_characters.index(character) + ] # type: List[str] + characters_after = ordered_characters[ + ordered_characters.index(character) : + ] # type: List[str] + + before_match_count = [ + e in characters_before for e in characters_before_source + ].count( + True + ) # type: int + after_match_count = [ + e in characters_after for e in characters_after_source + ].count( + True + ) # type: int + + if len(characters_before_source) == 0 and before_match_count <= 4: + character_approved_count += 1 + continue + + if len(characters_after_source) == 0 and after_match_count <= 4: + character_approved_count += 1 + continue + + if ( + before_match_count / len(characters_before_source) >= 0.4 + or after_match_count / len(characters_after_source) >= 0.4 + ): + character_approved_count += 1 + continue + + return character_approved_count / len(ordered_characters) + + +def alpha_unicode_split(decoded_sequence: str) -> List[str]: + """ + Given a decoded text sequence, return a list of str. Unicode range / alphabet separation. + Ex. a text containing English/Latin with a bit a Hebrew will return two items in the resulting list; + One containing the latin letters and the other hebrew. 
+ """ + layers = OrderedDict() # type: Dict[str, str] + + for character in decoded_sequence: + if character.isalpha() is False: + continue + + character_range = unicode_range(character) # type: Optional[str] + + if character_range is None: + continue + + layer_target_range = None # type: Optional[str] + + for discovered_range in layers: + if ( + is_suspiciously_successive_range(discovered_range, character_range) + is False + ): + layer_target_range = discovered_range + break + + if layer_target_range is None: + layer_target_range = character_range + + if layer_target_range not in layers: + layers[layer_target_range] = character.lower() + continue + + layers[layer_target_range] += character.lower() + + return list(layers.values()) + + +def merge_coherence_ratios(results: List[CoherenceMatches]) -> CoherenceMatches: + """ + This function merge results previously given by the function coherence_ratio. + The return type is the same as coherence_ratio. + """ + per_language_ratios = OrderedDict() # type: Dict[str, List[float]] + for result in results: + for sub_result in result: + language, ratio = sub_result + if language not in per_language_ratios: + per_language_ratios[language] = [ratio] + continue + per_language_ratios[language].append(ratio) + + merge = [ + ( + language, + round( + sum(per_language_ratios[language]) / len(per_language_ratios[language]), + 4, + ), + ) + for language in per_language_ratios + ] + + return sorted(merge, key=lambda x: x[1], reverse=True) + + +@lru_cache(maxsize=2048) +def coherence_ratio( + decoded_sequence: str, threshold: float = 0.1, lg_inclusion: Optional[str] = None +) -> CoherenceMatches: + """ + Detect ANY language that can be identified in given sequence. The sequence will be analysed by layers. + A layer = Character extraction by alphabets/ranges. 
+ """ + + results = [] # type: List[Tuple[str, float]] + ignore_non_latin = False # type: bool + + sufficient_match_count = 0 # type: int + + lg_inclusion_list = lg_inclusion.split(",") if lg_inclusion is not None else [] + if "Latin Based" in lg_inclusion_list: + ignore_non_latin = True + lg_inclusion_list.remove("Latin Based") + + for layer in alpha_unicode_split(decoded_sequence): + sequence_frequencies = Counter(layer) # type: Counter + most_common = sequence_frequencies.most_common() + + character_count = sum(o for c, o in most_common) # type: int + + if character_count <= TOO_SMALL_SEQUENCE: + continue + + popular_character_ordered = [c for c, o in most_common] # type: List[str] + + for language in lg_inclusion_list or alphabet_languages( + popular_character_ordered, ignore_non_latin + ): + ratio = characters_popularity_compare( + language, popular_character_ordered + ) # type: float + + if ratio < threshold: + continue + elif ratio >= 0.8: + sufficient_match_count += 1 + + results.append((language, round(ratio, 4))) + + if sufficient_match_count >= 3: + break + + return sorted(results, key=lambda x: x[1], reverse=True) diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/charset_normalizer/cli/__init__.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/charset_normalizer/cli/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/charset_normalizer/cli/normalizer.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/charset_normalizer/cli/normalizer.py new file mode 100644 index 00000000..5f912c92 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/charset_normalizer/cli/normalizer.py @@ -0,0 +1,290 @@ +import argparse +import sys +from json import dumps +from os.path import abspath +from platform import python_version +from typing import List + +from charset_normalizer import from_fp +from charset_normalizer.models import CliDetectionResult +from charset_normalizer.version import __version__ + + +def query_yes_no(question: str, default: str = "yes") -> bool: + """Ask a yes/no question via input() and return their answer. + + "question" is a string that is presented to the user. + "default" is the presumed answer if the user just hits . + It must be "yes" (the default), "no" or None (meaning + an answer is required of the user). + + The "answer" return value is True for "yes" or False for "no". + + Credit goes to (c) https://stackoverflow.com/questions/3041986/apt-command-line-interface-like-yes-no-input + """ + valid = {"yes": True, "y": True, "ye": True, "no": False, "n": False} + if default is None: + prompt = " [y/n] " + elif default == "yes": + prompt = " [Y/n] " + elif default == "no": + prompt = " [y/N] " + else: + raise ValueError("invalid default answer: '%s'" % default) + + while True: + sys.stdout.write(question + prompt) + choice = input().lower() + if default is not None and choice == "": + return valid[default] + elif choice in valid: + return valid[choice] + else: + sys.stdout.write("Please respond with 'yes' or 'no' " "(or 'y' or 'n').\n") + + +def cli_detect(argv: List[str] = None) -> int: + """ + CLI assistant using ARGV and ArgumentParser + :param argv: + :return: 0 if everything is fine, anything else equal trouble + """ + parser = argparse.ArgumentParser( + description="The Real First Universal Charset Detector. " + "Discover originating encoding used on text file. " + "Normalize text to unicode." 
+ ) + + parser.add_argument( + "files", type=argparse.FileType("rb"), nargs="+", help="File(s) to be analysed" + ) + parser.add_argument( + "-v", + "--verbose", + action="store_true", + default=False, + dest="verbose", + help="Display complementary information about file if any. " + "Stdout will contain logs about the detection process.", + ) + parser.add_argument( + "-a", + "--with-alternative", + action="store_true", + default=False, + dest="alternatives", + help="Output complementary possibilities if any. Top-level JSON WILL be a list.", + ) + parser.add_argument( + "-n", + "--normalize", + action="store_true", + default=False, + dest="normalize", + help="Permit to normalize input file. If not set, program does not write anything.", + ) + parser.add_argument( + "-m", + "--minimal", + action="store_true", + default=False, + dest="minimal", + help="Only output the charset detected to STDOUT. Disabling JSON output.", + ) + parser.add_argument( + "-r", + "--replace", + action="store_true", + default=False, + dest="replace", + help="Replace file when trying to normalize it instead of creating a new one.", + ) + parser.add_argument( + "-f", + "--force", + action="store_true", + default=False, + dest="force", + help="Replace file without asking if you are sure, use this flag with caution.", + ) + parser.add_argument( + "-t", + "--threshold", + action="store", + default=0.1, + type=float, + dest="threshold", + help="Define a custom maximum amount of chaos allowed in decoded content. 0. <= chaos <= 1.", + ) + parser.add_argument( + "--version", + action="version", + version="Charset-Normalizer {} - Python {}".format( + __version__, python_version() + ), + help="Show version information and exit.", + ) + + args = parser.parse_args(argv) + + if args.replace is True and args.normalize is False: + print("Use --replace in addition of --normalize only.", file=sys.stderr) + return 1 + + if args.force is True and args.replace is False: + print("Use --force in addition of --replace only.", file=sys.stderr) + return 1 + + if args.threshold < 0.0 or args.threshold > 1.0: + print("--threshold VALUE should be between 0. AND 1.", file=sys.stderr) + return 1 + + x_ = [] + + for my_file in args.files: + + matches = from_fp(my_file, threshold=args.threshold, explain=args.verbose) + + best_guess = matches.best() + + if best_guess is None: + print( + 'Unable to identify originating encoding for "{}". {}'.format( + my_file.name, + "Maybe try increasing maximum amount of chaos." 
+ if args.threshold < 1.0 + else "", + ), + file=sys.stderr, + ) + x_.append( + CliDetectionResult( + abspath(my_file.name), + None, + [], + [], + "Unknown", + [], + False, + 1.0, + 0.0, + None, + True, + ) + ) + else: + x_.append( + CliDetectionResult( + abspath(my_file.name), + best_guess.encoding, + best_guess.encoding_aliases, + [ + cp + for cp in best_guess.could_be_from_charset + if cp != best_guess.encoding + ], + best_guess.language, + best_guess.alphabets, + best_guess.bom, + best_guess.percent_chaos, + best_guess.percent_coherence, + None, + True, + ) + ) + + if len(matches) > 1 and args.alternatives: + for el in matches: + if el != best_guess: + x_.append( + CliDetectionResult( + abspath(my_file.name), + el.encoding, + el.encoding_aliases, + [ + cp + for cp in el.could_be_from_charset + if cp != el.encoding + ], + el.language, + el.alphabets, + el.bom, + el.percent_chaos, + el.percent_coherence, + None, + False, + ) + ) + + if args.normalize is True: + + if best_guess.encoding.startswith("utf") is True: + print( + '"{}" file does not need to be normalized, as it already came from unicode.'.format( + my_file.name + ), + file=sys.stderr, + ) + if my_file.closed is False: + my_file.close() + continue + + o_ = my_file.name.split(".") # type: List[str] + + if args.replace is False: + o_.insert(-1, best_guess.encoding) + if my_file.closed is False: + my_file.close() + elif ( + args.force is False + and query_yes_no( + 'Are you sure to normalize "{}" by replacing it ?'.format( + my_file.name + ), + "no", + ) + is False + ): + if my_file.closed is False: + my_file.close() + continue + + try: + x_[0].unicode_path = abspath("./{}".format(".".join(o_))) + + with open(x_[0].unicode_path, "w", encoding="utf-8") as fp: + fp.write(str(best_guess)) + except IOError as e: + print(str(e), file=sys.stderr) + if my_file.closed is False: + my_file.close() + return 2 + + if my_file.closed is False: + my_file.close() + + if args.minimal is False: + print( + dumps( + [el.__dict__ for el in x_] if len(x_) > 1 else x_[0].__dict__, + ensure_ascii=True, + indent=4, + ) + ) + else: + for my_file in args.files: + print( + ", ".join( + [ + el.encoding or "undefined" + for el in x_ + if el.path == abspath(my_file.name) + ] + ) + ) + + return 0 + + +if __name__ == "__main__": + cli_detect() diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/charset_normalizer/constant.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/charset_normalizer/constant.py new file mode 100644 index 00000000..c32f5cf2 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/charset_normalizer/constant.py @@ -0,0 +1,503 @@ +from codecs import BOM_UTF8, BOM_UTF16_BE, BOM_UTF16_LE, BOM_UTF32_BE, BOM_UTF32_LE +from collections import OrderedDict +from encodings.aliases import aliases +from re import IGNORECASE, compile as re_compile +from typing import Dict, List, Set, Union + +from .assets import FREQUENCIES + +# Contain for each eligible encoding a list of/item bytes SIG/BOM +ENCODING_MARKS = OrderedDict( + [ + ("utf_8", BOM_UTF8), + ( + "utf_7", + [ + b"\x2b\x2f\x76\x38", + b"\x2b\x2f\x76\x39", + b"\x2b\x2f\x76\x2b", + b"\x2b\x2f\x76\x2f", + b"\x2b\x2f\x76\x38\x2d", + ], + ), + ("gb18030", b"\x84\x31\x95\x33"), + ("utf_32", [BOM_UTF32_BE, BOM_UTF32_LE]), + ("utf_16", [BOM_UTF16_BE, BOM_UTF16_LE]), + ] +) # type: Dict[str, Union[bytes, List[bytes]]] + +TOO_SMALL_SEQUENCE = 32 # type: int +TOO_BIG_SEQUENCE = int(10e6) # type: int + +UTF8_MAXIMAL_ALLOCATION = 1112064 # type: int + +UNICODE_RANGES_COMBINED = { + 
"Control character": range(31 + 1), + "Basic Latin": range(32, 127 + 1), + "Latin-1 Supplement": range(128, 255 + 1), + "Latin Extended-A": range(256, 383 + 1), + "Latin Extended-B": range(384, 591 + 1), + "IPA Extensions": range(592, 687 + 1), + "Spacing Modifier Letters": range(688, 767 + 1), + "Combining Diacritical Marks": range(768, 879 + 1), + "Greek and Coptic": range(880, 1023 + 1), + "Cyrillic": range(1024, 1279 + 1), + "Cyrillic Supplement": range(1280, 1327 + 1), + "Armenian": range(1328, 1423 + 1), + "Hebrew": range(1424, 1535 + 1), + "Arabic": range(1536, 1791 + 1), + "Syriac": range(1792, 1871 + 1), + "Arabic Supplement": range(1872, 1919 + 1), + "Thaana": range(1920, 1983 + 1), + "NKo": range(1984, 2047 + 1), + "Samaritan": range(2048, 2111 + 1), + "Mandaic": range(2112, 2143 + 1), + "Syriac Supplement": range(2144, 2159 + 1), + "Arabic Extended-A": range(2208, 2303 + 1), + "Devanagari": range(2304, 2431 + 1), + "Bengali": range(2432, 2559 + 1), + "Gurmukhi": range(2560, 2687 + 1), + "Gujarati": range(2688, 2815 + 1), + "Oriya": range(2816, 2943 + 1), + "Tamil": range(2944, 3071 + 1), + "Telugu": range(3072, 3199 + 1), + "Kannada": range(3200, 3327 + 1), + "Malayalam": range(3328, 3455 + 1), + "Sinhala": range(3456, 3583 + 1), + "Thai": range(3584, 3711 + 1), + "Lao": range(3712, 3839 + 1), + "Tibetan": range(3840, 4095 + 1), + "Myanmar": range(4096, 4255 + 1), + "Georgian": range(4256, 4351 + 1), + "Hangul Jamo": range(4352, 4607 + 1), + "Ethiopic": range(4608, 4991 + 1), + "Ethiopic Supplement": range(4992, 5023 + 1), + "Cherokee": range(5024, 5119 + 1), + "Unified Canadian Aboriginal Syllabics": range(5120, 5759 + 1), + "Ogham": range(5760, 5791 + 1), + "Runic": range(5792, 5887 + 1), + "Tagalog": range(5888, 5919 + 1), + "Hanunoo": range(5920, 5951 + 1), + "Buhid": range(5952, 5983 + 1), + "Tagbanwa": range(5984, 6015 + 1), + "Khmer": range(6016, 6143 + 1), + "Mongolian": range(6144, 6319 + 1), + "Unified Canadian Aboriginal Syllabics Extended": range(6320, 6399 + 1), + "Limbu": range(6400, 6479 + 1), + "Tai Le": range(6480, 6527 + 1), + "New Tai Lue": range(6528, 6623 + 1), + "Khmer Symbols": range(6624, 6655 + 1), + "Buginese": range(6656, 6687 + 1), + "Tai Tham": range(6688, 6831 + 1), + "Combining Diacritical Marks Extended": range(6832, 6911 + 1), + "Balinese": range(6912, 7039 + 1), + "Sundanese": range(7040, 7103 + 1), + "Batak": range(7104, 7167 + 1), + "Lepcha": range(7168, 7247 + 1), + "Ol Chiki": range(7248, 7295 + 1), + "Cyrillic Extended C": range(7296, 7311 + 1), + "Sundanese Supplement": range(7360, 7375 + 1), + "Vedic Extensions": range(7376, 7423 + 1), + "Phonetic Extensions": range(7424, 7551 + 1), + "Phonetic Extensions Supplement": range(7552, 7615 + 1), + "Combining Diacritical Marks Supplement": range(7616, 7679 + 1), + "Latin Extended Additional": range(7680, 7935 + 1), + "Greek Extended": range(7936, 8191 + 1), + "General Punctuation": range(8192, 8303 + 1), + "Superscripts and Subscripts": range(8304, 8351 + 1), + "Currency Symbols": range(8352, 8399 + 1), + "Combining Diacritical Marks for Symbols": range(8400, 8447 + 1), + "Letterlike Symbols": range(8448, 8527 + 1), + "Number Forms": range(8528, 8591 + 1), + "Arrows": range(8592, 8703 + 1), + "Mathematical Operators": range(8704, 8959 + 1), + "Miscellaneous Technical": range(8960, 9215 + 1), + "Control Pictures": range(9216, 9279 + 1), + "Optical Character Recognition": range(9280, 9311 + 1), + "Enclosed Alphanumerics": range(9312, 9471 + 1), + "Box Drawing": range(9472, 9599 + 1), + "Block 
Elements": range(9600, 9631 + 1), + "Geometric Shapes": range(9632, 9727 + 1), + "Miscellaneous Symbols": range(9728, 9983 + 1), + "Dingbats": range(9984, 10175 + 1), + "Miscellaneous Mathematical Symbols-A": range(10176, 10223 + 1), + "Supplemental Arrows-A": range(10224, 10239 + 1), + "Braille Patterns": range(10240, 10495 + 1), + "Supplemental Arrows-B": range(10496, 10623 + 1), + "Miscellaneous Mathematical Symbols-B": range(10624, 10751 + 1), + "Supplemental Mathematical Operators": range(10752, 11007 + 1), + "Miscellaneous Symbols and Arrows": range(11008, 11263 + 1), + "Glagolitic": range(11264, 11359 + 1), + "Latin Extended-C": range(11360, 11391 + 1), + "Coptic": range(11392, 11519 + 1), + "Georgian Supplement": range(11520, 11567 + 1), + "Tifinagh": range(11568, 11647 + 1), + "Ethiopic Extended": range(11648, 11743 + 1), + "Cyrillic Extended-A": range(11744, 11775 + 1), + "Supplemental Punctuation": range(11776, 11903 + 1), + "CJK Radicals Supplement": range(11904, 12031 + 1), + "Kangxi Radicals": range(12032, 12255 + 1), + "Ideographic Description Characters": range(12272, 12287 + 1), + "CJK Symbols and Punctuation": range(12288, 12351 + 1), + "Hiragana": range(12352, 12447 + 1), + "Katakana": range(12448, 12543 + 1), + "Bopomofo": range(12544, 12591 + 1), + "Hangul Compatibility Jamo": range(12592, 12687 + 1), + "Kanbun": range(12688, 12703 + 1), + "Bopomofo Extended": range(12704, 12735 + 1), + "CJK Strokes": range(12736, 12783 + 1), + "Katakana Phonetic Extensions": range(12784, 12799 + 1), + "Enclosed CJK Letters and Months": range(12800, 13055 + 1), + "CJK Compatibility": range(13056, 13311 + 1), + "CJK Unified Ideographs Extension A": range(13312, 19903 + 1), + "Yijing Hexagram Symbols": range(19904, 19967 + 1), + "CJK Unified Ideographs": range(19968, 40959 + 1), + "Yi Syllables": range(40960, 42127 + 1), + "Yi Radicals": range(42128, 42191 + 1), + "Lisu": range(42192, 42239 + 1), + "Vai": range(42240, 42559 + 1), + "Cyrillic Extended-B": range(42560, 42655 + 1), + "Bamum": range(42656, 42751 + 1), + "Modifier Tone Letters": range(42752, 42783 + 1), + "Latin Extended-D": range(42784, 43007 + 1), + "Syloti Nagri": range(43008, 43055 + 1), + "Common Indic Number Forms": range(43056, 43071 + 1), + "Phags-pa": range(43072, 43135 + 1), + "Saurashtra": range(43136, 43231 + 1), + "Devanagari Extended": range(43232, 43263 + 1), + "Kayah Li": range(43264, 43311 + 1), + "Rejang": range(43312, 43359 + 1), + "Hangul Jamo Extended-A": range(43360, 43391 + 1), + "Javanese": range(43392, 43487 + 1), + "Myanmar Extended-B": range(43488, 43519 + 1), + "Cham": range(43520, 43615 + 1), + "Myanmar Extended-A": range(43616, 43647 + 1), + "Tai Viet": range(43648, 43743 + 1), + "Meetei Mayek Extensions": range(43744, 43775 + 1), + "Ethiopic Extended-A": range(43776, 43823 + 1), + "Latin Extended-E": range(43824, 43887 + 1), + "Cherokee Supplement": range(43888, 43967 + 1), + "Meetei Mayek": range(43968, 44031 + 1), + "Hangul Syllables": range(44032, 55215 + 1), + "Hangul Jamo Extended-B": range(55216, 55295 + 1), + "High Surrogates": range(55296, 56191 + 1), + "High Private Use Surrogates": range(56192, 56319 + 1), + "Low Surrogates": range(56320, 57343 + 1), + "Private Use Area": range(57344, 63743 + 1), + "CJK Compatibility Ideographs": range(63744, 64255 + 1), + "Alphabetic Presentation Forms": range(64256, 64335 + 1), + "Arabic Presentation Forms-A": range(64336, 65023 + 1), + "Variation Selectors": range(65024, 65039 + 1), + "Vertical Forms": range(65040, 65055 + 1), + "Combining Half 
Marks": range(65056, 65071 + 1), + "CJK Compatibility Forms": range(65072, 65103 + 1), + "Small Form Variants": range(65104, 65135 + 1), + "Arabic Presentation Forms-B": range(65136, 65279 + 1), + "Halfwidth and Fullwidth Forms": range(65280, 65519 + 1), + "Specials": range(65520, 65535 + 1), + "Linear B Syllabary": range(65536, 65663 + 1), + "Linear B Ideograms": range(65664, 65791 + 1), + "Aegean Numbers": range(65792, 65855 + 1), + "Ancient Greek Numbers": range(65856, 65935 + 1), + "Ancient Symbols": range(65936, 65999 + 1), + "Phaistos Disc": range(66000, 66047 + 1), + "Lycian": range(66176, 66207 + 1), + "Carian": range(66208, 66271 + 1), + "Coptic Epact Numbers": range(66272, 66303 + 1), + "Old Italic": range(66304, 66351 + 1), + "Gothic": range(66352, 66383 + 1), + "Old Permic": range(66384, 66431 + 1), + "Ugaritic": range(66432, 66463 + 1), + "Old Persian": range(66464, 66527 + 1), + "Deseret": range(66560, 66639 + 1), + "Shavian": range(66640, 66687 + 1), + "Osmanya": range(66688, 66735 + 1), + "Osage": range(66736, 66815 + 1), + "Elbasan": range(66816, 66863 + 1), + "Caucasian Albanian": range(66864, 66927 + 1), + "Linear A": range(67072, 67455 + 1), + "Cypriot Syllabary": range(67584, 67647 + 1), + "Imperial Aramaic": range(67648, 67679 + 1), + "Palmyrene": range(67680, 67711 + 1), + "Nabataean": range(67712, 67759 + 1), + "Hatran": range(67808, 67839 + 1), + "Phoenician": range(67840, 67871 + 1), + "Lydian": range(67872, 67903 + 1), + "Meroitic Hieroglyphs": range(67968, 67999 + 1), + "Meroitic Cursive": range(68000, 68095 + 1), + "Kharoshthi": range(68096, 68191 + 1), + "Old South Arabian": range(68192, 68223 + 1), + "Old North Arabian": range(68224, 68255 + 1), + "Manichaean": range(68288, 68351 + 1), + "Avestan": range(68352, 68415 + 1), + "Inscriptional Parthian": range(68416, 68447 + 1), + "Inscriptional Pahlavi": range(68448, 68479 + 1), + "Psalter Pahlavi": range(68480, 68527 + 1), + "Old Turkic": range(68608, 68687 + 1), + "Old Hungarian": range(68736, 68863 + 1), + "Rumi Numeral Symbols": range(69216, 69247 + 1), + "Brahmi": range(69632, 69759 + 1), + "Kaithi": range(69760, 69839 + 1), + "Sora Sompeng": range(69840, 69887 + 1), + "Chakma": range(69888, 69967 + 1), + "Mahajani": range(69968, 70015 + 1), + "Sharada": range(70016, 70111 + 1), + "Sinhala Archaic Numbers": range(70112, 70143 + 1), + "Khojki": range(70144, 70223 + 1), + "Multani": range(70272, 70319 + 1), + "Khudawadi": range(70320, 70399 + 1), + "Grantha": range(70400, 70527 + 1), + "Newa": range(70656, 70783 + 1), + "Tirhuta": range(70784, 70879 + 1), + "Siddham": range(71040, 71167 + 1), + "Modi": range(71168, 71263 + 1), + "Mongolian Supplement": range(71264, 71295 + 1), + "Takri": range(71296, 71375 + 1), + "Ahom": range(71424, 71487 + 1), + "Warang Citi": range(71840, 71935 + 1), + "Zanabazar Square": range(72192, 72271 + 1), + "Soyombo": range(72272, 72367 + 1), + "Pau Cin Hau": range(72384, 72447 + 1), + "Bhaiksuki": range(72704, 72815 + 1), + "Marchen": range(72816, 72895 + 1), + "Masaram Gondi": range(72960, 73055 + 1), + "Cuneiform": range(73728, 74751 + 1), + "Cuneiform Numbers and Punctuation": range(74752, 74879 + 1), + "Early Dynastic Cuneiform": range(74880, 75087 + 1), + "Egyptian Hieroglyphs": range(77824, 78895 + 1), + "Anatolian Hieroglyphs": range(82944, 83583 + 1), + "Bamum Supplement": range(92160, 92735 + 1), + "Mro": range(92736, 92783 + 1), + "Bassa Vah": range(92880, 92927 + 1), + "Pahawh Hmong": range(92928, 93071 + 1), + "Miao": range(93952, 94111 + 1), + "Ideographic Symbols 
and Punctuation": range(94176, 94207 + 1), + "Tangut": range(94208, 100351 + 1), + "Tangut Components": range(100352, 101119 + 1), + "Kana Supplement": range(110592, 110847 + 1), + "Kana Extended-A": range(110848, 110895 + 1), + "Nushu": range(110960, 111359 + 1), + "Duployan": range(113664, 113823 + 1), + "Shorthand Format Controls": range(113824, 113839 + 1), + "Byzantine Musical Symbols": range(118784, 119039 + 1), + "Musical Symbols": range(119040, 119295 + 1), + "Ancient Greek Musical Notation": range(119296, 119375 + 1), + "Tai Xuan Jing Symbols": range(119552, 119647 + 1), + "Counting Rod Numerals": range(119648, 119679 + 1), + "Mathematical Alphanumeric Symbols": range(119808, 120831 + 1), + "Sutton SignWriting": range(120832, 121519 + 1), + "Glagolitic Supplement": range(122880, 122927 + 1), + "Mende Kikakui": range(124928, 125151 + 1), + "Adlam": range(125184, 125279 + 1), + "Arabic Mathematical Alphabetic Symbols": range(126464, 126719 + 1), + "Mahjong Tiles": range(126976, 127023 + 1), + "Domino Tiles": range(127024, 127135 + 1), + "Playing Cards": range(127136, 127231 + 1), + "Enclosed Alphanumeric Supplement": range(127232, 127487 + 1), + "Enclosed Ideographic Supplement": range(127488, 127743 + 1), + "Miscellaneous Symbols and Pictographs": range(127744, 128511 + 1), + "Emoticons range(Emoji)": range(128512, 128591 + 1), + "Ornamental Dingbats": range(128592, 128639 + 1), + "Transport and Map Symbols": range(128640, 128767 + 1), + "Alchemical Symbols": range(128768, 128895 + 1), + "Geometric Shapes Extended": range(128896, 129023 + 1), + "Supplemental Arrows-C": range(129024, 129279 + 1), + "Supplemental Symbols and Pictographs": range(129280, 129535 + 1), + "CJK Unified Ideographs Extension B": range(131072, 173791 + 1), + "CJK Unified Ideographs Extension C": range(173824, 177983 + 1), + "CJK Unified Ideographs Extension D": range(177984, 178207 + 1), + "CJK Unified Ideographs Extension E": range(178208, 183983 + 1), + "CJK Unified Ideographs Extension F": range(183984, 191471 + 1), + "CJK Compatibility Ideographs Supplement": range(194560, 195103 + 1), + "Tags": range(917504, 917631 + 1), + "Variation Selectors Supplement": range(917760, 917999 + 1), +} # type: Dict[str, range] + + +UNICODE_SECONDARY_RANGE_KEYWORD = [ + "Supplement", + "Extended", + "Extensions", + "Modifier", + "Marks", + "Punctuation", + "Symbols", + "Forms", + "Operators", + "Miscellaneous", + "Drawing", + "Block", + "Shapes", + "Supplemental", + "Tags", +] # type: List[str] + +RE_POSSIBLE_ENCODING_INDICATION = re_compile( + r"(?:(?:encoding)|(?:charset)|(?:coding))(?:[\:= ]{1,10})(?:[\"\']?)([a-zA-Z0-9\-_]+)(?:[\"\']?)", + IGNORECASE, +) + +IANA_SUPPORTED = sorted( + filter( + lambda x: x.endswith("_codec") is False + and x not in {"rot_13", "tactis", "mbcs"}, + list(set(aliases.values())), + ) +) # type: List[str] + +IANA_SUPPORTED_COUNT = len(IANA_SUPPORTED) # type: int + +# pre-computed code page that are similar using the function cp_similarity. 
+IANA_SUPPORTED_SIMILAR = { + "cp037": ["cp1026", "cp1140", "cp273", "cp500"], + "cp1026": ["cp037", "cp1140", "cp273", "cp500"], + "cp1125": ["cp866"], + "cp1140": ["cp037", "cp1026", "cp273", "cp500"], + "cp1250": ["iso8859_2"], + "cp1251": ["kz1048", "ptcp154"], + "cp1252": ["iso8859_15", "iso8859_9", "latin_1"], + "cp1253": ["iso8859_7"], + "cp1254": ["iso8859_15", "iso8859_9", "latin_1"], + "cp1257": ["iso8859_13"], + "cp273": ["cp037", "cp1026", "cp1140", "cp500"], + "cp437": ["cp850", "cp858", "cp860", "cp861", "cp862", "cp863", "cp865"], + "cp500": ["cp037", "cp1026", "cp1140", "cp273"], + "cp850": ["cp437", "cp857", "cp858", "cp865"], + "cp857": ["cp850", "cp858", "cp865"], + "cp858": ["cp437", "cp850", "cp857", "cp865"], + "cp860": ["cp437", "cp861", "cp862", "cp863", "cp865"], + "cp861": ["cp437", "cp860", "cp862", "cp863", "cp865"], + "cp862": ["cp437", "cp860", "cp861", "cp863", "cp865"], + "cp863": ["cp437", "cp860", "cp861", "cp862", "cp865"], + "cp865": ["cp437", "cp850", "cp857", "cp858", "cp860", "cp861", "cp862", "cp863"], + "cp866": ["cp1125"], + "iso8859_10": ["iso8859_14", "iso8859_15", "iso8859_4", "iso8859_9", "latin_1"], + "iso8859_11": ["tis_620"], + "iso8859_13": ["cp1257"], + "iso8859_14": [ + "iso8859_10", + "iso8859_15", + "iso8859_16", + "iso8859_3", + "iso8859_9", + "latin_1", + ], + "iso8859_15": [ + "cp1252", + "cp1254", + "iso8859_10", + "iso8859_14", + "iso8859_16", + "iso8859_3", + "iso8859_9", + "latin_1", + ], + "iso8859_16": [ + "iso8859_14", + "iso8859_15", + "iso8859_2", + "iso8859_3", + "iso8859_9", + "latin_1", + ], + "iso8859_2": ["cp1250", "iso8859_16", "iso8859_4"], + "iso8859_3": ["iso8859_14", "iso8859_15", "iso8859_16", "iso8859_9", "latin_1"], + "iso8859_4": ["iso8859_10", "iso8859_2", "iso8859_9", "latin_1"], + "iso8859_7": ["cp1253"], + "iso8859_9": [ + "cp1252", + "cp1254", + "cp1258", + "iso8859_10", + "iso8859_14", + "iso8859_15", + "iso8859_16", + "iso8859_3", + "iso8859_4", + "latin_1", + ], + "kz1048": ["cp1251", "ptcp154"], + "latin_1": [ + "cp1252", + "cp1254", + "cp1258", + "iso8859_10", + "iso8859_14", + "iso8859_15", + "iso8859_16", + "iso8859_3", + "iso8859_4", + "iso8859_9", + ], + "mac_iceland": ["mac_roman", "mac_turkish"], + "mac_roman": ["mac_iceland", "mac_turkish"], + "mac_turkish": ["mac_iceland", "mac_roman"], + "ptcp154": ["cp1251", "kz1048"], + "tis_620": ["iso8859_11"], +} # type: Dict[str, List[str]] + + +CHARDET_CORRESPONDENCE = { + "iso2022_kr": "ISO-2022-KR", + "iso2022_jp": "ISO-2022-JP", + "euc_kr": "EUC-KR", + "tis_620": "TIS-620", + "utf_32": "UTF-32", + "euc_jp": "EUC-JP", + "koi8_r": "KOI8-R", + "iso8859_1": "ISO-8859-1", + "iso8859_2": "ISO-8859-2", + "iso8859_5": "ISO-8859-5", + "iso8859_6": "ISO-8859-6", + "iso8859_7": "ISO-8859-7", + "iso8859_8": "ISO-8859-8", + "utf_16": "UTF-16", + "cp855": "IBM855", + "mac_cyrillic": "MacCyrillic", + "gb2312": "GB2312", + "gb18030": "GB18030", + "cp932": "CP932", + "cp866": "IBM866", + "utf_8": "utf-8", + "utf_8_sig": "UTF-8-SIG", + "shift_jis": "SHIFT_JIS", + "big5": "Big5", + "cp1250": "windows-1250", + "cp1251": "windows-1251", + "cp1252": "Windows-1252", + "cp1253": "windows-1253", + "cp1255": "windows-1255", + "cp1256": "windows-1256", + "cp1254": "Windows-1254", + "cp949": "CP949", +} # type: Dict[str, str] + + +COMMON_SAFE_ASCII_CHARACTERS = { + "<", + ">", + "=", + ":", + "/", + "&", + ";", + "{", + "}", + "[", + "]", + ",", + "|", + '"', + "-", +} # type: Set[str] + + +KO_NAMES = {"johab", "cp949", "euc_kr"} # type: Set[str] +ZH_NAMES = {"big5", "cp950", 
"big5hkscs", "hz"} # type: Set[str] + +NOT_PRINTABLE_PATTERN = re_compile(r"[0-9\W\n\r\t]+") + +LANGUAGE_SUPPORTED_COUNT = len(FREQUENCIES) # type: int + +# Logging LEVEL bellow DEBUG +TRACE = 5 # type: int diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/charset_normalizer/legacy.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/charset_normalizer/legacy.py new file mode 100644 index 00000000..cdebe2b8 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/charset_normalizer/legacy.py @@ -0,0 +1,95 @@ +import warnings +from typing import Dict, Optional, Union + +from .api import from_bytes, from_fp, from_path, normalize +from .constant import CHARDET_CORRESPONDENCE +from .models import CharsetMatch, CharsetMatches + + +def detect(byte_str: bytes) -> Dict[str, Optional[Union[str, float]]]: + """ + chardet legacy method + Detect the encoding of the given byte string. It should be mostly backward-compatible. + Encoding name will match Chardet own writing whenever possible. (Not on encoding name unsupported by it) + This function is deprecated and should be used to migrate your project easily, consult the documentation for + further information. Not planned for removal. + + :param byte_str: The byte sequence to examine. + """ + if not isinstance(byte_str, (bytearray, bytes)): + raise TypeError( # pragma: nocover + "Expected object of type bytes or bytearray, got: " + "{0}".format(type(byte_str)) + ) + + if isinstance(byte_str, bytearray): + byte_str = bytes(byte_str) + + r = from_bytes(byte_str).best() + + encoding = r.encoding if r is not None else None + language = r.language if r is not None and r.language != "Unknown" else "" + confidence = 1.0 - r.chaos if r is not None else None + + # Note: CharsetNormalizer does not return 'UTF-8-SIG' as the sig get stripped in the detection/normalization process + # but chardet does return 'utf-8-sig' and it is a valid codec name. 
+    if r is not None and encoding == "utf_8" and r.bom:
+        encoding += "_sig"
+
+    return {
+        "encoding": encoding
+        if encoding not in CHARDET_CORRESPONDENCE
+        else CHARDET_CORRESPONDENCE[encoding],
+        "language": language,
+        "confidence": confidence,
+    }
+
+
+class CharsetNormalizerMatch(CharsetMatch):
+    pass
+
+
+class CharsetNormalizerMatches(CharsetMatches):
+    @staticmethod
+    def from_fp(*args, **kwargs):  # type: ignore
+        warnings.warn(  # pragma: nocover
+            "staticmethod from_fp, from_bytes, from_path and normalize are deprecated "
+            "and scheduled to be removed in 3.0",
+            DeprecationWarning,
+        )
+        return from_fp(*args, **kwargs)  # pragma: nocover
+
+    @staticmethod
+    def from_bytes(*args, **kwargs):  # type: ignore
+        warnings.warn(  # pragma: nocover
+            "staticmethod from_fp, from_bytes, from_path and normalize are deprecated "
+            "and scheduled to be removed in 3.0",
+            DeprecationWarning,
+        )
+        return from_bytes(*args, **kwargs)  # pragma: nocover
+
+    @staticmethod
+    def from_path(*args, **kwargs):  # type: ignore
+        warnings.warn(  # pragma: nocover
+            "staticmethod from_fp, from_bytes, from_path and normalize are deprecated "
+            "and scheduled to be removed in 3.0",
+            DeprecationWarning,
+        )
+        return from_path(*args, **kwargs)  # pragma: nocover
+
+    @staticmethod
+    def normalize(*args, **kwargs):  # type: ignore
+        warnings.warn(  # pragma: nocover
+            "staticmethod from_fp, from_bytes, from_path and normalize are deprecated "
+            "and scheduled to be removed in 3.0",
+            DeprecationWarning,
+        )
+        return normalize(*args, **kwargs)  # pragma: nocover
+
+
+class CharsetDetector(CharsetNormalizerMatches):
+    pass
+
+
+class CharsetDoctor(CharsetNormalizerMatches):
+    pass
diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/charset_normalizer/md.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/charset_normalizer/md.py
new file mode 100644
index 00000000..f3d6505c
--- /dev/null
+++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/charset_normalizer/md.py
@@ -0,0 +1,559 @@
+from functools import lru_cache
+from typing import List, Optional
+
+from .constant import COMMON_SAFE_ASCII_CHARACTERS, UNICODE_SECONDARY_RANGE_KEYWORD
+from .utils import (
+    is_accentuated,
+    is_ascii,
+    is_case_variable,
+    is_cjk,
+    is_emoticon,
+    is_hangul,
+    is_hiragana,
+    is_katakana,
+    is_latin,
+    is_punctuation,
+    is_separator,
+    is_symbol,
+    is_thai,
+    remove_accent,
+    unicode_range,
+)
+
+
+class MessDetectorPlugin:
+    """
+    Base abstract class used for mess detection plugins.
+    All detectors MUST extend and implement given methods.
+    """
+
+    def eligible(self, character: str) -> bool:
+        """
+        Determine if given character should be fed in.
+        """
+        raise NotImplementedError  # pragma: nocover
+
+    def feed(self, character: str) -> None:
+        """
+        The main routine to be executed upon character.
+        Insert the logic in which the text would be considered chaotic.
+        """
+        raise NotImplementedError  # pragma: nocover
+
+    def reset(self) -> None:  # pragma: no cover
+        """
+        Permit to reset the plugin to its initial state.
+        """
+        raise NotImplementedError
+
+    @property
+    def ratio(self) -> float:
+        """
+        Compute the chaos ratio based on what your feed() has seen.
+        Must NOT be lower than 0.; there is no restriction on how high it may go.
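+
+        Illustrative contract (values assumed): a plugin that flagged 2 out of
+        100 eligible characters would typically report a ratio of 0.02.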
+ """ + raise NotImplementedError # pragma: nocover + + +class TooManySymbolOrPunctuationPlugin(MessDetectorPlugin): + def __init__(self) -> None: + self._punctuation_count = 0 # type: int + self._symbol_count = 0 # type: int + self._character_count = 0 # type: int + + self._last_printable_char = None # type: Optional[str] + self._frenzy_symbol_in_word = False # type: bool + + def eligible(self, character: str) -> bool: + return character.isprintable() + + def feed(self, character: str) -> None: + self._character_count += 1 + + if ( + character != self._last_printable_char + and character not in COMMON_SAFE_ASCII_CHARACTERS + ): + if is_punctuation(character): + self._punctuation_count += 1 + elif ( + character.isdigit() is False + and is_symbol(character) + and is_emoticon(character) is False + ): + self._symbol_count += 2 + + self._last_printable_char = character + + def reset(self) -> None: # pragma: no cover + self._punctuation_count = 0 + self._character_count = 0 + self._symbol_count = 0 + + @property + def ratio(self) -> float: + if self._character_count == 0: + return 0.0 + + ratio_of_punctuation = ( + self._punctuation_count + self._symbol_count + ) / self._character_count # type: float + + return ratio_of_punctuation if ratio_of_punctuation >= 0.3 else 0.0 + + +class TooManyAccentuatedPlugin(MessDetectorPlugin): + def __init__(self) -> None: + self._character_count = 0 # type: int + self._accentuated_count = 0 # type: int + + def eligible(self, character: str) -> bool: + return character.isalpha() + + def feed(self, character: str) -> None: + self._character_count += 1 + + if is_accentuated(character): + self._accentuated_count += 1 + + def reset(self) -> None: # pragma: no cover + self._character_count = 0 + self._accentuated_count = 0 + + @property + def ratio(self) -> float: + if self._character_count == 0: + return 0.0 + ratio_of_accentuation = ( + self._accentuated_count / self._character_count + ) # type: float + return ratio_of_accentuation if ratio_of_accentuation >= 0.35 else 0.0 + + +class UnprintablePlugin(MessDetectorPlugin): + def __init__(self) -> None: + self._unprintable_count = 0 # type: int + self._character_count = 0 # type: int + + def eligible(self, character: str) -> bool: + return True + + def feed(self, character: str) -> None: + if ( + character.isspace() is False # includes \n \t \r \v + and character.isprintable() is False + and character != "\x1A" # Why? Its the ASCII substitute character. + ): + self._unprintable_count += 1 + self._character_count += 1 + + def reset(self) -> None: # pragma: no cover + self._unprintable_count = 0 + + @property + def ratio(self) -> float: + if self._character_count == 0: + return 0.0 + + return (self._unprintable_count * 8) / self._character_count + + +class SuspiciousDuplicateAccentPlugin(MessDetectorPlugin): + def __init__(self) -> None: + self._successive_count = 0 # type: int + self._character_count = 0 # type: int + + self._last_latin_character = None # type: Optional[str] + + def eligible(self, character: str) -> bool: + return character.isalpha() and is_latin(character) + + def feed(self, character: str) -> None: + self._character_count += 1 + if ( + self._last_latin_character is not None + and is_accentuated(character) + and is_accentuated(self._last_latin_character) + ): + if character.isupper() and self._last_latin_character.isupper(): + self._successive_count += 1 + # Worse if its the same char duplicated with different accent. 
+ if remove_accent(character) == remove_accent(self._last_latin_character): + self._successive_count += 1 + self._last_latin_character = character + + def reset(self) -> None: # pragma: no cover + self._successive_count = 0 + self._character_count = 0 + self._last_latin_character = None + + @property + def ratio(self) -> float: + if self._character_count == 0: + return 0.0 + + return (self._successive_count * 2) / self._character_count + + +class SuspiciousRange(MessDetectorPlugin): + def __init__(self) -> None: + self._suspicious_successive_range_count = 0 # type: int + self._character_count = 0 # type: int + self._last_printable_seen = None # type: Optional[str] + + def eligible(self, character: str) -> bool: + return character.isprintable() + + def feed(self, character: str) -> None: + self._character_count += 1 + + if ( + character.isspace() + or is_punctuation(character) + or character in COMMON_SAFE_ASCII_CHARACTERS + ): + self._last_printable_seen = None + return + + if self._last_printable_seen is None: + self._last_printable_seen = character + return + + unicode_range_a = unicode_range( + self._last_printable_seen + ) # type: Optional[str] + unicode_range_b = unicode_range(character) # type: Optional[str] + + if is_suspiciously_successive_range(unicode_range_a, unicode_range_b): + self._suspicious_successive_range_count += 1 + + self._last_printable_seen = character + + def reset(self) -> None: # pragma: no cover + self._character_count = 0 + self._suspicious_successive_range_count = 0 + self._last_printable_seen = None + + @property + def ratio(self) -> float: + if self._character_count == 0: + return 0.0 + + ratio_of_suspicious_range_usage = ( + self._suspicious_successive_range_count * 2 + ) / self._character_count # type: float + + if ratio_of_suspicious_range_usage < 0.1: + return 0.0 + + return ratio_of_suspicious_range_usage + + +class SuperWeirdWordPlugin(MessDetectorPlugin): + def __init__(self) -> None: + self._word_count = 0 # type: int + self._bad_word_count = 0 # type: int + self._foreign_long_count = 0 # type: int + + self._is_current_word_bad = False # type: bool + self._foreign_long_watch = False # type: bool + + self._character_count = 0 # type: int + self._bad_character_count = 0 # type: int + + self._buffer = "" # type: str + self._buffer_accent_count = 0 # type: int + + def eligible(self, character: str) -> bool: + return True + + def feed(self, character: str) -> None: + if character.isalpha(): + self._buffer = "".join([self._buffer, character]) + if is_accentuated(character): + self._buffer_accent_count += 1 + if ( + self._foreign_long_watch is False + and (is_latin(character) is False or is_accentuated(character)) + and is_cjk(character) is False + and is_hangul(character) is False + and is_katakana(character) is False + and is_hiragana(character) is False + and is_thai(character) is False + ): + self._foreign_long_watch = True + return + if not self._buffer: + return + if ( + character.isspace() or is_punctuation(character) or is_separator(character) + ) and self._buffer: + self._word_count += 1 + buffer_length = len(self._buffer) # type: int + + self._character_count += buffer_length + + if buffer_length >= 4: + if self._buffer_accent_count / buffer_length > 0.34: + self._is_current_word_bad = True + # Word/Buffer ending with a upper case accentuated letter are so rare, + # that we will consider them all as suspicious. Same weight as foreign_long suspicious. 
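+                # (Illustrative: a token such as "HÉ" ending in an upper-case
+                # accentuated letter; example assumed.)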
+ if is_accentuated(self._buffer[-1]) and self._buffer[-1].isupper(): + self._foreign_long_count += 1 + self._is_current_word_bad = True + if buffer_length >= 24 and self._foreign_long_watch: + self._foreign_long_count += 1 + self._is_current_word_bad = True + + if self._is_current_word_bad: + self._bad_word_count += 1 + self._bad_character_count += len(self._buffer) + self._is_current_word_bad = False + + self._foreign_long_watch = False + self._buffer = "" + self._buffer_accent_count = 0 + elif ( + character not in {"<", ">", "-", "=", "~", "|", "_"} + and character.isdigit() is False + and is_symbol(character) + ): + self._is_current_word_bad = True + self._buffer += character + + def reset(self) -> None: # pragma: no cover + self._buffer = "" + self._is_current_word_bad = False + self._foreign_long_watch = False + self._bad_word_count = 0 + self._word_count = 0 + self._character_count = 0 + self._bad_character_count = 0 + self._foreign_long_count = 0 + + @property + def ratio(self) -> float: + if self._word_count <= 10 and self._foreign_long_count == 0: + return 0.0 + + return self._bad_character_count / self._character_count + + +class CjkInvalidStopPlugin(MessDetectorPlugin): + """ + GB(Chinese) based encoding often render the stop incorrectly when the content does not fit and + can be easily detected. Searching for the overuse of '丅' and '丄'. + """ + + def __init__(self) -> None: + self._wrong_stop_count = 0 # type: int + self._cjk_character_count = 0 # type: int + + def eligible(self, character: str) -> bool: + return True + + def feed(self, character: str) -> None: + if character in {"丅", "丄"}: + self._wrong_stop_count += 1 + return + if is_cjk(character): + self._cjk_character_count += 1 + + def reset(self) -> None: # pragma: no cover + self._wrong_stop_count = 0 + self._cjk_character_count = 0 + + @property + def ratio(self) -> float: + if self._cjk_character_count < 16: + return 0.0 + return self._wrong_stop_count / self._cjk_character_count + + +class ArchaicUpperLowerPlugin(MessDetectorPlugin): + def __init__(self) -> None: + self._buf = False # type: bool + + self._character_count_since_last_sep = 0 # type: int + + self._successive_upper_lower_count = 0 # type: int + self._successive_upper_lower_count_final = 0 # type: int + + self._character_count = 0 # type: int + + self._last_alpha_seen = None # type: Optional[str] + self._current_ascii_only = True # type: bool + + def eligible(self, character: str) -> bool: + return True + + def feed(self, character: str) -> None: + is_concerned = character.isalpha() and is_case_variable(character) + chunk_sep = is_concerned is False + + if chunk_sep and self._character_count_since_last_sep > 0: + if ( + self._character_count_since_last_sep <= 64 + and character.isdigit() is False + and self._current_ascii_only is False + ): + self._successive_upper_lower_count_final += ( + self._successive_upper_lower_count + ) + + self._successive_upper_lower_count = 0 + self._character_count_since_last_sep = 0 + self._last_alpha_seen = None + self._buf = False + self._character_count += 1 + self._current_ascii_only = True + + return + + if self._current_ascii_only is True and is_ascii(character) is False: + self._current_ascii_only = False + + if self._last_alpha_seen is not None: + if (character.isupper() and self._last_alpha_seen.islower()) or ( + character.islower() and self._last_alpha_seen.isupper() + ): + if self._buf is True: + self._successive_upper_lower_count += 2 + self._buf = False + else: + self._buf = True + else: + self._buf = False + + 
self._character_count += 1 + self._character_count_since_last_sep += 1 + self._last_alpha_seen = character + + def reset(self) -> None: # pragma: no cover + self._character_count = 0 + self._character_count_since_last_sep = 0 + self._successive_upper_lower_count = 0 + self._successive_upper_lower_count_final = 0 + self._last_alpha_seen = None + self._buf = False + self._current_ascii_only = True + + @property + def ratio(self) -> float: + if self._character_count == 0: + return 0.0 + + return self._successive_upper_lower_count_final / self._character_count + + +def is_suspiciously_successive_range( + unicode_range_a: Optional[str], unicode_range_b: Optional[str] +) -> bool: + """ + Determine if two Unicode range seen next to each other can be considered as suspicious. + """ + if unicode_range_a is None or unicode_range_b is None: + return True + + if unicode_range_a == unicode_range_b: + return False + + if "Latin" in unicode_range_a and "Latin" in unicode_range_b: + return False + + if "Emoticons" in unicode_range_a or "Emoticons" in unicode_range_b: + return False + + # Latin characters can be accompanied with a combining diacritical mark + # eg. Vietnamese. + if ("Latin" in unicode_range_a or "Latin" in unicode_range_b) and ( + "Combining" in unicode_range_a or "Combining" in unicode_range_b + ): + return False + + keywords_range_a, keywords_range_b = unicode_range_a.split( + " " + ), unicode_range_b.split(" ") + + for el in keywords_range_a: + if el in UNICODE_SECONDARY_RANGE_KEYWORD: + continue + if el in keywords_range_b: + return False + + # Japanese Exception + range_a_jp_chars, range_b_jp_chars = ( + unicode_range_a + in ( + "Hiragana", + "Katakana", + ), + unicode_range_b in ("Hiragana", "Katakana"), + ) + if (range_a_jp_chars or range_b_jp_chars) and ( + "CJK" in unicode_range_a or "CJK" in unicode_range_b + ): + return False + if range_a_jp_chars and range_b_jp_chars: + return False + + if "Hangul" in unicode_range_a or "Hangul" in unicode_range_b: + if "CJK" in unicode_range_a or "CJK" in unicode_range_b: + return False + if unicode_range_a == "Basic Latin" or unicode_range_b == "Basic Latin": + return False + + # Chinese/Japanese use dedicated range for punctuation and/or separators. + if ("CJK" in unicode_range_a or "CJK" in unicode_range_b) or ( + unicode_range_a in ["Katakana", "Hiragana"] + and unicode_range_b in ["Katakana", "Hiragana"] + ): + if "Punctuation" in unicode_range_a or "Punctuation" in unicode_range_b: + return False + if "Forms" in unicode_range_a or "Forms" in unicode_range_b: + return False + + return True + + +@lru_cache(maxsize=2048) +def mess_ratio( + decoded_sequence: str, maximum_threshold: float = 0.2, debug: bool = False +) -> float: + """ + Compute a mess ratio given a decoded bytes sequence. The maximum threshold does stop the computation earlier. 
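+
+    Illustrative expectation (values assumed): clean English prose should
+    score near 0.0, while bytes decoded with the wrong single-byte code page
+    (mojibake such as "ä¸­æ–‡") should score noticeably higher.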
+ """ + + detectors = [ + md_class() for md_class in MessDetectorPlugin.__subclasses__() + ] # type: List[MessDetectorPlugin] + + length = len(decoded_sequence) + 1 # type: int + + mean_mess_ratio = 0.0 # type: float + + if length < 512: + intermediary_mean_mess_ratio_calc = 32 # type: int + elif length <= 1024: + intermediary_mean_mess_ratio_calc = 64 + else: + intermediary_mean_mess_ratio_calc = 128 + + for character, index in zip(decoded_sequence + "\n", range(length)): + for detector in detectors: + if detector.eligible(character): + detector.feed(character) + + if ( + index > 0 and index % intermediary_mean_mess_ratio_calc == 0 + ) or index == length - 1: + mean_mess_ratio = sum(dt.ratio for dt in detectors) + + if mean_mess_ratio >= maximum_threshold: + break + + if debug: + for dt in detectors: # pragma: nocover + print(dt.__class__, dt.ratio) + + return round(mean_mess_ratio, 3) diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/charset_normalizer/models.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/charset_normalizer/models.py new file mode 100644 index 00000000..c38da31f --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/charset_normalizer/models.py @@ -0,0 +1,392 @@ +import warnings +from collections import Counter +from encodings.aliases import aliases +from hashlib import sha256 +from json import dumps +from re import sub +from typing import Any, Dict, Iterator, List, Optional, Tuple, Union + +from .constant import NOT_PRINTABLE_PATTERN, TOO_BIG_SEQUENCE +from .md import mess_ratio +from .utils import iana_name, is_multi_byte_encoding, unicode_range + + +class CharsetMatch: + def __init__( + self, + payload: bytes, + guessed_encoding: str, + mean_mess_ratio: float, + has_sig_or_bom: bool, + languages: "CoherenceMatches", + decoded_payload: Optional[str] = None, + ): + self._payload = payload # type: bytes + + self._encoding = guessed_encoding # type: str + self._mean_mess_ratio = mean_mess_ratio # type: float + self._languages = languages # type: CoherenceMatches + self._has_sig_or_bom = has_sig_or_bom # type: bool + self._unicode_ranges = None # type: Optional[List[str]] + + self._leaves = [] # type: List[CharsetMatch] + self._mean_coherence_ratio = 0.0 # type: float + + self._output_payload = None # type: Optional[bytes] + self._output_encoding = None # type: Optional[str] + + self._string = decoded_payload # type: Optional[str] + + def __eq__(self, other: object) -> bool: + if not isinstance(other, CharsetMatch): + raise TypeError( + "__eq__ cannot be invoked on {} and {}.".format( + str(other.__class__), str(self.__class__) + ) + ) + return self.encoding == other.encoding and self.fingerprint == other.fingerprint + + def __lt__(self, other: object) -> bool: + """ + Implemented to make sorted available upon CharsetMatches items. + """ + if not isinstance(other, CharsetMatch): + raise ValueError + + chaos_difference = abs(self.chaos - other.chaos) # type: float + coherence_difference = abs(self.coherence - other.coherence) # type: float + + # Bellow 1% difference --> Use Coherence + if chaos_difference < 0.01 and coherence_difference > 0.02: + # When having a tough decision, use the result that decoded as many multi-byte as possible. 
+            if chaos_difference == 0.0 and self.coherence == other.coherence:
+                return self.multi_byte_usage > other.multi_byte_usage
+            return self.coherence > other.coherence
+
+        return self.chaos < other.chaos
+
+    @property
+    def multi_byte_usage(self) -> float:
+        return 1.0 - len(str(self)) / len(self.raw)
+
+    @property
+    def chaos_secondary_pass(self) -> float:
+        """
+        Check once again chaos in decoded text, except this time, with full content.
+        Use with caution, this can be very slow.
+        Notice: Will be removed in 3.0
+        """
+        warnings.warn(
+            "chaos_secondary_pass is deprecated and will be removed in 3.0",
+            DeprecationWarning,
+        )
+        return mess_ratio(str(self), 1.0)
+
+    @property
+    def coherence_non_latin(self) -> float:
+        """
+        Coherence ratio on the first non-latin language detected if ANY.
+        Notice: Will be removed in 3.0
+        """
+        warnings.warn(
+            "coherence_non_latin is deprecated and will be removed in 3.0",
+            DeprecationWarning,
+        )
+        return 0.0
+
+    @property
+    def w_counter(self) -> Counter:
+        """
+        Word counter instance on decoded text.
+        Notice: Will be removed in 3.0
+        """
+        warnings.warn(
+            "w_counter is deprecated and will be removed in 3.0", DeprecationWarning
+        )
+
+        string_printable_only = sub(NOT_PRINTABLE_PATTERN, " ", str(self).lower())
+
+        return Counter(string_printable_only.split())
+
+    def __str__(self) -> str:
+        # Lazy Str Loading
+        if self._string is None:
+            self._string = str(self._payload, self._encoding, "strict")
+        return self._string
+
+    def __repr__(self) -> str:
+        return "<CharsetMatch '{}' bytes({})>".format(self.encoding, self.fingerprint)
+
+    def add_submatch(self, other: "CharsetMatch") -> None:
+        if not isinstance(other, CharsetMatch) or other == self:
+            raise ValueError(
+                "Unable to add instance <{}> as a submatch of a CharsetMatch".format(
+                    other.__class__
+                )
+            )
+
+        other._string = None  # Unload RAM usage; dirty trick.
+        self._leaves.append(other)
+
+    @property
+    def encoding(self) -> str:
+        return self._encoding
+
+    @property
+    def encoding_aliases(self) -> List[str]:
+        """
+        An encoding can be known by many names; this can help when searching for IBM855 when it is listed as CP855.
+        """
+        also_known_as = []  # type: List[str]
+        for u, p in aliases.items():
+            if self.encoding == u:
+                also_known_as.append(p)
+            elif self.encoding == p:
+                also_known_as.append(u)
+        return also_known_as
+
+    @property
+    def bom(self) -> bool:
+        return self._has_sig_or_bom
+
+    @property
+    def byte_order_mark(self) -> bool:
+        return self._has_sig_or_bom
+
+    @property
+    def languages(self) -> List[str]:
+        """
+        Return the complete list of possible languages found in decoded sequence.
+        Usually not really useful. Returned list may be empty even if 'language' property returns something != 'Unknown'.
+        """
+        return [e[0] for e in self._languages]
+
+    @property
+    def language(self) -> str:
+        """
+        Most probable language found in decoded sequence. If none were detected or inferred, the property will return
+        "Unknown".
+        """
+        if not self._languages:
+            # Trying to infer the language based on the given encoding
+            # It's either English or we should not pronounce ourselves in certain cases.
+ if "ascii" in self.could_be_from_charset: + return "English" + + # doing it there to avoid circular import + from charset_normalizer.cd import encoding_languages, mb_encoding_languages + + languages = ( + mb_encoding_languages(self.encoding) + if is_multi_byte_encoding(self.encoding) + else encoding_languages(self.encoding) + ) + + if len(languages) == 0 or "Latin Based" in languages: + return "Unknown" + + return languages[0] + + return self._languages[0][0] + + @property + def chaos(self) -> float: + return self._mean_mess_ratio + + @property + def coherence(self) -> float: + if not self._languages: + return 0.0 + return self._languages[0][1] + + @property + def percent_chaos(self) -> float: + return round(self.chaos * 100, ndigits=3) + + @property + def percent_coherence(self) -> float: + return round(self.coherence * 100, ndigits=3) + + @property + def raw(self) -> bytes: + """ + Original untouched bytes. + """ + return self._payload + + @property + def submatch(self) -> List["CharsetMatch"]: + return self._leaves + + @property + def has_submatch(self) -> bool: + return len(self._leaves) > 0 + + @property + def alphabets(self) -> List[str]: + if self._unicode_ranges is not None: + return self._unicode_ranges + # list detected ranges + detected_ranges = [ + unicode_range(char) for char in str(self) + ] # type: List[Optional[str]] + # filter and sort + self._unicode_ranges = sorted(list({r for r in detected_ranges if r})) + return self._unicode_ranges + + @property + def could_be_from_charset(self) -> List[str]: + """ + The complete list of encoding that output the exact SAME str result and therefore could be the originating + encoding. + This list does include the encoding available in property 'encoding'. + """ + return [self._encoding] + [m.encoding for m in self._leaves] + + def first(self) -> "CharsetMatch": + """ + Kept for BC reasons. Will be removed in 3.0. + """ + return self + + def best(self) -> "CharsetMatch": + """ + Kept for BC reasons. Will be removed in 3.0. + """ + return self + + def output(self, encoding: str = "utf_8") -> bytes: + """ + Method to get re-encoded bytes payload using given target encoding. Default to UTF-8. + Any errors will be simply ignored by the encoder NOT replaced. + """ + if self._output_encoding is None or self._output_encoding != encoding: + self._output_encoding = encoding + self._output_payload = str(self).encode(encoding, "replace") + + return self._output_payload # type: ignore + + @property + def fingerprint(self) -> str: + """ + Retrieve the unique SHA256 computed using the transformed (re-encoded) payload. Not the original one. + """ + return sha256(self.output()).hexdigest() + + +class CharsetMatches: + """ + Container with every CharsetMatch items ordered by default from most probable to the less one. + Act like a list(iterable) but does not implements all related methods. + """ + + def __init__(self, results: List[CharsetMatch] = None): + self._results = sorted(results) if results else [] # type: List[CharsetMatch] + + def __iter__(self) -> Iterator[CharsetMatch]: + yield from self._results + + def __getitem__(self, item: Union[int, str]) -> CharsetMatch: + """ + Retrieve a single item either by its position or encoding name (alias may be used here). + Raise KeyError upon invalid index or encoding not present in results. 
+ """ + if isinstance(item, int): + return self._results[item] + if isinstance(item, str): + item = iana_name(item, False) + for result in self._results: + if item in result.could_be_from_charset: + return result + raise KeyError + + def __len__(self) -> int: + return len(self._results) + + def __bool__(self) -> bool: + return len(self._results) > 0 + + def append(self, item: CharsetMatch) -> None: + """ + Insert a single match. Will be inserted accordingly to preserve sort. + Can be inserted as a submatch. + """ + if not isinstance(item, CharsetMatch): + raise ValueError( + "Cannot append instance '{}' to CharsetMatches".format( + str(item.__class__) + ) + ) + # We should disable the submatch factoring when the input file is too heavy (conserve RAM usage) + if len(item.raw) <= TOO_BIG_SEQUENCE: + for match in self._results: + if match.fingerprint == item.fingerprint and match.chaos == item.chaos: + match.add_submatch(item) + return + self._results.append(item) + self._results = sorted(self._results) + + def best(self) -> Optional["CharsetMatch"]: + """ + Simply return the first match. Strict equivalent to matches[0]. + """ + if not self._results: + return None + return self._results[0] + + def first(self) -> Optional["CharsetMatch"]: + """ + Redundant method, call the method best(). Kept for BC reasons. + """ + return self.best() + + +CoherenceMatch = Tuple[str, float] +CoherenceMatches = List[CoherenceMatch] + + +class CliDetectionResult: + def __init__( + self, + path: str, + encoding: Optional[str], + encoding_aliases: List[str], + alternative_encodings: List[str], + language: str, + alphabets: List[str], + has_sig_or_bom: bool, + chaos: float, + coherence: float, + unicode_path: Optional[str], + is_preferred: bool, + ): + self.path = path # type: str + self.unicode_path = unicode_path # type: Optional[str] + self.encoding = encoding # type: Optional[str] + self.encoding_aliases = encoding_aliases # type: List[str] + self.alternative_encodings = alternative_encodings # type: List[str] + self.language = language # type: str + self.alphabets = alphabets # type: List[str] + self.has_sig_or_bom = has_sig_or_bom # type: bool + self.chaos = chaos # type: float + self.coherence = coherence # type: float + self.is_preferred = is_preferred # type: bool + + @property + def __dict__(self) -> Dict[str, Any]: # type: ignore + return { + "path": self.path, + "encoding": self.encoding, + "encoding_aliases": self.encoding_aliases, + "alternative_encodings": self.alternative_encodings, + "language": self.language, + "alphabets": self.alphabets, + "has_sig_or_bom": self.has_sig_or_bom, + "chaos": self.chaos, + "coherence": self.coherence, + "unicode_path": self.unicode_path, + "is_preferred": self.is_preferred, + } + + def to_json(self) -> str: + return dumps(self.__dict__, ensure_ascii=True, indent=4) diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/charset_normalizer/py.typed b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/charset_normalizer/py.typed new file mode 100644 index 00000000..e69de29b diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/charset_normalizer/utils.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/charset_normalizer/utils.py new file mode 100644 index 00000000..dcb14dfe --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/charset_normalizer/utils.py @@ -0,0 +1,342 @@ +try: + import unicodedata2 as unicodedata +except ImportError: + import unicodedata # type: ignore[no-redef] + +import importlib +import logging +from codecs 
+from codecs import IncrementalDecoder
+from encodings.aliases import aliases
+from functools import lru_cache
+from re import findall
+from typing import List, Optional, Set, Tuple, Union
+
+from _multibytecodec import MultibyteIncrementalDecoder  # type: ignore
+
+from .constant import (
+    ENCODING_MARKS,
+    IANA_SUPPORTED_SIMILAR,
+    RE_POSSIBLE_ENCODING_INDICATION,
+    UNICODE_RANGES_COMBINED,
+    UNICODE_SECONDARY_RANGE_KEYWORD,
+    UTF8_MAXIMAL_ALLOCATION,
+)
+
+
+@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
+def is_accentuated(character: str) -> bool:
+    try:
+        description = unicodedata.name(character)  # type: str
+    except ValueError:
+        return False
+    return (
+        "WITH GRAVE" in description
+        or "WITH ACUTE" in description
+        or "WITH CEDILLA" in description
+        or "WITH DIAERESIS" in description
+        or "WITH CIRCUMFLEX" in description
+        or "WITH TILDE" in description
+    )
+
+
+@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
+def remove_accent(character: str) -> str:
+    decomposed = unicodedata.decomposition(character)  # type: str
+    if not decomposed:
+        return character
+
+    codes = decomposed.split(" ")  # type: List[str]
+
+    return chr(int(codes[0], 16))
+
+
+@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
+def unicode_range(character: str) -> Optional[str]:
+    """
+    Retrieve the official Unicode range name for a single character.
+    """
+    character_ord = ord(character)  # type: int
+
+    for range_name, ord_range in UNICODE_RANGES_COMBINED.items():
+        if character_ord in ord_range:
+            return range_name
+
+    return None
+
+
+@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
+def is_latin(character: str) -> bool:
+    try:
+        description = unicodedata.name(character)  # type: str
+    except ValueError:
+        return False
+    return "LATIN" in description
+
+
+def is_ascii(character: str) -> bool:
+    try:
+        character.encode("ascii")
+    except UnicodeEncodeError:
+        return False
+    return True
+
+
+@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
+def is_punctuation(character: str) -> bool:
+    character_category = unicodedata.category(character)  # type: str
+
+    if "P" in character_category:
+        return True
+
+    character_range = unicode_range(character)  # type: Optional[str]
+
+    if character_range is None:
+        return False
+
+    return "Punctuation" in character_range
+
+
+@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
+def is_symbol(character: str) -> bool:
+    character_category = unicodedata.category(character)  # type: str
+
+    if "S" in character_category or "N" in character_category:
+        return True
+
+    character_range = unicode_range(character)  # type: Optional[str]
+
+    if character_range is None:
+        return False
+
+    return "Forms" in character_range
+
+
+@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
+def is_emoticon(character: str) -> bool:
+    character_range = unicode_range(character)  # type: Optional[str]
+
+    if character_range is None:
+        return False
+
+    return "Emoticons" in character_range
+
+
+@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
+def is_separator(character: str) -> bool:
+    if character.isspace() or character in {"|", "+", ",", ";", "<", ">"}:
+        return True
+
+    character_category = unicodedata.category(character)  # type: str
+
+    return "Z" in character_category
+
+
+@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
+def is_case_variable(character: str) -> bool:
+    return character.islower() != character.isupper()
+
+
+def is_private_use_only(character: str) -> bool:
+    character_category = unicodedata.category(character)  # type: str
+
+    return character_category == "Co"
+
+
+@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
+def is_cjk(character: str) -> bool:
+    try:
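+        # unicodedata.name() raises ValueError for code points that have no name
+        # (e.g. unassigned or control characters), hence the fallback below.
+        # (Editor's note, illustrative: unicodedata.name("中") == "CJK UNIFIED IDEOGRAPH-4E2D".)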
+        character_name = unicodedata.name(character)
+    except ValueError:
+        return False
+
+    return "CJK" in character_name
+
+
+@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
+def is_hiragana(character: str) -> bool:
+    try:
+        character_name = unicodedata.name(character)
+    except ValueError:
+        return False
+
+    return "HIRAGANA" in character_name
+
+
+@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
+def is_katakana(character: str) -> bool:
+    try:
+        character_name = unicodedata.name(character)
+    except ValueError:
+        return False
+
+    return "KATAKANA" in character_name
+
+
+@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
+def is_hangul(character: str) -> bool:
+    try:
+        character_name = unicodedata.name(character)
+    except ValueError:
+        return False
+
+    return "HANGUL" in character_name
+
+
+@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
+def is_thai(character: str) -> bool:
+    try:
+        character_name = unicodedata.name(character)
+    except ValueError:
+        return False
+
+    return "THAI" in character_name
+
+
+@lru_cache(maxsize=len(UNICODE_RANGES_COMBINED))
+def is_unicode_range_secondary(range_name: str) -> bool:
+    return any(keyword in range_name for keyword in UNICODE_SECONDARY_RANGE_KEYWORD)
+
+
+def any_specified_encoding(sequence: bytes, search_zone: int = 4096) -> Optional[str]:
+    """
+    Extract any declared encoding from the first n bytes, using an ASCII-only decoder.
+    """
+    if not isinstance(sequence, bytes):
+        raise TypeError
+
+    seq_len = len(sequence)  # type: int
+
+    results = findall(
+        RE_POSSIBLE_ENCODING_INDICATION,
+        sequence[: min(seq_len, search_zone)].decode("ascii", errors="ignore"),
+    )  # type: List[str]
+
+    if len(results) == 0:
+        return None
+
+    for specified_encoding in results:
+        specified_encoding = specified_encoding.lower().replace("-", "_")
+
+        for encoding_alias, encoding_iana in aliases.items():
+            if encoding_alias == specified_encoding:
+                return encoding_iana
+            if encoding_iana == specified_encoding:
+                return encoding_iana
+
+    return None
+
+
+@lru_cache(maxsize=128)
+def is_multi_byte_encoding(name: str) -> bool:
+    """
+    Verify whether a specific encoding is a multi-byte one, based on its IANA name.
+    """
+    return name in {
+        "utf_8",
+        "utf_8_sig",
+        "utf_16",
+        "utf_16_be",
+        "utf_16_le",
+        "utf_32",
+        "utf_32_le",
+        "utf_32_be",
+        "utf_7",
+    } or issubclass(
+        importlib.import_module("encodings.{}".format(name)).IncrementalDecoder,  # type: ignore
+        MultibyteIncrementalDecoder,
+    )
+
+
+def identify_sig_or_bom(sequence: bytes) -> Tuple[Optional[str], bytes]:
+    """
+    Identify and extract a SIG/BOM in the given sequence.
+ """ + + for iana_encoding in ENCODING_MARKS: + marks = ENCODING_MARKS[iana_encoding] # type: Union[bytes, List[bytes]] + + if isinstance(marks, bytes): + marks = [marks] + + for mark in marks: + if sequence.startswith(mark): + return iana_encoding, mark + + return None, b"" + + +def should_strip_sig_or_bom(iana_encoding: str) -> bool: + return iana_encoding not in {"utf_16", "utf_32"} + + +def iana_name(cp_name: str, strict: bool = True) -> str: + cp_name = cp_name.lower().replace("-", "_") + + for encoding_alias, encoding_iana in aliases.items(): + if cp_name in [encoding_alias, encoding_iana]: + return encoding_iana + + if strict: + raise ValueError("Unable to retrieve IANA for '{}'".format(cp_name)) + + return cp_name + + +def range_scan(decoded_sequence: str) -> List[str]: + ranges = set() # type: Set[str] + + for character in decoded_sequence: + character_range = unicode_range(character) # type: Optional[str] + + if character_range is None: + continue + + ranges.add(character_range) + + return list(ranges) + + +def cp_similarity(iana_name_a: str, iana_name_b: str) -> float: + + if is_multi_byte_encoding(iana_name_a) or is_multi_byte_encoding(iana_name_b): + return 0.0 + + decoder_a = importlib.import_module("encodings.{}".format(iana_name_a)).IncrementalDecoder # type: ignore + decoder_b = importlib.import_module("encodings.{}".format(iana_name_b)).IncrementalDecoder # type: ignore + + id_a = decoder_a(errors="ignore") # type: IncrementalDecoder + id_b = decoder_b(errors="ignore") # type: IncrementalDecoder + + character_match_count = 0 # type: int + + for i in range(255): + to_be_decoded = bytes([i]) # type: bytes + if id_a.decode(to_be_decoded) == id_b.decode(to_be_decoded): + character_match_count += 1 + + return character_match_count / 254 + + +def is_cp_similar(iana_name_a: str, iana_name_b: str) -> bool: + """ + Determine if two code page are at least 80% similar. IANA_SUPPORTED_SIMILAR dict was generated using + the function cp_similarity. 
+ """ + return ( + iana_name_a in IANA_SUPPORTED_SIMILAR + and iana_name_b in IANA_SUPPORTED_SIMILAR[iana_name_a] + ) + + +def set_logging_handler( + name: str = "charset_normalizer", + level: int = logging.INFO, + format_string: str = "%(asctime)s | %(levelname)s | %(message)s", +) -> None: + + logger = logging.getLogger(name) + logger.setLevel(level) + + handler = logging.StreamHandler() + handler.setFormatter(logging.Formatter(format_string)) + logger.addHandler(handler) diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/charset_normalizer/version.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/charset_normalizer/version.py new file mode 100644 index 00000000..77cfff25 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/charset_normalizer/version.py @@ -0,0 +1,6 @@ +""" +Expose version +""" + +__version__ = "2.0.12" +VERSION = __version__.split(".") diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib-3.0.1b1.dist-info/INSTALLER b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib-3.0.1b1.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib-3.0.1b1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib-3.0.1b1.dist-info/METADATA b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib-3.0.1b1.dist-info/METADATA new file mode 100644 index 00000000..2bc50cfa --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib-3.0.1b1.dist-info/METADATA @@ -0,0 +1,32 @@ +Metadata-Version: 2.1 +Name: cloudconnectlib +Version: 3.0.1b1 +Summary: APP Cloud Connect +License: Apache-2.0 +Author: Addon Factory template +Author-email: addonfactory@splunk.com +Requires-Python: >=3.7,<4.0 +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Apache Software License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Requires-Dist: decorator (>=4,<5) +Requires-Dist: future +Requires-Dist: httplib2 (>=0.19.1,<0.20.0) +Requires-Dist: jinja2 (>=2.11.3,<3.0.0) +Requires-Dist: jsl (>=0.2.4,<0.3.0) +Requires-Dist: jsonpath-ng (>=1.5.2,<2.0.0) +Requires-Dist: jsonpath-rw (>=1.4.0,<2.0.0) +Requires-Dist: jsonschema (>=3.2.0,<4.0.0) +Requires-Dist: munch (>=2.3.2,<3.0.0) +Requires-Dist: requests (>=2.25.1,<3.0.0) +Requires-Dist: six +Requires-Dist: solnlib (>=4.1.0,<5.0.0) +Requires-Dist: sortedcontainers (>=2.3.0,<3.0.0) +Requires-Dist: splunk-sdk (>=1.6,<2.0) +Requires-Dist: splunktalib (>=2,<3) +Requires-Dist: splunktaucclib (>=5,<6) diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib-3.0.1b1.dist-info/RECORD b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib-3.0.1b1.dist-info/RECORD new file mode 100644 index 00000000..b408d791 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib-3.0.1b1.dist-info/RECORD @@ -0,0 +1,47 @@ +cloudconnectlib-3.0.1b1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +cloudconnectlib-3.0.1b1.dist-info/METADATA,sha256=zIfypa7ib0nX4_BDQQuIEiF6dQ3zb-Q2nLW5z8sjrwM,1208 +cloudconnectlib-3.0.1b1.dist-info/RECORD,, 
+cloudconnectlib-3.0.1b1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +cloudconnectlib-3.0.1b1.dist-info/WHEEL,sha256=V7iVckP-GYreevsTDnv1eAinQt_aArwnAxmnP0gygBY,83 +cloudconnectlib/__init__.py,sha256=LX0oPKTm3RdEe2ugCrqBoMIS9iQIS9lcBZ0hUqRObKY,773 +cloudconnectlib/client.py,sha256=KVdsm08ROyYSt0yFUcFrtxbsgEE7vzcpWleAtDSarSk,3180 +cloudconnectlib/common/__init__.py,sha256=TB1W22zY5u1d91XSos8Qa2V5NMCpgLTgIiV2wT4hx3Y,575 +cloudconnectlib/common/lib_util.py,sha256=4XEHpcFs2oOUylqa8KQp_90UQAXMCHIPz7VrwBjzsII,2115 +cloudconnectlib/common/log.py,sha256=3SF9F4IDjcpYaFrrMxgNNI4Yjja1AAFgtVzFGahMUpU,1896 +cloudconnectlib/common/util.py,sha256=cjb2ELvt4dxnCynxQxdvsLsMwBw7m22aT1NKRTRlccA,2058 +cloudconnectlib/configuration/__init__.py,sha256=vDmcGJjnxxkHIICiIOeK0FSJK_hxl3SJGHwdCDbYzMc,617 +cloudconnectlib/configuration/loader.py,sha256=ETALdhUdyOtafdnWQ7vkSRLsTOlzeXrCFYKJkGO9a4o,10699 +cloudconnectlib/configuration/schema_1_0_0.json,sha256=WKWbZJAyWu-rt-OLwvNoMo_UqBi3iwxHbIcRy0aSRy0,11645 +cloudconnectlib/core/__init__.py,sha256=02DdxhQvHpl-ncbu3B3GY1qvoWVHhhpSyYP4Gez9bxU,665 +cloudconnectlib/core/cacerts/ca_certs_locater.py,sha256=PtvX2_8dERSUnANIOTISBT8-QSyf6SOdDSFHH6Ls-og,4889 +cloudconnectlib/core/checkpoint.py,sha256=n45i3EPS8CNkJDIzmTQT0BNpmY8Fn6UNBdk76Wyacpo,1918 +cloudconnectlib/core/defaults.py,sha256=93J1wkfTVW4qVA0GjtDMGtPRhr8Uhd2_WZCdei5eOJI,1288 +cloudconnectlib/core/engine.py,sha256=nChmGutqzx0sfmd5-YaOfDNDXeujgeD5miSFTGNuV5U,10884 +cloudconnectlib/core/engine_v2.py,sha256=vXIhijoWY4p-tNYussNBvwFkIjXCaytdntf2yD_1jFg,4985 +cloudconnectlib/core/exceptions.py,sha256=GLDNVKann_M19kadClH7PLbcgDXaXRRowYUM4c7dWkY,1326 +cloudconnectlib/core/ext.py,sha256=CzhzOz8CFGKG4hbBci4vwmGBl3zI2qQDsr4AA9U_I9g,13104 +cloudconnectlib/core/http.py,sha256=zb7aJ4O2Vmkonqg-9azKcq86bxLwQHRpszHvK1nwRr8,10632 +cloudconnectlib/core/job.py,sha256=FRUs9KQeF-tFFIlBgkdBRNr0uDBvVsY19xpzhVTNtz4,4880 +cloudconnectlib/core/models.py,sha256=bPEMXKCv_UxsWGOvr-ypCQr_ie6DQaQrrzqWBPlmmE8,10190 +cloudconnectlib/core/pipemgr.py,sha256=KHnwa0O1rpnpFAgRS4Xgq5t1pU2Llc7xk6yJDtbd19s,1037 +cloudconnectlib/core/plugin.py,sha256=mI1u7OOfcB04stG_sk5ANUT-KJc8MgINk4RgTIQqVKU,3667 +cloudconnectlib/core/task.py,sha256=YL71H8B6SPoGkhOrodMoQpOwDSp8vIOztp3sGmFhDYs,18687 +cloudconnectlib/core/template.py,sha256=ROHW-eR7FKeyht5_cDlHZt-3f6VM4y4DfCe_FnjEWuo,1152 +cloudconnectlib/splunktacollectorlib/__init__.py,sha256=TB1W22zY5u1d91XSos8Qa2V5NMCpgLTgIiV2wT4hx3Y,575 +cloudconnectlib/splunktacollectorlib/cloud_connect_mod_input.py,sha256=EWttmxmSVLNrwleV65Xf_v-ts6R-Gk3nSYWWOQ7xgFA,2990 +cloudconnectlib/splunktacollectorlib/common/__init__.py,sha256=J2HhI2PL-5WtAl4FYqQBbV7ru86w7FsmVXyFJz9xPlc,1625 +cloudconnectlib/splunktacollectorlib/common/log.py,sha256=SaiYUrl7UmzfqxhmefR-eKSfJ5JHfm_9mN5JwgVd2WU,1974 +cloudconnectlib/splunktacollectorlib/common/rwlock.py,sha256=pBLwQ9u4jcz4K419XW7Lp21zj5QyVpVnXmzZERdEglk,2119 +cloudconnectlib/splunktacollectorlib/common/schema_meta.py,sha256=p3MMcXfrLWkCgfdg_vnPrHIyrAJaAqkDX0gDA1NV1Vk,803 +cloudconnectlib/splunktacollectorlib/config.py,sha256=naUrLtDgMIFQyVyfsqFAtQJIMKL8oQ7MxjkbuLSsplQ,14724 +cloudconnectlib/splunktacollectorlib/data_collection/__init__.py,sha256=OSP5_M7Nh4SvlJT246Qfn-LRRhsVzhhY948E7ffHAKo,596 +cloudconnectlib/splunktacollectorlib/data_collection/ta_checkpoint_manager.py,sha256=erJ4KwUAsCuqOMOfW7Tkns5uI3Ksoe8hSGGDKVw2opc,6025 +cloudconnectlib/splunktacollectorlib/data_collection/ta_config.py,sha256=NKncmmObdJl-Nmd5DOWNS7DBgng3uiV2Xg7UCD1Z8jg,6859 
+cloudconnectlib/splunktacollectorlib/data_collection/ta_consts.py,sha256=bvwnVB-1PJLK_FaLiaGpycpzQf4AmRujNvwy28OwQkI,1897 +cloudconnectlib/splunktacollectorlib/data_collection/ta_data_client.py,sha256=209EqJwpeymsAVS7GE2ZwFyPVf1KU9M7yS8VlItaLcA,3345 +cloudconnectlib/splunktacollectorlib/data_collection/ta_data_collector.py,sha256=EI3CPiNWfMFJ8j-Hcq0_9rXE-_WndYVXEOVRdVH6L0Y,5967 +cloudconnectlib/splunktacollectorlib/data_collection/ta_data_loader.py,sha256=RrY2LqAXmqkDtWzYgja4o0ADEBzCkDZy0NV0OYXAQpE,5714 +cloudconnectlib/splunktacollectorlib/data_collection/ta_helper.py,sha256=KMFAPwE8liOPW4sUancUiYJV6wNlbIQ77Uf--EmJLW0,5924 +cloudconnectlib/splunktacollectorlib/data_collection/ta_mod_input.py,sha256=qV530k2jo1mzzolb7-EJcwx2VBq8asZwzWzUlMUFvpA,8474 +cloudconnectlib/splunktacollectorlib/splunk_ta_import_declare.py,sha256=LR4encL187WnyNPp9vtVd6PAZxMUjNCriOEwpN2gR40,1133 +cloudconnectlib/splunktacollectorlib/ta_cloud_connect_client.py,sha256=PgNr7HAeRz8l-B2HnBCHMdv_qAkXRflXAqkn9aTaXqo,2084 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib-3.0.1b1.dist-info/REQUESTED b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib-3.0.1b1.dist-info/REQUESTED new file mode 100644 index 00000000..e69de29b diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib-3.0.1b1.dist-info/WHEEL b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib-3.0.1b1.dist-info/WHEEL new file mode 100644 index 00000000..862df1fb --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib-3.0.1b1.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: poetry 1.0.3 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib-3.1.3.dist-info/INSTALLER b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib-3.1.3.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib-3.1.3.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib-3.1.3.dist-info/LICENSE b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib-3.1.3.dist-info/LICENSE new file mode 100644 index 00000000..d13065d5 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib-3.1.3.dist-info/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2021 Splunk Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
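[Editor's note — observation, not part of the patch. This diff adds dist-info directories for both cloudconnectlib 3.0.1b1 and 3.1.3 to the same bundled path. With two dist-info directories for one project, which one metadata lookups resolve to depends on the finder, so it may be worth confirming what actually wins at runtime. A minimal sketch:

from importlib.metadata import distribution

# Reports whichever cloudconnectlib dist-info the finder resolves first.
print(distribution("cloudconnectlib").version)
]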
diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib-3.1.3.dist-info/METADATA b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib-3.1.3.dist-info/METADATA new file mode 100644 index 00000000..21ffdb5a --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib-3.1.3.dist-info/METADATA @@ -0,0 +1,30 @@ +Metadata-Version: 2.1 +Name: cloudconnectlib +Version: 3.1.3 +Summary: APP Cloud Connect +License: Apache-2.0 +Author: Addon Factory template +Author-email: addonfactory@splunk.com +Requires-Python: >=3.7,<4.0 +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Apache Software License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.7 +Requires-Dist: PySocks (>=1.7.1,<2.0.0) +Requires-Dist: decorator (==5.1.1) +Requires-Dist: jinja2 (>=2.10.1,<4.0.0) +Requires-Dist: jsonpath-ng (>=1.5.2,<2.0.0) +Requires-Dist: jsonschema (>=4.4.0,<5.0.0) +Requires-Dist: munch (>=2.3.2,<3.0.0) +Requires-Dist: requests (>=2.27.1,<3.0.0) +Requires-Dist: solnlib (>=4.6.0,<5.0.0) +Requires-Dist: splunk-sdk (>=1.6,<2.0) +Requires-Dist: splunktalib (==3.0.0) +Requires-Dist: splunktaucclib (==6.0.0) diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib-3.1.3.dist-info/RECORD b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib-3.1.3.dist-info/RECORD new file mode 100644 index 00000000..ffb21148 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib-3.1.3.dist-info/RECORD @@ -0,0 +1,47 @@ +cloudconnectlib-3.1.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +cloudconnectlib-3.1.3.dist-info/LICENSE,sha256=Xvvd894DEl8lUHPEeFU-Ya18RW7Hc2IlPTZS_bE3Hgs,11341 +cloudconnectlib-3.1.3.dist-info/METADATA,sha256=b8QBy1ykc1bv6IlFv5cszhuuR6nqNCXOsr61YwA07k4,1188 +cloudconnectlib-3.1.3.dist-info/RECORD,, +cloudconnectlib-3.1.3.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +cloudconnectlib-3.1.3.dist-info/WHEEL,sha256=vVCvjcmxuUltf8cYhJ0sJMRDLr1XsPuxEId8YDzbyCY,88 +cloudconnectlib/__init__.py,sha256=KPOnKSsmH-eAGJwZ3LQey4CMmJiCQOLsf8xvKjSiiUE,628 +cloudconnectlib/client.py,sha256=bCb28NRHRmDBkFLLa0eYK-CUsH8m4oU9jR00hQMShsc,3124 +cloudconnectlib/common/__init__.py,sha256=TB1W22zY5u1d91XSos8Qa2V5NMCpgLTgIiV2wT4hx3Y,575 +cloudconnectlib/common/lib_util.py,sha256=GqAdYWwzvpOXnbjY__ioyDILwhZC2LeOaHiKcl0s7I4,1669 +cloudconnectlib/common/log.py,sha256=HJ10AfdwC-hMYkCYkhW0W27FYScGavxMbu3mCUFfv_I,1788 +cloudconnectlib/common/util.py,sha256=p3tTSMAOAEkeT0WmUFNWgXTYspL_vvxsoPCHWX-jWck,1942 +cloudconnectlib/configuration/__init__.py,sha256=vDmcGJjnxxkHIICiIOeK0FSJK_hxl3SJGHwdCDbYzMc,617 +cloudconnectlib/configuration/loader.py,sha256=jgzTkjp-HsHRWdKYRR4dRcGcLa3Fv43sZAQgh-iSnro,10605 +cloudconnectlib/configuration/schema_1_0_0.json,sha256=WKWbZJAyWu-rt-OLwvNoMo_UqBi3iwxHbIcRy0aSRy0,11645 +cloudconnectlib/core/__init__.py,sha256=02DdxhQvHpl-ncbu3B3GY1qvoWVHhhpSyYP4Gez9bxU,665 +cloudconnectlib/core/checkpoint.py,sha256=YkpN8pUFb8Z2iScivQHHW9HN9RqlX7FuqqpqNcGfFK8,1816 
+cloudconnectlib/core/defaults.py,sha256=VRwIIKxLGkDQiLB7c776y2rHsb0AmMi3F-hmzJKVdUw,1317 +cloudconnectlib/core/engine.py,sha256=Lt0RQR28YFa_X1cEpOH_LpT3hUcIzGe2lT7tHnw3KGA,10924 +cloudconnectlib/core/engine_v2.py,sha256=FTGYnoVSFUsuvuOh_0AwpsDRN4hxAGwYqdUqSQcUmok,5217 +cloudconnectlib/core/exceptions.py,sha256=pv35SgTgVc6ohLp7sopGYPeIMxcL8N1lWH0Tb6n6_wE,1314 +cloudconnectlib/core/ext.py,sha256=mn_ddquSQ6-6G7bKeSqeg5MYfjRWT0yuxpDY7eDjEVQ,12277 +cloudconnectlib/core/http.py,sha256=vLOnciuoEGt45Ygkrll56KxiSN2DX3tTiKVbiUpbdKw,10992 +cloudconnectlib/core/job.py,sha256=77nHgtyiAZBSvrQiEenb2oiIJ1tbzb3PCvL9AuzrtUc,4873 +cloudconnectlib/core/models.py,sha256=wYrknbeNN6861asiRCKR_5qMpAvHIaDvKE9bFzpW2cU,10002 +cloudconnectlib/core/pipemgr.py,sha256=KAOPjL6ktPQfo1f7pepsiVEOGzN6ut2SjvbyOIZRCIY,929 +cloudconnectlib/core/plugin.py,sha256=GNyOhpT8-HhgAMqG26D5pgE0WcFTkD7QJFUlLEDY6Pc,3592 +cloudconnectlib/core/task.py,sha256=HahqiR6WaS8A4AMBHv4Tss-CNq5HC0Z07l9XJHdqm_A,20996 +cloudconnectlib/core/template.py,sha256=lrnl-xVai45ngNMNg4nfOYYITs9mk16j5nEB9tR3ZpY,1153 +cloudconnectlib/splunktacollectorlib/__init__.py,sha256=TB1W22zY5u1d91XSos8Qa2V5NMCpgLTgIiV2wT4hx3Y,575 +cloudconnectlib/splunktacollectorlib/cloud_connect_mod_input.py,sha256=5tKhEfXcG3OBaZPwFq9d2nuhVkUbjmJdAAg6H54O81A,2915 +cloudconnectlib/splunktacollectorlib/common/__init__.py,sha256=zUQIoODTieWPOAgrAKU0dP1SAS_c6IF1l1LEXO5LvfU,1624 +cloudconnectlib/splunktacollectorlib/common/log.py,sha256=Feu868c98qJYDG2XRZ36aPlm8mLwklVshBFp-2p_W88,1919 +cloudconnectlib/splunktacollectorlib/common/rwlock.py,sha256=PftXe5_OKBCHqueVBzSaAZteSbbJnRypqglxuitpTgo,2065 +cloudconnectlib/splunktacollectorlib/common/schema_meta.py,sha256=k3oKBoU7nvBXIJB5lWvIdykKPpvBV29zyvayd2bv2OQ,803 +cloudconnectlib/splunktacollectorlib/config.py,sha256=QfheX8Z_8HTlSYlqxRlPpLfgdGLMJNL0HZ_6pxdGC4c,14530 +cloudconnectlib/splunktacollectorlib/data_collection/__init__.py,sha256=KD_6X3x7TBHyyDAygJI-_evQkuvkDFtuHHxioYKC9C0,597 +cloudconnectlib/splunktacollectorlib/data_collection/ta_checkpoint_manager.py,sha256=V-QOELo14sNtv48MD_KceEaBhWwBwYdSQ2n2FE2Exs8,6324 +cloudconnectlib/splunktacollectorlib/data_collection/ta_config.py,sha256=iQnPwlcNhW0WzdCDu3TPH8bqSZlMIRrhpHl6D38hbtU,6880 +cloudconnectlib/splunktacollectorlib/data_collection/ta_consts.py,sha256=9ns0y9TOzOzFIfTz0AVHS2Dw66bD1eeFXoH5xGb4-BQ,1897 +cloudconnectlib/splunktacollectorlib/data_collection/ta_data_client.py,sha256=EVzIU1-mYqZ8kg31A_bHD0TVyd_4N7D2T5zWngPnBXk,2876 +cloudconnectlib/splunktacollectorlib/data_collection/ta_data_collector.py,sha256=BhCoFr39FWUDqo6itfcPQy8F9QGXqBV_zzp2KVpAUgA,5701 +cloudconnectlib/splunktacollectorlib/data_collection/ta_data_loader.py,sha256=5LCshOxw4I5esnSwZY-GSEP2Qfo7WwKc_7yU0x7Uyjk,5503 +cloudconnectlib/splunktacollectorlib/data_collection/ta_helper.py,sha256=khCUCJE4XjzXmqYmouyieN6uuFIhDD0D5j87U0ii2lY,5514 +cloudconnectlib/splunktacollectorlib/data_collection/ta_mod_input.py,sha256=W4Wr2aSA1aBuuqLZwRiGl9Dhb187dYxX15206QOEVNE,8355 +cloudconnectlib/splunktacollectorlib/splunk_ta_import_declare.py,sha256=DH8W8j85sDqpVBSoT7uxlp3aqp_LMcG8pyPTblgrPnU,1132 +cloudconnectlib/splunktacollectorlib/ta_cloud_connect_client.py,sha256=88pz8BNSgID2Z_6tmUeLU8wEaZ46GxUHfLi_gEjSc50,1755 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib-3.1.3.dist-info/REQUESTED b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib-3.1.3.dist-info/REQUESTED new file mode 100644 index 00000000..e69de29b diff --git 
a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib-3.1.3.dist-info/WHEEL b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib-3.1.3.dist-info/WHEEL new file mode 100644 index 00000000..4ba76714 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib-3.1.3.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: poetry-core 1.4.0 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/__init__.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/__init__.py index ab2b02fe..5aa568b8 100755 --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/__init__.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/__init__.py @@ -1,10 +1,20 @@ +# +# Copyright 2021 Splunk Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# """ -APP Cloud Connect +Cloud Connect library """ -import os -from .common.lib_util import register_cacert_locater - -register_cacert_locater(os.path.join(os.path.dirname(__file__), 'core', 'cacerts')) - -__version__ = '2.0.2' +__version__ = "3.1.3" diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/client.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/client.py index 1807db47..03832143 100755 --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/client.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/client.py @@ -1,4 +1,18 @@ -from builtins import object +# +# Copyright 2021 Splunk Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# import copy import os.path import traceback @@ -12,9 +26,8 @@ _logger = get_cc_logger() -class CloudConnectClient(object): - """The client of cloud connect used to start a cloud connect engine instance. 
- """ +class CloudConnectClient: + """The client of cloud connect used to start a cloud connect engine instance.""" def __init__(self, context, config_file, checkpoint_mgr): """ @@ -37,19 +50,20 @@ def _load_config(self): conf = load_json_file(self._config_file) except: raise ConfigException( - 'Unable to load configuration file %s: %s' + "Unable to load configuration file %s: %s" % (self._config_file, traceback.format_exc()) ) - version = conf.get('meta', {'apiVersion', None}).get('apiVersion', None) + version = conf.get("meta", {"apiVersion", None}).get("apiVersion", None) if not version: raise ConfigException( - 'Config meta or api version not present in {}'.format( - self._config_file)) + f"Config meta or api version not present in {self._config_file}" + ) config_loader, schema_file = get_loader_by_version(version) schema_path = os.path.join( - os.path.dirname(__file__), 'configuration', schema_file) + os.path.dirname(__file__), "configuration", schema_file + ) return config_loader.load(conf, schema_path, self._context) @@ -65,14 +79,13 @@ def start(self): self._engine.start( context=copy.deepcopy(self._context), config=self._config, - checkpoint_mgr=self._checkpoint_mgr + checkpoint_mgr=self._checkpoint_mgr, ) except Exception as ex: - _logger.exception('Error while starting client') + _logger.exception("Error while starting client") raise ex def stop(self): - """Stop the current cloud connect engine. - """ + """Stop the current cloud connect engine.""" if self._engine: self._engine.stop() diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/common/__init__.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/common/__init__.py index e69de29b..72d45097 100755 --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/common/__init__.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/common/__init__.py @@ -0,0 +1,15 @@ +# +# Copyright 2021 Splunk Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/common/lib_util.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/common/lib_util.py index 08014a49..8710e540 100755 --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/common/lib_util.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/common/lib_util.py @@ -1,3 +1,18 @@ +# +# Copyright 2021 Splunk Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# import os import os.path as op import platform @@ -5,8 +20,6 @@ import __main__ -from ..splunktacollectorlib.common import log as stulog - def get_main_file(): """Return the running mod input file""" @@ -21,34 +34,25 @@ def get_app_root_dir(): def get_mod_input_script_name(): """Return the name of running mod input""" script_name = os.path.basename(get_main_file()) - if script_name.lower().endswith('.py'): + if script_name.lower().endswith(".py"): script_name = script_name[:-3] return script_name def register_module(new_path): - """ register_module(new_path): adds a directory to sys.path. + """register_module(new_path): adds a directory to sys.path. Do nothing if it does not exist or if it's already in sys.path. """ if not os.path.exists(new_path): return new_path = os.path.abspath(new_path) - if platform.system() == 'Windows': + if platform.system() == "Windows": new_path = new_path.lower() for x in sys.path: x = os.path.abspath(x) - if platform.system() == 'Windows': + if platform.system() == "Windows": x = x.lower() if new_path in (x, x + os.sep): return sys.path.insert(0, new_path) - - -def register_cacert_locater(cacerts_locater_path): - for x in sys.modules: - if (x == "httplib2" or x.endswith(".httplib2")) and sys.modules[x] \ - is not None: - stulog.logger.warning("Httplib2 module '{}' is already installed. " - "The ca_certs_locater may not work".format(x)) - register_module(cacerts_locater_path) diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/common/log.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/common/log.py index f310e7d1..50a24622 100755 --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/common/log.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/common/log.py @@ -1,20 +1,35 @@ +# +# Copyright 2021 Splunk Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# import logging from solnlib.pattern import Singleton + from ..splunktacollectorlib.common import log as stulog from ..splunktacollectorlib.data_collection import ta_helper as th from .lib_util import get_mod_input_script_name -from future.utils import with_metaclass -class CloudClientLogAdapter(with_metaclass(Singleton, logging.LoggerAdapter)): +class CloudClientLogAdapter(logging.LoggerAdapter, metaclass=Singleton): def __init__(self, logger=None, extra=None, prefix=""): - super(CloudClientLogAdapter, self).__init__(logger, extra) + super().__init__(logger, extra) self.cc_prefix = prefix if prefix else "" def process(self, msg, kwargs): - msg = "{} {}".format(self.cc_prefix, msg) - return super(CloudClientLogAdapter, self).process(msg, kwargs) + msg = f"{self.cc_prefix} {msg}" + return super().process(msg, kwargs) def set_level(self, val): self.logger.setLevel(val) @@ -23,20 +38,20 @@ def set_level(self, val): _adapter = CloudClientLogAdapter(stulog.logger) -def set_cc_logger(logger, logger_prefix=''): +def set_cc_logger(logger, logger_prefix=""): global _adapter _adapter.logger = logger - _adapter.cc_prefix = logger_prefix or '' + _adapter.cc_prefix = logger_prefix or "" def get_cc_logger(): return _adapter -def reset_cc_logger(stanza_name, logging_level, logger_prefix=''): +def reset_cc_logger(stanza_name, logging_level, logger_prefix=""): script_name = get_mod_input_script_name() logger_name = script_name + "_" + th.format_name_for_file(stanza_name) stulog.reset_logger(logger_name) stulog.set_log_level(logging_level) set_cc_logger(stulog.logger, logger_prefix) - return get_cc_logger() \ No newline at end of file + return get_cc_logger() diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/common/util.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/common/util.py index 1d1133ea..1e188aeb 100755 --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/common/util.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/common/util.py @@ -1,5 +1,21 @@ +# +# Copyright 2021 Splunk Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# import json -from ..splunktalib.common import util + +from solnlib import utils from solnlib.modular_input.event import XMLEvent @@ -8,11 +24,11 @@ def is_valid_bool(val): :param val: value as string. :return: `True` if value can be convert to bool else `False`. """ - return util.is_true(val) or util.is_false(val) + return utils.is_true(val) or utils.is_false(val) def is_true(val): - return util.is_true(val) + return utils.is_true(val) def is_valid_port(port): @@ -32,17 +48,32 @@ def load_json_file(file_path): :param file_path: JSON file path. :return: A `dict` object. 
""" - with open(file_path, 'r') as file_pointer: + with open(file_path) as file_pointer: return json.load(file_pointer) -def format_events(raw_events, time=None, - index=None, host=None, source=None, sourcetype=None, - stanza=None, unbroken=False, done=False): - return XMLEvent.format_events(XMLEvent(data, time=time, - index=index, host=host, - source=source, - sourcetype=sourcetype, - stanza=stanza, unbroken=unbroken, - done=done) for data in - raw_events) +def format_events( + raw_events, + time=None, + index=None, + host=None, + source=None, + sourcetype=None, + stanza=None, + unbroken=False, + done=False, +): + return XMLEvent.format_events( + XMLEvent( + data, + time=time, + index=index, + host=host, + source=source, + sourcetype=sourcetype, + stanza=stanza, + unbroken=unbroken, + done=done, + ) + for data in raw_events + ) diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/configuration/__init__.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/configuration/__init__.py index 4b1c2b18..65f309eb 100755 --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/configuration/__init__.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/configuration/__init__.py @@ -1 +1,16 @@ +# +# Copyright 2021 Splunk Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# from .loader import get_loader_by_version diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/configuration/loader.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/configuration/loader.py index deaed280..c9ac7a6d 100755 --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/configuration/loader.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/configuration/loader.py @@ -1,59 +1,74 @@ -from builtins import str -from builtins import object +# +# Copyright 2021 Splunk Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# import logging import re import traceback from abc import abstractmethod -import six -from jsonschema import validate, ValidationError +from jsonschema import ValidationError, validate from munch import munchify + from ..common.log import get_cc_logger -from ..common.util import ( - load_json_file, is_valid_bool, is_valid_port, is_true -) +from ..common.util import is_true, is_valid_bool, is_valid_port, load_json_file from ..core.exceptions import ConfigException from ..core.ext import lookup_method from ..core.models import ( - BasicAuthorization, RequestParams, Processor, - Condition, Task, Checkpoint, IterationMode, - DictToken + BasicAuthorization, + Checkpoint, + Condition, + DictToken, + IterationMode, + Processor, + RequestParams, + Task, ) _logger = get_cc_logger() -_PROXY_TYPES = ['http', 'socks4', 'socks5', 'http_no_tunnel'] -_AUTH_TYPES = { - 'basic_auth': BasicAuthorization -} +_PROXY_TYPES = ["http", "socks4", "socks5", "http_no_tunnel"] +_AUTH_TYPES = {"basic_auth": BasicAuthorization} _LOGGING_LEVELS = { - 'DEBUG': logging.DEBUG, - 'INFO': logging.INFO, - 'WARNING': logging.WARNING, - 'ERROR': logging.ERROR, - 'FATAL': logging.FATAL, - 'CRITICAL': logging.CRITICAL + "DEBUG": logging.DEBUG, + "INFO": logging.INFO, + "WARNING": logging.WARNING, + "ERROR": logging.ERROR, + "FATAL": logging.FATAL, + "CRITICAL": logging.CRITICAL, } # FIXME Make this configurable -_DEFAULT_LOG_LEVEL = 'INFO' +_DEFAULT_LOG_LEVEL = "INFO" -class CloudConnectConfigLoader(object): +class CloudConnectConfigLoader: """The Base cloud connect configuration loader""" @staticmethod def _get_schema_from_file(schema_file): - """ Load JSON based schema definition from schema file path. + """Load JSON based schema definition from schema file path. :return: A `dict` contains schema. 
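# The jsonschema import above feeds the validate-and-wrap pattern this loader
# uses: a ValidationError from validate() is converted into the library's
# ConfigException. A standalone sketch with an invented schema and a local
# stand-in exception:
import traceback
from jsonschema import ValidationError, validate

SCHEMA = {"type": "object", "required": ["meta"], "properties": {"meta": {"type": "object"}}}

class ConfigError(Exception):  # stand-in for cloudconnectlib's ConfigException
    pass

def check(definition):
    try:
        validate(definition, SCHEMA)
    except ValidationError:
        raise ConfigError(
            f"Failed to validate interface with schema: {traceback.format_exc()}"
        )

check({"meta": {}})  # passes silently
try:
    check({})        # missing "meta" is rejected and wrapped
except ConfigError as err:
    print(type(err).__name__)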
""" try: return load_json_file(schema_file) except: raise ConfigException( - 'Cannot load schema from file {}: {}'.format( - schema_file, traceback.format_exc()) + "Cannot load schema from file {}: {}".format( + schema_file, traceback.format_exc() + ) ) @abstractmethod @@ -66,8 +81,7 @@ class CloudConnectConfigLoaderV1(CloudConnectConfigLoader): def _render_from_dict(source, ctx): rendered = DictToken(source).render(ctx) - return dict((k, v.strip() if isinstance(v, six.string_types) else v) - for k, v in rendered.items()) + return {k: v.strip() if isinstance(v, str) else v for k, v in rendered.items()} def _load_proxy(self, candidate, variables): """ @@ -81,15 +95,13 @@ def _load_proxy(self, candidate, variables): proxy = self._render_from_dict(candidate, variables) - enabled = proxy.get('enabled', '0') + enabled = proxy.get("enabled", "0") if not is_valid_bool(enabled): - raise ValueError( - 'Proxy "enabled" expect to be bool type: {}'.format(enabled) - ) + raise ValueError(f'Proxy "enabled" expect to be bool type: {enabled}') - proxy['enabled'] = is_true(enabled) + proxy["enabled"] = is_true(enabled) - host, port = proxy.get('host'), proxy.get('port') + host, port = proxy.get("host"), proxy.get("port") if host or port: if not host: @@ -101,24 +113,23 @@ def _load_proxy(self, candidate, variables): ) # proxy type default to 'http' - proxy_type = proxy.get('type') - proxy_type = proxy_type.lower() if proxy_type else 'http' + proxy_type = proxy.get("type") + proxy_type = proxy_type.lower() if proxy_type else "http" if proxy_type not in _PROXY_TYPES: raise ValueError( 'Proxy "type" expect to be one of [{}]: {}'.format( - ','.join(_PROXY_TYPES), proxy_type) + ",".join(_PROXY_TYPES), proxy_type + ) ) else: - proxy['type'] = proxy_type + proxy["type"] = proxy_type # proxy rdns default to '0' - proxy_rdns = proxy.get('rdns', '0') + proxy_rdns = proxy.get("rdns", "0") if not is_valid_bool(proxy_rdns): - raise ValueError( - 'Proxy "rdns" expect to be bool type: {}'.format(proxy_rdns) - ) + raise ValueError(f'Proxy "rdns" expect to be bool type: {proxy_rdns}') else: - proxy['rdns'] = is_true(proxy_rdns) + proxy["rdns"] = is_true(proxy_rdns) return proxy @@ -133,7 +144,8 @@ def _get_log_level(level_name): _logger.warning( 'The log level "%s" is invalid, set it to default: "%s"', - level_name, _DEFAULT_LOG_LEVEL + level_name, + _DEFAULT_LOG_LEVEL, ) return _LOGGING_LEVELS[_DEFAULT_LOG_LEVEL] @@ -141,7 +153,7 @@ def _get_log_level(level_name): def _load_logging(self, log_setting, variables): logger = self._render_from_dict(log_setting, variables) - logger['level'] = self._get_log_level(logger.get('level')) + logger["level"] = self._get_log_level(logger.get("level")) return logger @@ -153,98 +165,94 @@ def _load_global_setting(self, candidate, variables): :return: A `Munch` object """ candidate = candidate or {} - proxy_setting = self._load_proxy(candidate.get('proxy'), variables) - log_setting = self._load_logging(candidate.get('logging'), variables) + proxy_setting = self._load_proxy(candidate.get("proxy"), variables) + log_setting = self._load_logging(candidate.get("logging"), variables) - return munchify({'proxy': proxy_setting, 'logging': log_setting}) + return munchify({"proxy": proxy_setting, "logging": log_setting}) @staticmethod def _load_authorization(candidate): if candidate is None: return None - auth_type = candidate['type'].lower() + auth_type = candidate["type"].lower() if auth_type not in _AUTH_TYPES: raise ValueError( - 'Auth type expect to be one of [{}]: {}'.format( - 
','.join(list(_AUTH_TYPES.keys())), auth_type) + "Auth type expect to be one of [{}]: {}".format( + ",".join(list(_AUTH_TYPES.keys())), auth_type + ) ) - return _AUTH_TYPES[auth_type](candidate['options']) + return _AUTH_TYPES[auth_type](candidate["options"]) def _load_options(self, options): return RequestParams( - auth=self._load_authorization(options.get('auth')), - url=options['url'], - method=options.get('method', 'GET'), - header=options.get('headers', {}), - body=options.get('body', {}) + auth=self._load_authorization(options.get("auth")), + url=options["url"], + method=options.get("method", "GET"), + header=options.get("headers", {}), + body=options.get("body", {}), ) @staticmethod def _validate_method(method): if lookup_method(method) is None: - raise ValueError('Unimplemented method: {}'.format(method)) + raise ValueError(f"Unimplemented method: {method}") def _parse_tasks(self, raw_tasks): tasks = [] for item in raw_tasks: - self._validate_method(item['method']) - tasks.append(Task(item['input'], item['method'], item.get('output'))) + self._validate_method(item["method"]) + tasks.append(Task(item["input"], item["method"], item.get("output"))) return tasks def _parse_conditions(self, raw_conditions): conditions = [] for item in raw_conditions: - self._validate_method(item['method']) - conditions.append(Condition(item['input'], item['method'])) + self._validate_method(item["method"]) + conditions.append(Condition(item["input"], item["method"])) return conditions @staticmethod def _load_checkpoint(checkpoint): if not checkpoint: return None - return Checkpoint( - checkpoint.get('namespace', []), checkpoint['content']) + return Checkpoint(checkpoint.get("namespace", []), checkpoint["content"]) def _load_iteration_mode(self, iteration_mode): - count = iteration_mode.get('iteration_count', '0') + count = iteration_mode.get("iteration_count", "0") try: iteration_count = int(count) except ValueError: - raise ValueError( - '"iteration_count" must be an integer: %s' % count) + raise ValueError('"iteration_count" must be an integer: %s' % count) - stop_conditions = self._parse_conditions( - iteration_mode['stop_conditions']) + stop_conditions = self._parse_conditions(iteration_mode["stop_conditions"]) - return IterationMode(iteration_count=iteration_count, - conditions=stop_conditions) + return IterationMode( + iteration_count=iteration_count, conditions=stop_conditions + ) def _load_processor(self, processor): - skip_conditions = self._parse_conditions( - processor.get('skip_conditions', []) - ) - pipeline = self._parse_tasks(processor.get('pipeline', [])) - return Processor( - skip_conditions=skip_conditions, - pipeline=pipeline - ) + skip_conditions = self._parse_conditions(processor.get("skip_conditions", [])) + pipeline = self._parse_tasks(processor.get("pipeline", [])) + return Processor(skip_conditions=skip_conditions, pipeline=pipeline) def _load_request(self, request): - options = self._load_options(request['request']) - - pre_process = self._load_processor(request.get('pre_process', {})) - post_process = self._load_processor(request['post_process']) - checkpoint = self._load_checkpoint(request.get('checkpoint')) - iteration_mode = self._load_iteration_mode(request['iteration_mode']) - - return munchify({ - 'request': options, - 'pre_process': pre_process, - 'post_process': post_process, - 'checkpoint': checkpoint, - 'iteration_mode': iteration_mode, - }) + options = self._load_options(request["request"]) + + pre_process = self._load_processor(request.get("pre_process", {})) + 
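# An invented, illustrative definition of the `request` block the loaders above
# consume in _load_request(): request options, optional pre/post processors, an
# optional checkpoint, and the required iteration_mode. Every field value and
# "{{...}}" token below is a placeholder, not a real configuration.
example_request = {
    "request": {
        "url": "https://api.example.com/items",
        "method": "GET",
        "headers": {"Accept": "application/json"},
        "auth": {"type": "basic_auth", "options": {"username": "user", "password": "pass"}},
    },
    "pre_process": {"skip_conditions": [], "pipeline": []},
    "post_process": {
        "pipeline": [
            {"input": ["{{__response__.body}}"], "method": "std_output"},
        ],
    },
    "checkpoint": {"namespace": ["{{stanza_name}}"], "content": {"cursor": "{{cursor}}"}},
    "iteration_mode": {
        "iteration_count": "10",
        "stop_conditions": [
            {"input": ["{{__response__.body}}"], "method": "json_empty"},
        ],
    },
}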
post_process = self._load_processor(request["post_process"]) + checkpoint = self._load_checkpoint(request.get("checkpoint")) + iteration_mode = self._load_iteration_mode(request["iteration_mode"]) + + return munchify( + { + "request": options, + "pre_process": pre_process, + "post_process": post_process, + "checkpoint": checkpoint, + "iteration_mode": iteration_mode, + } + ) def load(self, definition, schema_file, context): """Load cloud connect configuration from a `dict` and validate @@ -258,35 +266,39 @@ def load(self, definition, schema_file, context): validate(definition, self._get_schema_from_file(schema_file)) except ValidationError: raise ConfigException( - 'Failed to validate interface with schema: {}'.format( - traceback.format_exc())) + "Failed to validate interface with schema: {}".format( + traceback.format_exc() + ) + ) try: global_settings = self._load_global_setting( - definition.get('global_settings'), context + definition.get("global_settings"), context ) - requests = [self._load_request(item) for item in definition['requests']] + requests = [self._load_request(item) for item in definition["requests"]] - return munchify({ - 'meta': munchify(definition['meta']), - 'tokens': definition['tokens'], - 'global_settings': global_settings, - 'requests': requests, - }) + return munchify( + { + "meta": munchify(definition["meta"]), + "tokens": definition["tokens"], + "global_settings": global_settings, + "requests": requests, + } + ) except Exception as ex: - error = 'Unable to load configuration: %s' % str(ex) + error = "Unable to load configuration: %s" % str(ex) _logger.exception(error) raise ConfigException(error) _loader_and_schema_by_version = { - r'1\.0\.0': (CloudConnectConfigLoaderV1, 'schema_1_0_0.json'), + r"1\.0\.0": (CloudConnectConfigLoaderV1, "schema_1_0_0.json"), } def get_loader_by_version(version): - """ Instantiate a configuration loader on basis of a given version. + """Instantiate a configuration loader on basis of a given version. A `ConfigException` will raised if the version is not supported. :param version: Version to lookup config loader. :return: A config loader. @@ -297,7 +309,8 @@ def get_loader_by_version(version): return loader_cls(), schema raise ConfigException( - 'Unsupported schema version {}, current supported' - ' versions should match these regex [{}]'.format(version, ','.join( - _loader_and_schema_by_version)) + "Unsupported schema version {}, current supported" + " versions should match these regex [{}]".format( + version, ",".join(_loader_and_schema_by_version) + ) ) diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/core/__init__.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/core/__init__.py index c2424fb1..3cd87a05 100755 --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/core/__init__.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/core/__init__.py @@ -1,2 +1,17 @@ +# +# Copyright 2021 Splunk Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# from .engine import CloudConnectEngine from .exceptions import ConfigException, HTTPError diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/core/cacerts/ca_certs_locater.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/core/cacerts/ca_certs_locater.py index c05f8752..44040b24 100755 --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/core/cacerts/ca_certs_locater.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/core/cacerts/ca_certs_locater.py @@ -1,3 +1,18 @@ +# +# Copyright 2021 Splunk Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# """ `ca_certs_locater` is a lib for extending httplib2 to allow system certificate store to be used when verifying SSL certificates, to enable this lib, you should add it to your python import path before diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/core/checkpoint.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/core/checkpoint.py old mode 100755 new mode 100644 index 9312d37b..6e8b8b90 --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/core/checkpoint.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/core/checkpoint.py @@ -1,6 +1,21 @@ +# +# Copyright 2021 Splunk Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
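# A compact sketch of the regex-keyed dispatch get_loader_by_version() uses
# earlier in loader.py: keys are version regexes, values are (loader, schema)
# pairs, and the first matching key wins. Names are illustrative stand-ins.
import re

_registry = {r"1\.0\.0": ("CloudConnectConfigLoaderV1", "schema_1_0_0.json")}

def lookup_loader(version):
    for pattern, (loader, schema) in _registry.items():
        if re.match(pattern, version):
            return loader, schema
    raise LookupError(
        "Unsupported schema version {}, supported versions match [{}]".format(
            version, ",".join(_registry)
        )
    )

assert lookup_loader("1.0.0") == ("CloudConnectConfigLoaderV1", "schema_1_0_0.json")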
+# import cloudconnectlib.splunktacollectorlib.data_collection.ta_checkpoint_manager as tacm from cloudconnectlib.common.log import get_cc_logger -from cloudconnectlib.core.models import _Token, DictToken +from cloudconnectlib.core.models import DictToken, _Token logger = get_cc_logger() @@ -9,7 +24,7 @@ class CheckpointManagerAdapter(tacm.TACheckPointMgr): """Wrap TACheckPointMgr for custom usage""" def __init__(self, namespaces, content, meta_config, task_config): - super(CheckpointManagerAdapter, self).__init__(meta_config, task_config) + super().__init__(meta_config, task_config) if isinstance(namespaces, (list, tuple)): self.namespaces = (_Token(t) for t in namespaces) else: @@ -21,16 +36,15 @@ def _namespaces_for(self, ctx): def save(self, ctx): """Save checkpoint""" - super(CheckpointManagerAdapter, self).update_ckpt( - ckpt=self.content.render(ctx), - namespaces=self._namespaces_for(ctx) + super().update_ckpt( + ckpt=self.content.render(ctx), namespaces=self._namespaces_for(ctx) ) def load(self, ctx): """Load checkpoint""" namespaces = self._namespaces_for(ctx) - checkpoint = super(CheckpointManagerAdapter, self).get_ckpt(namespaces) + checkpoint = super().get_ckpt(namespaces) if checkpoint is None: - logger.info('No existing checkpoint found') + logger.info("No existing checkpoint found") checkpoint = {} return checkpoint diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/core/defaults.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/core/defaults.py index 84d79897..52710e53 100755 --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/core/defaults.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/core/defaults.py @@ -1,3 +1,18 @@ +# +# Copyright 2021 Splunk Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# """Default config for cloud connect""" timeout = 120 # request timeout is two minutes @@ -7,14 +22,29 @@ success_statuses = (200, 201) # statuses be treated as success. # response status which need to retry. -retry_statuses = (429, 500, 501, 502, 503, 504, 505, 506, 507, - 509, 510, 511) +retry_statuses = (429, 500, 501, 502, 503, 504, 505, 506, 507, 509, 510, 511) # response status which need print a warning log. -warning_statuses = (203, 204, 205, 206, 207, 208, 226, - 300, 301, 302, 303, 304, 305, 306, 307, 308) +warning_statuses = ( + 203, + 204, + 205, + 206, + 207, + 208, + 226, + 300, + 301, + 302, + 303, + 304, + 305, + 306, + 307, + 308, +) retries = 3 # Default maximum retry times. 
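# How these defaults drive the retry policy used by engine.py and http.py
# below: a status in retry_statuses is retried with exponential backoff
# (delay = 2**attempt) until `retries` is exhausted. A dry-run sketch that
# prints the schedule instead of sleeping:
RETRY_STATUSES = (429, 500, 501, 502, 503, 504, 505, 506, 507, 509, 510, 511)
RETRIES = 3

def is_need_retry(status, attempt, maximum_retries=RETRIES):
    return attempt < maximum_retries and status in RETRY_STATUSES

for attempt in range(RETRIES + 1):
    status = 503  # pretend every attempt gets a 503 back
    if is_need_retry(status, attempt):
        print(f"attempt {attempt}: status {status}, retrying in {2 ** attempt}s")
    else:
        print(f"attempt {attempt}: status {status}, giving up")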
max_iteration_count = 100 # maximum iteration loop count -charset = 'utf-8' # Default response charset if not found in response header +charset = "utf-8" # Default response charset if not found in response header diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/core/engine.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/core/engine.py index 1ea24134..41974519 100755 --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/core/engine.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/core/engine.py @@ -1,17 +1,31 @@ -from builtins import object +# +# Copyright 2021 Splunk Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# import threading +from ..common.log import get_cc_logger from . import defaults from .exceptions import HTTPError, StopCCEIteration from .http import HttpClient -from ..common.log import get_cc_logger _logger = get_cc_logger() -class CloudConnectEngine(object): +class CloudConnectEngine: """The cloud connect engine to process request instantiated - from user options.""" + from user options.""" def __init__(self): self._stopped = False @@ -23,17 +37,17 @@ def _set_logging(log_setting): def start(self, context, config, checkpoint_mgr): """Start current client instance to execute each request parsed - from config. + from config. """ if not config: - raise ValueError('Config must not be empty') + raise ValueError("Config must not be empty") context = context or {} global_setting = config.global_settings CloudConnectEngine._set_logging(global_setting.logging) - _logger.info('Start to execute requests jobs.') + _logger.info("Start to execute requests jobs.") processed = 0 for request in config.requests: @@ -47,34 +61,33 @@ def start(self, context, config, checkpoint_mgr): job.run() processed += 1 - _logger.info('%s job(s) process finished', processed) + _logger.info("%s job(s) finished processing", processed) if self._stopped: - _logger.info( - 'Engine has been stopped, stopping to execute jobs.') + _logger.info("Engine has been stopped, halting job execution.") break self._stopped = True - _logger.info('Engine executing finished') + _logger.info("Engine execution finished") def stop(self): """Stops engine and running job. Does nothing if the engine has already been stopped.""" if self._stopped: - _logger.info('Engine already stopped, do nothing.') + _logger.info("Engine already stopped, do nothing.") return - _logger.info('Stopping engine') + _logger.info("Stopping engine") if self._running_job: - _logger.info('Attempting to stop the running job.') + _logger.info("Attempting to stop the running job.") self._running_job.terminate() - _logger.info('Stopping job finished.') + _logger.info("Stopping job finished.") self._stopped = True -class Job(object): +class Job: """Job class represents a single request to send HTTP requests until its stop condition is reached.
""" @@ -106,27 +119,30 @@ def __init__(self, request, context, checkpoint_mgr, proxy=None): def _get_max_iteration_count(self): mode_max_count = self._iteration_mode.iteration_count default_max_count = defaults.max_iteration_count - return min(default_max_count, mode_max_count) \ - if mode_max_count > 0 else default_max_count + return ( + min(default_max_count, mode_max_count) + if mode_max_count > 0 + else default_max_count + ) def terminate(self, block=True, timeout=30): """Terminate this job; the current thread will block until - the job is terminate finished if block is True """ + the job has terminated, if block is True""" if self.is_stopped(): - _logger.info('Job already been stopped.') + _logger.info("Job has already been stopped.") return if self._running_thread == threading.current_thread(): - _logger.warning('Job cannot terminate itself.') + _logger.warning("Job cannot terminate itself.") return - _logger.info('Stopping job') + _logger.info("Stopping job") self._should_stop = True if not block: return if not self._terminated.wait(timeout): - _logger.warning('Terminating job timeout.') + _logger.warning("Terminating job timed out.") def _set_context(self, key, value): self._context[key] = value @@ -146,12 +162,11 @@ def _on_pre_process(self): pre_processor = self._request.pre_process if pre_processor.should_skipped(self._context): - _logger.info('Skip pre process condition satisfied, do nothing') + _logger.info("Skip pre process condition satisfied, do nothing") return tasks = pre_processor.pipeline - _logger.debug( - 'Got %s tasks need be executed before process', len(tasks)) + _logger.debug("Got %s tasks to be executed before process", len(tasks)) self._execute_tasks(tasks) def _on_post_process(self): @@ -161,21 +176,18 @@ def _on_post_process(self): post_processor = self._request.post_process if post_processor.should_skipped(self._context): - _logger.info('Skip post process condition satisfied, ' - 'do nothing') + _logger.info("Skip post process condition satisfied, do nothing") return tasks = post_processor.pipeline - _logger.debug( - 'Got %s tasks need to be executed after process', len(tasks) - ) + _logger.debug("Got %s tasks to be executed after process", len(tasks)) self._execute_tasks(tasks) def _update_checkpoint(self): """Updates checkpoint based on checkpoint namespace and content.""" checkpoint = self._request.checkpoint if not checkpoint: - _logger.info('Checkpoint not specified, do not update it.') + _logger.info("Checkpoint not specified, do not update it.") return self._checkpoint_mgr.update_ckpt( @@ -186,7 +198,7 @@ def _update_checkpoint(self): def _get_checkpoint(self): checkpoint = self._request.checkpoint if not checkpoint: - _logger.info('Checkpoint not specified, do not read it.') + _logger.info("Checkpoint not specified, do not read it.") return namespaces = checkpoint.normalize_namespace(self._context) @@ -198,14 +210,15 @@ def _is_stoppable(self): """Check if repeat mode conditions are satisfied.""" if self._request_iterated_count >= self._max_iteration_count: _logger.info( - 'Job iteration count is %s, current request count is %s,' - ' stop condition satisfied.', - self._max_iteration_count, self._request_iterated_count + "Job iteration count is %s, current request count is %s," + " stop condition satisfied.", + self._max_iteration_count, + self._request_iterated_count, ) return True if self._iteration_mode.passed(self._context): - _logger.info('Job stop condition satisfied.') + _logger.info("Job stop condition satisfied.") return True return False
@@ -215,25 +228,25 @@ def is_stopped(self): return self._stopped def run(self): - """Start job and exit util meet stop condition. """ - _logger.info('Start to process job') + """Start the job and exit when a stop condition is met.""" + _logger.info("Start to process job") self._stopped = False try: self._running_thread = threading.current_thread() self._run() except Exception: - _logger.exception('Error encountered while running job.') + _logger.exception("Error encountered while running job.") raise finally: self._terminated.set() self._stopped = True - _logger.info('Job processing finished') + _logger.info("Job processing finished") def _check_should_stop(self): if self._should_stop: - _logger.info('Job should been stopped.') + _logger.info("Job should be stopped.") return self._should_stop def _run(self): @@ -248,7 +261,9 @@ def _run(self): try: self._on_pre_process() except StopCCEIteration: - _logger.info('Stop iteration command in pre process is received, exit job now.') + _logger.info( + "Stop iteration command in pre process is received, exit job now." + ) return r = request.render(self._context) @@ -262,18 +277,20 @@ def _run(self): response, need_terminate = self._send_request(r) if need_terminate: - _logger.info('This job need to be terminated.') + _logger.info("This job needs to be terminated.") break self._request_iterated_count += 1 - self._set_context('__response__', response) + self._set_context("__response__", response) if self._check_should_stop(): return try: self._on_post_process() except StopCCEIteration: - _logger.info('Stop iteration command in post process is received, exit job now.') + _logger.info( + "Stop iteration command in post process is received, exit job now." + ) return if self._check_should_stop(): @@ -281,7 +298,7 @@ def _run(self): self._update_checkpoint() if self._is_stoppable(): - _logger.info('Stop condition reached, exit job now') + _logger.info("Stop condition reached, exit job now") break def _send_request(self, request): @@ -291,26 +308,30 @@ def _send_request(self, request): try: response = self._client.send(request) except HTTPError as error: _logger.exception( - 'HTTPError reason=%s when sending request to ' - 'url=%s method=%s', error.reason, request.url, request.method) + "HTTPError reason=%s when sending request to url=%s method=%s", + error.reason, + request.url, + request.method, + ) return None, True status = response.status_code if status in defaults.success_statuses: - if not (response.body or '').strip(): + if not (response.body or "").strip(): _logger.info( - 'The response body of request which url=%s and' - ' method=%s is empty, status=%s.', - request.url, request.method, status + "The response body of request which url=%s and" + " method=%s is empty, status=%s.", + request.url, + request.method, + status, ) return None, True return response, False - error_log = ('The response status=%s for request which url=%s and' - ' method=%s.') % ( - status, request.url, request.method - ) + error_log = ( + "The response status=%s for request which url=%s and method=%s."
+ ) % (status, request.url, request.method) if status in defaults.warning_statuses: _logger.warning(error_log) diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/core/engine_v2.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/core/engine_v2.py old mode 100755 new mode 100644 index 56999caf..250ade62 --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/core/engine_v2.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/core/engine_v2.py @@ -1,24 +1,44 @@ -from builtins import object +# +# Copyright 2021 Splunk Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# import concurrent.futures as cf import threading from collections import Iterable -from ..common.log import get_cc_logger from os import path as op + +from ..common.log import get_cc_logger from .plugin import init_pipeline_plugins -logger = get_cc_logger() +logger = get_cc_logger() -class CloudConnectEngine(object): - def __init__(self, max_workers=4): +class CloudConnectEngine: + def __init__(self, max_workers=4, plugin_dir=""): + """ + Initialize CloudConnectEngine object + :param max_workers: maximum number of Threads to execute the given calls + :param plugin_dir: Absolute path of directory containing cce_plugin_*.py + """ self._executor = cf.ThreadPoolExecutor(max_workers) self._pending_job_results = set() self._shutdown = False self._pending_jobs = [] self._counter = 0 self._lock = threading.RLock() - init_pipeline_plugins( - op.join(op.dirname(op.dirname(__file__)), "plugin")) + plugin_dir = plugin_dir or op.join(op.dirname(op.dirname(__file__)), "plugin") + init_pipeline_plugins(plugin_dir) def start(self, jobs=None): """ @@ -39,8 +59,9 @@ def start(self, jobs=None): break # check the intermediate results to find the done jobs and not # done jobs - done_and_not_done_jobs = cf.wait(self._pending_job_results, - return_when=cf.FIRST_COMPLETED) + done_and_not_done_jobs = cf.wait( + self._pending_job_results, return_when=cf.FIRST_COMPLETED + ) self._pending_job_results = done_and_not_done_jobs.not_done done_job_results = done_and_not_done_jobs.done for future in done_job_results: @@ -53,7 +74,7 @@ def start(self, jobs=None): self._add_job(temp) else: self._add_job(result) - except: + except Exception: logger.exception("CloudConnectEngine encountered exception") finally: self._teardown() @@ -71,8 +92,7 @@ def _add_job(self, job): result = self._executor.submit(self._invoke_job, job) self._pending_job_results.add(result) self._counter += 1 - logger.debug("%s job(s) have been added to the engine now", - self._counter) + logger.debug("%s job(s) have been added to the engine now", self._counter) return True def _invoke_job(self, job): @@ -87,7 +107,7 @@ def _invoke_job(self, job): return None invoke_result = job.run() return invoke_result - except: + except Exception: logger.exception("job %s is invoked with exception", job) return None finally: diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/core/exceptions.py 
b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/core/exceptions.py index 039f2d41..3b0ccf78 100755 --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/core/exceptions.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/core/exceptions.py @@ -1,3 +1,18 @@ +# +# Copyright 2021 Splunk Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# """APP Cloud Connect errors""" @@ -7,32 +22,36 @@ class CCEError(Exception): class ConfigException(CCEError): """Config exception""" + pass class FuncException(CCEError): """Ext function call exception""" + pass class HTTPError(CCEError): - """ HTTPError raised when HTTP request returned a error.""" + """HTTPError raised when HTTP request returned a error.""" def __init__(self, reason=None): """ Initialize HTTPError with `response` object and `status`. """ self.reason = reason - super(HTTPError, self).__init__(reason) + super().__init__(reason) class StopCCEIteration(CCEError): """Exception to exit from the engine iteration.""" + pass class CCESplitError(CCEError): """Exception to exit the job in Split Task""" + pass diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/core/ext.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/core/ext.py index 9773b94a..abe08756 100755 --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/core/ext.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/core/ext.py @@ -1,31 +1,48 @@ -from builtins import str -from builtins import range +# +# Copyright 2021 Splunk Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
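# A self-contained sketch of the scheduling pattern engine_v2.py adopts above:
# jobs go into a ThreadPoolExecutor and completions are drained with
# concurrent.futures.wait(..., return_when=FIRST_COMPLETED). Plain callables
# stand in for CCE jobs here.
import concurrent.futures as cf

def job(n):
    return n * n

executor = cf.ThreadPoolExecutor(max_workers=4)
pending = {executor.submit(job, n) for n in range(5)}

while pending:
    done, pending = cf.wait(pending, return_when=cf.FIRST_COMPLETED)
    for future in done:
        print("job result:", future.result())

executor.shutdown()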
+# import calendar import json import re import traceback from collections import Iterable from datetime import datetime -import six -from jsonpath_rw import parse -from .exceptions import FuncException, StopCCEIteration, QuitJobError +from jsonpath_ng import parse + +from ..common import log, util +from .exceptions import FuncException, QuitJobError, StopCCEIteration from .pipemgr import PipeManager -from ..common import util, log _logger = log.get_cc_logger() def regex_search(pattern, source, flags=0): """Search substring in source through regex""" - if not isinstance(source, six.string_types): - _logger.warning('Cannot apply regex search on non-string: %s', type(source)) + if not isinstance(source, str): + _logger.warning("Cannot apply regex search on non-string: %s", type(source)) return {} try: matches = re.search(pattern=pattern, string=source, flags=flags) except Exception: - _logger.warning('Unable to search pattern=%s and flags=%s in string, error=%s', - pattern, flags, traceback.format_exc()) + _logger.warning( + "Unable to search pattern=%s and flags=%s in string, error=%s", + pattern, + flags, + traceback.format_exc(), + ) return {} else: return matches.groupdict() if matches else {} @@ -44,49 +61,36 @@ def regex_match(pattern, source, flags=0): return re.match(pattern, source, flags) is not None except Exception: _logger.warning( - 'Unable to match source with pattern=%s, cause=%s', + "Unable to match source with pattern=%s, cause=%s", pattern, - traceback.format_exc() + traceback.format_exc(), ) return False -def regex_not_match(pattern, source, flags=0): - """ - Determine whether a string is not match a regex pattern. - - :param pattern: regex expression - :param source: candidate to match regex - :param flags: flags for regex match - :return: `True` if candidate not match pattern else `False` - """ - return not regex_match(pattern, source, flags) - - def json_path(source, json_path_expr): - """ Extract value from string with JSONPATH expression. + """Extract value from string with JSONPATH expression. :param json_path_expr: JSONPATH expression :param source: string to extract value :return: A `list` contains all values extracted """ if not source: - _logger.debug('source to apply JSONPATH is empty, return empty.') - return '' + _logger.debug("source to apply JSONPATH is empty, return empty.") + return "" - if isinstance(source, six.string_types): + if isinstance(source, str): _logger.debug( - 'source expected is a JSON, not %s. Attempt to' - ' convert it to JSON', - type(source) + "source expected is a JSON, not %s. Attempt to" " convert it to JSON", + type(source), ) try: source = json.loads(source) except Exception as ex: _logger.warning( - 'Unable to load JSON from source: %s. ' + "Unable to load JSON from source: %s. 
" 'Attempt to apply JSONPATH "%s" on source directly.', ex, - json_path_expr + json_path_expr, ) try: @@ -95,31 +99,29 @@ def json_path(source, json_path_expr): _logger.debug( 'Got %s elements extracted with JSONPATH expression "%s"', - len(results), json_path_expr + len(results), + json_path_expr, ) if not results: - return '' + return "" - return results[0] or '' if len(results) == 1 else results + return results[0] or "" if len(results) == 1 else results except Exception as ex: _logger.warning( 'Unable to apply JSONPATH expression "%s" on source,' - ' message=%s cause=%s', + " message=%s cause=%s", json_path_expr, ex, - traceback.format_exc() + traceback.format_exc(), ) - return '' + return "" -def splunk_xml(candidates, - time=None, - index=None, - host=None, - source=None, - sourcetype=None): - """ Wrap a event with splunk xml format. +def splunk_xml( + candidates, time=None, index=None, host=None, source=None, sourcetype=None +): + """Wrap a event with splunk xml format. :param candidates: data used to wrap as event :param time: timestamp which must be empty or a valid float :param index: index name for event @@ -137,8 +139,7 @@ def splunk_xml(candidates, time = float(time) except ValueError: _logger.warning( - '"time" %s is expected to be a float, set "time" to None', - time + '"time" %s is expected to be a float, set "time" to None', time ) time = None xml_events = util.format_events( @@ -147,60 +148,58 @@ def splunk_xml(candidates, index=index, host=host, source=source, - sourcetype=sourcetype + sourcetype=sourcetype, ) - _logger.info( - "[%s] events are formated as splunk stream xml", - len(candidates) - ) + _logger.info("[%s] events are formated as splunk stream xml", len(candidates)) return xml_events def std_output(candidates): - """ Output a string to stdout. + """Output a string to stdout. :param candidates: List of string to output to stdout or a single string. """ - if isinstance(candidates, six.string_types): + if isinstance(candidates, str): candidates = [candidates] all_str = True for candidate in candidates: - if all_str and not isinstance(candidate, six.string_types): + if all_str and not isinstance(candidate, str): all_str = False _logger.debug( - 'The type of data needs to print is "%s" rather than %s', + 'The type of data needs to print is "%s" rather than str', type(candidate), - str(six.string_types) ) try: candidate = json.dumps(candidate) except: - _logger.exception('The type of data needs to print is "%s"' - ' rather than %s', - type(candidate), - str(six.string_types)) + _logger.exception( + 'The type of data needs to print is "%s"' " rather than str", + type(candidate), + ) if not PipeManager().write_events(candidate): - raise FuncException('Fail to output data to stdout. The event' - ' writer is stopped or encountered exception') + raise FuncException( + "Fail to output data to stdout. 
The event" + " writer is stopped or encountered exception" + ) - _logger.debug('Writing events to stdout finished.') + _logger.debug("Writing events to stdout finished.") return True def _parse_json(source, json_path_expr=None): if not source: - _logger.debug('Unable to parse JSON from empty source, return empty.') + _logger.debug("Unable to parse JSON from empty source, return empty.") return {} if json_path_expr: _logger.debug( - 'Try to extract JSON from source with JSONPATH expression: %s, ', - json_path_expr + "Try to extract JSON from source with JSONPATH expression: %s, ", + json_path_expr, ) source = json_path(source, json_path_expr) - elif isinstance(source, six.string_types): + elif isinstance(source, str): source = json.loads(source) return source @@ -221,8 +220,9 @@ def json_empty(source, json_path_expr=None): return len(data) == 0 except Exception as ex: _logger.warning( - 'Unable to determine whether source is json_empty, treat it as ' - 'not json_empty: %s', ex + "Unable to determine whether source is json_empty, treat it as " + "not json_empty: %s", + ex, ) return False @@ -243,9 +243,9 @@ def json_not_empty(source, json_path_expr=None): return len(data) > 0 except Exception as ex: _logger.warning( - 'Unable to determine whether source is json_not_empty, ' - 'treat it as not json_not_empty: %s', - ex + "Unable to determine whether source is json_not_empty, " + "treat it as not json_not_empty: %s", + ex, ) return False @@ -267,24 +267,25 @@ def _fix_microsecond_format(fmt, micros): def do_replacement(x, micros): if int(x.group(1)) in range(1, 7) and len(x.group()) % 2: - return x.group().replace('%' + x.group(1) + 'f', - micros[:min(int(x.group(1)), len(micros))]) + return x.group().replace( + "%" + x.group(1) + "f", micros[: min(int(x.group(1)), len(micros))] + ) return x.group() - return re.sub(r'%+([1-6])f', lambda x: do_replacement(x, micros), fmt) + return re.sub(r"%+([1-6])f", lambda x: do_replacement(x, micros), fmt) def _fix_timestamp_format(fmt, timestamp): """Replace '%s' in time format with timestamp if the number - of '%' before 's' is odd.""" + of '%' before 's' is odd.""" return re.sub( - r'%+s', + r"%+s", ( - lambda x: - x.group() if len(x.group()) % 2 else x.group().replace('%s', - timestamp) + lambda x: x.group() + if len(x.group()) % 2 + else x.group().replace("%s", timestamp) ), - fmt + fmt, ) @@ -292,11 +293,11 @@ def time_str2str(date_string, from_format, to_format): """Convert a date string with given format to another format. 
Return the original date string if it's type is not string or failed to parse or convert it with format.""" - if not isinstance(date_string, six.string_types): + if not isinstance(date_string, str): _logger.warning( '"date_string" must be a string type, found %s,' - ' return the original date_string directly.', - type(date_string) + " return the original date_string directly.", + type(date_string), ) return date_string @@ -314,18 +315,18 @@ def time_str2str(date_string, from_format, to_format): except Exception: _logger.warning( 'Unable to convert date_string "%s" from format "%s" to "%s",' - ' return the original date_string, cause=%s', + " return the original date_string, cause=%s", date_string, from_format, to_format, - traceback.format_exc() + traceback.format_exc(), ) return date_string def is_true(value): """Determine whether value is True""" - return str(value).strip().lower() == 'true' + return str(value).strip().lower() == "true" def exit_if_true(value): @@ -343,9 +344,7 @@ def exit_job_if_true(value): def assert_true(value, message=None): """Assert value is True""" if not is_true(value): - raise AssertionError( - message or '"{value}" is not true'.format(value=value) - ) + raise AssertionError(message or f'"{value}" is not true') def split_by(source, target, separator=None): @@ -353,43 +352,45 @@ def split_by(source, target, separator=None): try: if not source: return [] - elif isinstance(source, six.string_types) and separator: + elif isinstance(source, str) and separator: values = source.split(separator) return [{target: value.strip()} for value in values] - elif isinstance(source, six.string_types): + elif isinstance(source, str): return [{target: source}] elif isinstance(source, Iterable): return [{target: value} for value in source] else: return [{target: source}] except Exception as ex: - _logger.warning("split_by method encountered exception " - "source=%s message=%s cause=%s", source, ex, - traceback.format_exc()) + _logger.warning( + "split_by method encountered exception " "source=%s message=%s cause=%s", + source, + ex, + traceback.format_exc(), + ) return [] _extension_functions = { - 'assert_true': assert_true, - 'exit_if_true': exit_if_true, - 'exit_job_if_true': exit_job_if_true, - 'is_true': is_true, - 'regex_match': regex_match, - 'regex_not_match': regex_not_match, - 'regex_search': regex_search, - 'set_var': set_var, - 'splunk_xml': splunk_xml, - 'std_output': std_output, - 'json_path': json_path, - 'json_empty': json_empty, - 'json_not_empty': json_not_empty, - 'time_str2str': time_str2str, - 'split_by': split_by + "assert_true": assert_true, + "exit_if_true": exit_if_true, + "exit_job_if_true": exit_job_if_true, + "is_true": is_true, + "regex_match": regex_match, + "regex_search": regex_search, + "set_var": set_var, + "splunk_xml": splunk_xml, + "std_output": std_output, + "json_path": json_path, + "json_empty": json_empty, + "json_not_empty": json_not_empty, + "time_str2str": time_str2str, + "split_by": split_by, } def lookup_method(name): - """ Find a predefined function with given function name. + """Find a predefined function with given function name. :param name: function name. :return: A function with given name. 
""" diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/core/http.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/core/http.py index 761d0ce4..e9d4675c 100755 --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/core/http.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/core/http.py @@ -1,72 +1,73 @@ -from builtins import str -from builtins import range -from builtins import object +# +# Copyright 2021 Splunk Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# import time import traceback -import six +import requests + import munch +from requests import PreparedRequest, Session, utils +from solnlib.utils import is_true from cloudconnectlib.common import util from cloudconnectlib.common.log import get_cc_logger from cloudconnectlib.core import defaults from cloudconnectlib.core.exceptions import HTTPError -from httplib2 import Http, socks, ProxyInfo -from solnlib.packages.requests import PreparedRequest, utils -from solnlib.utils import is_true - -try: # Python2 environment support - from httplib2 import SSLHandshakeError -except: # Python3 environment support - from ssl import SSLError as SSLHandshakeError _logger = get_cc_logger() -_PROXY_TYPE_MAP = { - 'http': socks.PROXY_TYPE_HTTP, - 'http_no_tunnel': socks.PROXY_TYPE_HTTP_NO_TUNNEL, - 'socks4': socks.PROXY_TYPE_SOCKS4, - 'socks5': socks.PROXY_TYPE_SOCKS5, -} - -class HTTPResponse(object): +class HTTPResponse: """ HTTPResponse class wraps response of HTTP request for later use. 
""" def __init__(self, response, content): """Construct a HTTPResponse from response and content returned - with httplib2 request""" - self._status_code = response.status + with requests.Session() request""" + self._status_code = response.status_code self._header = response - self._body = self._decode_content(response, content) + self._body = self._decode_content(response.headers, content) @staticmethod def _decode_content(response, content): if not content: - return '' + return "" charset = utils.get_encoding_from_headers(response) if charset is None: charset = defaults.charset _logger.info( - 'Unable to find charset in response headers,' - ' set it to default "%s"', charset + "Unable to find charset in response headers," ' set it to default "%s"', + charset, ) - _logger.info('Decoding response content with charset=%s', charset) + _logger.info("Decoding response content with charset=%s", charset) try: - return content.decode(charset, errors='replace') + return content.decode(charset, errors="replace") except Exception as ex: _logger.warning( - 'Failure decoding response content with charset=%s,' - ' decode it with utf-8: %s', - charset, ex + "Failure decoding response content with charset=%s," + " decode it with utf-8: %s", + charset, + ex, ) - return content.decode('utf-8', errors='replace') + return content.decode("utf-8", errors="replace") @property def header(self): @@ -103,139 +104,160 @@ def prepare_url(url, params=None): return prepare_url -def get_proxy_info(proxy_config): - if not proxy_config or not is_true(proxy_config.get('proxy_enabled')): - _logger.info('Proxy is not enabled') - return None +def get_proxy_info(proxy_config: dict) -> dict: + """ + @proxy_config: dict like object of the format - + { + "proxy_url": my-proxy.server.com, + "proxy_port": 0000, + "proxy_username": username, + "proxy_password": password, + "proxy_type": http or sock5, + "proxy_rdns": 0 or 1, + } + """ + proxy_info = {} + + if not proxy_config or not is_true(proxy_config.get("proxy_enabled")): + _logger.info("Proxy is not enabled") + return {} + + proxy_type = proxy_config.get("proxy_type", "").lower() + if proxy_type not in ("http", "socks5"): + proxy_type = "http" + _logger.info('Proxy type not found, set to "HTTP"') + + if is_true(proxy_config.get("proxy_rdns")) and proxy_type == "socks5": + proxy_type = "socks5h" - url = proxy_config.get('proxy_url') - port = proxy_config.get('proxy_port') + url = proxy_config.get("proxy_url") + port = proxy_config.get("proxy_port") if url or port: if not url: raise ValueError('Proxy "url" must not be empty') if not util.is_valid_port(port): - raise ValueError( - 'Proxy "port" must be in range [1,65535]: %s' % port - ) - - user = proxy_config.get('proxy_username') - password = proxy_config.get('proxy_password') + raise ValueError('Proxy "port" must be in range [1,65535]: %s' % port) - if not all((user, password)): - _logger.info('Proxy has no credentials found') - user, password = None, None + proxy_info["http"] = f"{proxy_type}://{url}:{int(port)}" + user = proxy_config.get("proxy_username") + password = proxy_config.get("proxy_password") - proxy_type = proxy_config.get('proxy_type') - proxy_type = proxy_type.lower() if proxy_type else 'http' - - if proxy_type in _PROXY_TYPE_MAP: - ptv = _PROXY_TYPE_MAP[proxy_type] - elif proxy_type in list(_PROXY_TYPE_MAP.values()): - ptv = proxy_type + if all((user, password)): + proxy_info["http"] = f"{proxy_type}://{user}:{password}@{url}:{int(port)}" else: - ptv = socks.PROXY_TYPE_HTTP - _logger.info('Proxy type not found, 
set to "HTTP"') + _logger.info("Proxy has no credentials found") - rdns = is_true(proxy_config.get('proxy_rdns')) - - proxy_info = ProxyInfo( - proxy_host=url, - proxy_port=int(port), - proxy_type=ptv, - proxy_user=user, - proxy_pass=password, - proxy_rdns=rdns - ) + proxy_info["https"] = proxy_info["http"] return proxy_info + def standardize_proxy_config(proxy_config): """ - This function is used to standardize the proxy information structure to get it evaluated through `get_proxy_info` function + This function is used to standardize the proxy information structure to + get it evaluated through `get_proxy_info` function """ if not isinstance(proxy_config, dict): - raise ValueError("Received unexpected format of proxy configuration. Expected format: object, Actual format: {}".format(type(proxy_config))) + raise ValueError( + "Received unexpected format of proxy configuration. " + "Expected format: object, Actual format: {}".format(type(proxy_config)) + ) standard_proxy_config = { "proxy_enabled": proxy_config.get("enabled", proxy_config.get("proxy_enabled")), - "proxy_username": proxy_config.get("username", proxy_config.get("proxy_username")), - "proxy_password": proxy_config.get("password", proxy_config.get("proxy_password")), + "proxy_username": proxy_config.get( + "username", proxy_config.get("proxy_username") + ), + "proxy_password": proxy_config.get( + "password", proxy_config.get("proxy_password") + ), "proxy_url": proxy_config.get("host", proxy_config.get("proxy_url")), "proxy_type": proxy_config.get("type", proxy_config.get("proxy_type")), "proxy_port": proxy_config.get("port", proxy_config.get("proxy_port")), - "proxy_rdns": proxy_config.get("rdns", proxy_config.get("proxy_rdns")) + "proxy_rdns": proxy_config.get("rdns", proxy_config.get("proxy_rdns")), } return standard_proxy_config -class HttpClient(object): - def __init__(self, proxy_info=None): - """Constructs a `HTTPRequest` with a optional proxy setting. +class HttpClient: + def __init__(self, proxy_info=None, verify=True): + """ + Constructs a `HTTPRequest` with a optional proxy setting. + :param proxy_info: a dictionary of proxy details. It could directly match the input signature + of `requests` library, otherwise will be standardized and converted to match the input signature. + :param verify: same as the `verify` parameter of requests.request() method """ self._connection = None - + self.requests_verify = verify + if proxy_info: if isinstance(proxy_info, munch.Munch): proxy_info = dict(proxy_info) - # Updating the proxy_info object to make it compatible for getting evaluated through `get_proxy_info` function - proxy_info = standardize_proxy_config(proxy_info) - self._proxy_info = get_proxy_info(proxy_info) + if all((len(proxy_info) == 2, "http" in proxy_info, "https" in proxy_info)): + # when `proxy_info` already matches the input signature of `requests` library's proxy dict + self._proxy_info = proxy_info + else: + # Updating the proxy_info object to make it compatible for getting evaluated + # through `get_proxy_info` function + proxy_info = standardize_proxy_config(proxy_info) + self._proxy_info = get_proxy_info(proxy_info) else: self._proxy_info = proxy_info self._url_preparer = PreparedRequest() - def _send_internal(self, uri, method, headers=None, body=None, proxy_info=None): - """Do send request to target URL and validate SSL cert by default. 
- If validation failed, disable it and try again.""" - try: - return self._connection.request( - uri, body=body, method=method, headers=headers - ) - except SSLHandshakeError: - _logger.warning( - "[SSL: CERTIFICATE_VERIFY_FAILED] certificate verification failed. " - "The certificate of the https server [%s] is not trusted, " - "this add-on will proceed to connect with this certificate. " - "You may need to check the certificate and " - "refer to the documentation and add it to the trust list. %s", - uri, - traceback.format_exc() - ) - - self._connection = self._build_http_connection( - proxy_info=proxy_info, - disable_ssl_cert_validation=True - ) - return self._connection.request( - uri, body=body, method=method, headers=headers - ) + def _send_internal(self, uri, method, headers=None, body=None): + """Do send request to target URL, validate SSL cert by default and return the response.""" + return self._connection.request( + url=uri, + data=body, + method=method, + headers=headers, + timeout=defaults.timeout, + verify=self.requests_verify, + ) - def _retry_send_request_if_needed(self, uri, method='GET', headers=None, body=None): + def _retry_send_request_if_needed(self, uri, method="GET", headers=None, body=None): """Invokes request and auto retry with an exponential backoff if the response status is configured in defaults.retry_statuses.""" retries = max(defaults.retries, 0) - _logger.info('Invoking request to [%s] using [%s] method', uri, method) + _logger.info("Invoking request to [%s] using [%s] method", uri, method) for i in range(retries + 1): try: - response, content = self._send_internal( + resp = self._send_internal( uri=uri, body=body, method=method, headers=headers ) + content = resp.content + response = resp + except requests.exceptions.SSLError as err: + _logger.error( + "[SSL: CERTIFICATE_VERIFY_FAILED] certificate verification failed. " + "The certificate of the https server [%s] is not trusted, " + "You may need to check the certificate and " + "refer to the documentation and add it to the trust list. %s", + uri, + traceback.format_exc(), + ) + raise HTTPError(f"HTTP Error {err}") from err except Exception as err: _logger.exception( - 'Could not send request url=%s method=%s', uri, method) - raise HTTPError('HTTP Error %s' % str(err)) + "Could not send request url=%s method=%s", uri, method + ) + raise HTTPError(f"HTTP Error {err}") from err - status = response.status + status = resp.status_code if self._is_need_retry(status, i, retries): - delay = 2 ** i + delay = 2**i _logger.warning( - 'The response status=%s of request which url=%s and' - ' method=%s. Retry after %s seconds.', - status, uri, method, delay, + "The response status=%s of request which url=%s and" + " method=%s. 
Retry after %s seconds.", + status, + uri, + method, + delay, ) time.sleep(delay) continue @@ -248,16 +270,16 @@ def _prepare_url(self, url, params=None): def _initialize_connection(self): if self._proxy_info: - _logger.info('Proxy is enabled for http connection.') + _logger.info("Proxy is enabled for http connection.") else: - _logger.info('Proxy is not enabled for http connection.') + _logger.info("Proxy is not enabled for http connection.") self._connection = self._build_http_connection(self._proxy_info) def send(self, request): if not request: - raise ValueError('The request is none') - if request.body and not isinstance(request.body, six.string_types): - raise TypeError('Invalid request body type: {}'.format(request.body)) + raise ValueError("The request is none") + if request.body and not isinstance(request.body, str): + raise TypeError(f"Invalid request body type: {request.body}") if self._connection is None: self._initialize_connection() @@ -266,8 +288,7 @@ def send(self, request): url = self._prepare_url(request.url) except Exception: _logger.warning( - 'Failed to encode url=%s: %s', - request.url, traceback.format_exc() + "Failed to encode url=%s: %s", request.url, traceback.format_exc() ) url = request.url @@ -277,15 +298,18 @@ def send(self, request): @staticmethod def _build_http_connection( - proxy_info=None, - timeout=defaults.timeout, - disable_ssl_cert_validation=defaults.disable_ssl_cert_validation): - return Http( - proxy_info=proxy_info, - timeout=timeout, - disable_ssl_certificate_validation=disable_ssl_cert_validation) + proxy_info=None, + disable_ssl_cert_validation=defaults.disable_ssl_cert_validation, + ): + """ + Creates a `request.Session()` object, sets the verify + and proxy_info parameter and returns this object + """ + s = Session() + s.verify = not disable_ssl_cert_validation + s.proxies = proxy_info or {} + return s @staticmethod def _is_need_retry(status, retried, maximum_retries): - return retried < maximum_retries \ - and status in defaults.retry_statuses + return retried < maximum_retries and status in defaults.retry_statuses diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/core/job.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/core/job.py old mode 100755 new mode 100644 index 49c111b1..090eacd9 --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/core/job.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/core/job.py @@ -1,15 +1,28 @@ -from builtins import object +# +# Copyright 2021 Splunk Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# import threading +from ..common import log from .exceptions import QuitJobError from .task import BaseTask -from ..common import log - logger = log.get_cc_logger() -class CCEJob(object): +class CCEJob: """ One CCEJob is composed of a list of tasks. The task could be HTTP task or Split task(currently supported task types). 
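In short, `_build_http_connection` now hands back a configured `requests.Session` rather than an `httplib2.Http`; `verify` and `proxies` live on the session, while per-request options such as `timeout` move to `request()` time. Condensed to its essentials:

# --- illustrative sketch (not part of the patch) ---
from requests import Session

def build_session(proxies=None, verify=True):
    """Roughly what _build_http_connection returns after the migration."""
    s = Session()
    s.verify = verify          # replaces disable_ssl_certificate_validation
    s.proxies = proxies or {}  # e.g. {"http": "http://proxy:3128", "https": "..."}
    return s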
@@ -47,15 +60,17 @@ def set_proxy(self, proxy_setting): :type proxy_setting: ``dict`` """ self._proxy_info = proxy_setting - logger.debug("CCEJob proxy info: proxy_enabled='%s', proxy_url='%s', " - "proxy_port='%s', proxy_rdns='%s', proxy_type='%s', " - "proxy_username='%s'", - proxy_setting.get("proxy_enabled"), - proxy_setting.get("proxy_url"), - proxy_setting.get("proxy_port"), - proxy_setting.get("proxy_rdns"), - proxy_setting.get("proxy_type"), - proxy_setting.get("proxy_username")) + logger.debug( + "CCEJob proxy info: proxy_enabled='%s', proxy_url='%s', " + "proxy_port='%s', proxy_rdns='%s', proxy_type='%s', " + "proxy_username='%s'", + proxy_setting.get("proxy_enabled"), + proxy_setting.get("proxy_url"), + proxy_setting.get("proxy_port"), + proxy_setting.get("proxy_rdns"), + proxy_setting.get("proxy_type"), + proxy_setting.get("proxy_username"), + ) def add_task(self, task): """ @@ -65,14 +80,14 @@ def add_task(self, task): :type task: TBD """ if not isinstance(task, BaseTask): - raise ValueError('Unsupported task type: {}'.format(type(task))) + raise ValueError(f"Unsupported task type: {type(task)}") if callable(getattr(task, "set_proxy", None)) and self._proxy_info: task.set_proxy(self._proxy_info) self._rest_tasks.append(task) def _check_if_stop_needed(self): if self._stop_signal_received: - logger.info('Stop job signal received, stopping job.') + logger.info("Stop job signal received, stopping job.") self._stopped.set() return True return False @@ -81,10 +96,10 @@ def run(self): """ Run current job, which executes tasks in it sequentially. """ - logger.debug('Start to run job') + logger.debug("Start to run job") if not self._rest_tasks: - logger.info('No task found in job') + logger.info("No task found in job") return if self._check_if_stop_needed(): @@ -96,29 +111,35 @@ def run(self): try: contexts = list(self._running_task.perform(self._context) or ()) except QuitJobError: - logger.info('Quit job signal received, exiting job') + logger.info("Quit job signal received, exiting job") return if self._check_if_stop_needed(): return if not self._rest_tasks: - logger.info('No more task need to perform, exiting job') + logger.info("No more task need to perform, exiting job") return - jobs = [CCEJob(context=ctx, tasks=self._rest_tasks) for ctx in contexts] + count = 0 + + for ctx in contexts: + count += 1 + yield CCEJob(context=ctx, tasks=self._rest_tasks) + + if self._check_if_stop_needed(): + break - logger.debug('Generated %s job in total', len(jobs)) - logger.debug('Job execution finished successfully.') + logger.debug("Generated %s job in total", count) + logger.debug("Job execution finished successfully.") self._stopped.set() - return jobs def stop(self, block=False, timeout=30): """ Stop current job. 
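Aside: the `run()` rewrite above replaces an eagerly built list of child jobs with a generator that yields them one at a time and checks the stop signal between yields. The same pattern in isolation, with illustrative names:

# --- illustrative sketch (not part of the patch) ---
import threading

def spawn_jobs(contexts, stop_event, make_job=dict):
    """Yield one child job per context, honouring a stop signal lazily."""
    count = 0
    for ctx in contexts:
        count += 1
        yield make_job(context=ctx)  # stand-in for CCEJob(context=ctx, ...)
        if stop_event.is_set():
            break
    print("Generated %s job in total" % count)

stop = threading.Event()
jobs = list(spawn_jobs(["ctx-a", "ctx-b"], stop))  # nothing runs until iterated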
""" if self._stopped.is_set(): - logger.info('Job is not running, cannot stop it.') + logger.info("Job is not running, cannot stop it.") return self._stop_signal_received = True @@ -128,12 +149,12 @@ def stop(self, block=False, timeout=30): return if not self._stopped.wait(timeout): - logger.info('Waiting for stop job timeout') + logger.info("Waiting for stop job timeout") def __str__(self): if self._running_task: - return 'Job(running task={})'.format(self._running_task) - return 'Job(no running task)' + return f"Job(running task={self._running_task})" + return "Job(no running task)" def __repr__(self): return self.__str__() diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/core/models.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/core/models.py index 82e09004..603e1d07 100755 --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/core/models.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/core/models.py @@ -1,19 +1,31 @@ -from builtins import str -from builtins import object +# +# Copyright 2021 Splunk Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# import base64 import json import sys import traceback -import six +from ..common.log import get_cc_logger from .ext import lookup_method from .template import compile_template -from ..common.log import get_cc_logger _logger = get_cc_logger() -class _Token(object): +class _Token: """Token class wraps a template expression""" def __init__(self, source): @@ -21,8 +33,7 @@ def __init__(self, source): will be created if source is string type because Jinja template must be a string.""" self._source = source - self._value_for = compile_template(source) \ - if isinstance(source, six.string_types) else None + self._value_for = compile_template(source) if isinstance(source, str) else None def render(self, variables): """Render value with variables if source is a string. @@ -34,31 +45,30 @@ def render(self, variables): except Exception as ex: _logger.warning( 'Unable to render template "%s". Please make sure template is' - ' a valid Jinja2 template and token is exist in variables. ' - 'message=%s cause=%s', + " a valid Jinja2 template and token is exist in variables. 
" + "message=%s cause=%s", self._source, ex, - traceback.format_exc() + traceback.format_exc(), ) return self._source -class DictToken(object): +class DictToken: """DictToken wraps a dict which value is template expression""" def __init__(self, template_expr): - self._tokens = {k: _Token(v) - for k, v in (template_expr or {}).items()} + self._tokens = {k: _Token(v) for k, v in (template_expr or {}).items()} def render(self, variables): return {k: v.render(variables) for k, v in self._tokens.items()} -class BaseAuth(object): +class BaseAuth: """A base class for all authorization classes""" def __call__(self, headers, context): - raise NotImplementedError('Auth must be callable.') + raise NotImplementedError("Auth must be callable.") class BasicAuthorization(BaseAuth): @@ -66,19 +76,19 @@ class BasicAuthorization(BaseAuth): def __init__(self, options): if not options: - raise ValueError('Options for basic auth unexpected to be empty') + raise ValueError("Options for basic auth unexpected to be empty") - username = options.get('username') + username = options.get("username") if not username: - raise ValueError('Username is mandatory for basic auth') - password = options.get('password') + raise ValueError("Username is mandatory for basic auth") + password = options.get("password") if not password: - raise ValueError('Password is mandatory for basic auth') + raise ValueError("Password is mandatory for basic auth") self._username = _Token(username) self._password = _Token(password) - def to_native_string(self, string, encoding='ascii'): + def to_native_string(self, string, encoding="ascii"): """ According to rfc7230: Historically, HTTP has allowed field content with text in the @@ -89,8 +99,8 @@ def to_native_string(self, string, encoding='ascii'): US-ASCII octets. A recipient SHOULD treat other octets in field content (obs-text) as opaque data. 
""" - is_py2 = (sys.version_info[0] == 2) - if isinstance(string, six.text_type): + is_py2 = sys.version_info[0] == 2 + if isinstance(string, str): out = string else: if is_py2: @@ -103,12 +113,15 @@ def to_native_string(self, string, encoding='ascii'): def __call__(self, headers, context): username = self._username.render(context) password = self._password.render(context) - headers['Authorization'] = 'Basic %s' % self.to_native_string( - base64.b64encode((username + ':' + password).encode('latin1')) - ).strip() + headers["Authorization"] = ( + "Basic %s" + % self.to_native_string( + base64.b64encode((username + ":" + password).encode("latin1")) + ).strip() + ) -class RequestParams(object): +class RequestParams: def __init__(self, url, method, header=None, auth=None, body=None): self._header = DictToken(header) self._url = _Token(url) @@ -141,7 +154,7 @@ def render(self, ctx): url=self._url.render(ctx), method=self._method, headers=self.normalize_headers(ctx), - body=self.body.render(ctx) + body=self.body.render(ctx), ) def normalize_url(self, context): @@ -159,19 +172,19 @@ def normalize_body(self, context): return self.body.render(context) -class Request(object): +class Request: def __init__(self, method, url, headers, body): self.method = method self.url = url self.headers = headers if not body: body = None - elif not isinstance(body, six.string_types): + elif not isinstance(body, str): body = json.dumps(body) self.body = body -class _Function(object): +class _Function: def __init__(self, inputs, function): self._inputs = tuple(_Token(expr) for expr in inputs or []) self._function = function @@ -196,7 +209,7 @@ class Task(_Function): """Task class wraps a task in processor pipeline""" def __init__(self, inputs, function, output=None): - super(Task, self).__init__(inputs, function) + super().__init__(inputs, function) self._output = output @property @@ -204,14 +217,16 @@ def output(self): return self._output def execute(self, context): - """Execute task with arguments which rendered from context """ + """Execute task with arguments which rendered from context""" args = [arg for arg in self.inputs_values(context)] caller = lookup_method(self.function) output = self._output _logger.info( - 'Executing task method: [%s], input size: [%s], output: [%s]', - self.function, len(args), output + "Executing task method: [%s], input size: [%s], output: [%s]", + self.function, + len(args), + output, ) if output is None: @@ -234,8 +249,9 @@ def calculate(self, context): callable_method = lookup_method(self.function) _logger.debug( - 'Calculating condition with method: [%s], input size: [%s]', - self.function, len(args) + "Calculating condition with method: [%s], input size: [%s]", + self.function, + len(args), ) result = callable_method(*args) @@ -245,7 +261,7 @@ def calculate(self, context): return result -class _Conditional(object): +class _Conditional: """A base class for all conditional action""" def __init__(self, conditions): @@ -260,16 +276,14 @@ def passed(self, context): :param context: variables to render template :return: `True` if all passed else `False` """ - return any( - condition.calculate(context) for condition in self._conditions - ) + return any(condition.calculate(context) for condition in self._conditions) class Processor(_Conditional): """Processor class contains a conditional data process pipeline""" def __init__(self, skip_conditions, pipeline): - super(Processor, self).__init__(skip_conditions) + super().__init__(skip_conditions) self._pipeline = pipeline or [] @property @@ 
-283,7 +297,7 @@ def should_skipped(self, context): class IterationMode(_Conditional): def __init__(self, iteration_count, conditions): - super(IterationMode, self).__init__(conditions) + super().__init__(conditions) self._iteration_count = iteration_count @property @@ -295,14 +309,14 @@ def conditions(self): return self._conditions -class Checkpoint(object): +class Checkpoint: """A checkpoint includes a namespace to determine the checkpoint location and a content defined the format of content stored in checkpoint.""" def __init__(self, namespace, content): - """Constructs checkpoint with given namespace and content template. """ + """Constructs checkpoint with given namespace and content template.""" if not content: - raise ValueError('Checkpoint content must not be empty') + raise ValueError("Checkpoint content must not be empty") self._namespace = tuple(_Token(expr) for expr in namespace or ()) self._content = DictToken(content) diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/core/pipemgr.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/core/pipemgr.py index 97f2744b..e62ac544 100755 --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/core/pipemgr.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/core/pipemgr.py @@ -1,15 +1,27 @@ -from __future__ import print_function -from builtins import object +# +# Copyright 2021 Splunk Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# from solnlib.pattern import Singleton -from future.utils import with_metaclass -class PipeManager(with_metaclass(Singleton, object)): +class PipeManager(metaclass=Singleton): def __init__(self, event_writer=None): self._event_writer = event_writer def write_events(self, events): if not self._event_writer: - print(events) + print(events, flush=True) return True return self._event_writer.write_events(events) diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/core/plugin.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/core/plugin.py old mode 100755 new mode 100644 index 5c4fc619..c40451f5 --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/core/plugin.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/core/plugin.py @@ -1,19 +1,33 @@ -from __future__ import absolute_import -from builtins import next -from .ext import _extension_functions +# +# Copyright 2021 Splunk Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
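Aside: `PipeManager` now declares `metaclass=Singleton` natively instead of going through `future`'s `with_metaclass` shim. The contract is one shared instance per class; a generic sketch of such a metaclass (illustrative, not solnlib's actual implementation):

# --- illustrative sketch (not part of the patch) ---
class Singleton(type):
    """Metaclass: every instantiation returns the same cached object."""
    _instances = {}

    def __call__(cls, *args, **kwargs):
        if cls not in cls._instances:
            cls._instances[cls] = super().__call__(*args, **kwargs)
        return cls._instances[cls]

class PipeManagerDemo(metaclass=Singleton):
    def __init__(self, event_writer=None):
        self._event_writer = event_writer

assert PipeManagerDemo() is PipeManagerDemo()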
+# +import importlib +import sys +import traceback from os import path as op from os import walk -import sys + from ..common import log -import traceback -import importlib +from .ext import _extension_functions logger = log.get_cc_logger() def cce_pipeline_plugin(func): """ - Decorator for pipepline plugin functions. + Decorator for pipeline plugin functions. This docorator helps to register user defined pipeline function into CCE engine so that it could be looked up when executing jobs. @@ -27,19 +41,23 @@ def cce_pipeline_plugin(func): >>> do_work() """ if not callable(func): - logger.debug("Function %s is not callable, don't add it as a pipeline" - " function", func.__name__) + logger.debug( + "Function %s is not callable, don't add it as a pipeline function", + func.__name__, + ) else: if func.__name__ in list(_extension_functions.keys()): - logger.warning("Pipeline function %s already exists, please rename" - "it!", func.__name__) + logger.warning( + "Pipeline function %s already exists, please rename it!", + func.__name__, + ) else: _extension_functions[func.__name__] = func - logger.debug("Added function %s to pipeline plugin system", - func.__name__) + logger.debug("Added function %s to pipeline plugin system", func.__name__) def pipeline_func(*args, **kwargs): return func(*args, **kwargs) + return pipeline_func @@ -53,21 +71,24 @@ def import_plugin_file(file_name): if file_name.endswith(".py"): module_name = file_name[:-3] else: - logger.warning("Plugin file %s is with unsupported extenstion, the " - "supported are py", file_name) + logger.warning( + "Plugin file %s is with unsupported extension, the supported are py", + file_name, + ) return if module_name in list(sys.modules.keys()): - logger.warning("Module %s aleady exists and it won't be reload, " - "please rename your plugin module if it is required.", - module_name) + logger.debug( + "Module %s already exists and it won't be reload, " + "please rename your plugin module if it is required.", + module_name, + ) return try: importlib.import_module(module_name) except Exception: - logger.warning("Failed to load module {}, {}".format( - module_name, traceback.format_exc())) + logger.warning(f"Failed to load module {module_name}, {traceback.format_exc()}") return logger.info("Module %s is imported", module_name) @@ -83,11 +104,14 @@ def init_pipeline_plugins(plugin_dir): with ".py" """ if not op.isdir(plugin_dir): - logger.warning("%s is not a directory! Pipeline plugin files won't be loaded.", - plugin_dir) + logger.warning( + "%s is not a directory! Pipeline plugin files won't be loaded.", plugin_dir + ) return - sys.path.append(plugin_dir) + if plugin_dir not in sys.path: + sys.path.append(plugin_dir) + for file_name in next(walk(plugin_dir))[2]: if file_name == "__init__.py" or not file_name.startswith("cce_plugin_"): continue diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/core/task.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/core/task.py old mode 100755 new mode 100644 index 2a516664..e8246eb0 --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/core/task.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/core/task.py @@ -1,27 +1,42 @@ -from builtins import object +# +# Copyright 2021 Splunk Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
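Aside: `cce_pipeline_plugin` above is a registration decorator: it records the wrapped function in `_extension_functions` so it can later be looked up by name when jobs execute. Stripped to its essentials; the registry name below is a stand-in:

# --- illustrative sketch (not part of the patch) ---
_registry = {}  # stand-in for cloudconnectlib.core.ext._extension_functions

def pipeline_plugin(func):
    """Register func under its own name; warn instead of overwriting."""
    if func.__name__ in _registry:
        print(f"Pipeline function {func.__name__} already exists, please rename it!")
    else:
        _registry[func.__name__] = func

    def pipeline_func(*args, **kwargs):
        return func(*args, **kwargs)

    return pipeline_func

@pipeline_plugin
def do_work():
    return "done"

assert _registry["do_work"]() == "done"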
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# import copy import threading from abc import abstractmethod -import six from cloudconnectlib.common.log import get_cc_logger from cloudconnectlib.core import defaults from cloudconnectlib.core.checkpoint import CheckpointManagerAdapter -from cloudconnectlib.core.exceptions import HTTPError -from cloudconnectlib.core.exceptions import StopCCEIteration, CCESplitError +from cloudconnectlib.core.exceptions import ( + CCESplitError, + HTTPError, + QuitJobError, + StopCCEIteration, +) from cloudconnectlib.core.ext import lookup_method -from cloudconnectlib.core.http import get_proxy_info, HttpClient -from cloudconnectlib.core.models import DictToken, _Token, BasicAuthorization, Request +from cloudconnectlib.core.http import HttpClient, get_proxy_info +from cloudconnectlib.core.models import BasicAuthorization, DictToken, Request, _Token logger = get_cc_logger() -_RESPONSE_KEY = '__response__' -_AUTH_TYPES = { - 'basic_auth': BasicAuthorization -} +_RESPONSE_KEY = "__response__" +_AUTH_TYPES = {"basic_auth": BasicAuthorization} -class ProcessHandler(object): +class ProcessHandler: def __init__(self, method, arguments, output): self.method = method self.arguments = [_Token(arg) for arg in arguments or ()] @@ -29,7 +44,7 @@ def __init__(self, method, arguments, output): def execute(self, context): args = [arg.render(context) for arg in self.arguments] - logger.debug('%s arguments found for method %s', len(args), self.method) + logger.debug("%s arguments found for method %s", len(args), self.method) callable_method = lookup_method(self.method) result = callable_method(*args) @@ -40,7 +55,7 @@ def execute(self, context): return data -class Condition(object): +class Condition: def __init__(self, method, arguments): self.method = method self.arguments = [_Token(arg) for arg in arguments or ()] @@ -48,11 +63,11 @@ def __init__(self, method, arguments): def is_meet(self, context): args = [arg.render(context) for arg in self.arguments] callable_method = lookup_method(self.method) - logger.debug('%s arguments found for method %s', len(args), self.method) + logger.debug("%s arguments found for method %s", len(args), self.method) return callable_method(*args) -class ConditionGroup(object): +class ConditionGroup: def __init__(self): self._conditions = [] @@ -60,12 +75,10 @@ def add(self, condition): self._conditions.append(condition) def is_meet(self, context): - return any( - cdn.is_meet(context) for cdn in self._conditions - ) + return any(cdn.is_meet(context) for cdn in self._conditions) -class ProxyTemplate(object): +class ProxyTemplate: def __init__(self, proxy_setting): self._proxy = DictToken(proxy_setting or {}) @@ -74,31 +87,31 @@ def render(self, context): return get_proxy_info(rendered) -class RequestTemplate(object): +class RequestTemplate: def __init__(self, request): if not request: - raise ValueError('The request is none') - url = request.get('url') + raise ValueError("The request is none") + url = request.get("url") if not url: raise ValueError("The request doesn't contain a url or it's empty") self.url = _Token(url) - self.nextpage_url = _Token(request.get('nextpage_url', url)) - 
self.headers = DictToken(request.get('headers', {})) + self.nextpage_url = _Token(request.get("nextpage_url", url)) + self.headers = DictToken(request.get("headers", {})) # Request body could be string or dict - body = request.get('body') + body = request.get("body") if isinstance(body, dict): self.body = DictToken(body) - elif isinstance(body, six.string_types): + elif isinstance(body, str): self.body = _Token(body) else: if body: - logger.warning('Invalid request body: %s', body) + logger.warning("Invalid request body: %s", body) self.body = None - method = request.get('method', 'GET') - if not method or method.upper() not in ('GET', 'POST'): - raise ValueError('Unsupported value for request method: {}'.format(method)) + method = request.get("method", "GET") + if not method or method.upper() not in ("GET", "POST"): + raise ValueError(f"Unsupported value for request method: {method}") self.method = _Token(method) self.count = 0 @@ -117,11 +130,11 @@ def render(self, context): url=url, method=self.method.render(context), headers=self.headers.render(context), - body=self.body.render(context) if self.body else None + body=self.body.render(context) if self.body else None, ) -class BaseTask(object): +class BaseTask: def __init__(self, name): self._name = name self._pre_process_handler = [] @@ -143,6 +156,16 @@ def add_preprocess_handler(self, method, input, output=None): handler = ProcessHandler(method, input, output) self._pre_process_handler.append(handler) + def add_preprocess_handler_batch(self, handlers): + """ + Add multiple preprocess handlers. All handlers will be maintained and + executed sequentially. + :param handlers: preprocess handler list + :type handlers: tuple + """ + for method, args, output in handlers: + self.add_preprocess_handler(method, args, output) + def add_preprocess_skip_condition(self, method, input): """ Add a preprocess skip condition. The skip_conditions for preprocess @@ -171,6 +194,16 @@ def add_postprocess_handler(self, method, input, output=None): handler = ProcessHandler(method, input, output) self._post_process_handler.append(handler) + def add_postprocess_handler_batch(self, handlers): + """ + Add multiple postprocess handlers. All handlers will be maintained and + executed sequentially. + :param handlers: postprocess handler list + :type handlers: tuple + """ + for method, args, output in handlers: + self.add_postprocess_handler(method, args, output) + def add_postprocess_skip_condition(self, method, input): """ Add a preprocess skip condition. The skip_conditions for postprocess @@ -188,10 +221,10 @@ def add_postprocess_skip_condition(self, method, input): @staticmethod def _execute_handlers(skip_conditions, handlers, context, phase): if skip_conditions.is_meet(context): - logger.debug('%s process skip conditions are met', phase.capitalize()) + logger.debug("%s process skip conditions are met", phase.capitalize()) return if not handlers: - logger.debug('No handler found in %s process', phase) + logger.debug("No handler found in %s process", phase) return for handler in handlers: @@ -199,19 +232,23 @@ def _execute_handlers(skip_conditions, handlers, context, phase): if data: # FIXME context.update(data) - logger.debug('Execute handlers finished successfully.') + if context.get("is_token_refreshed"): + # In case of OAuth flow after refreshing access token retrying again with the query to collect records + logger.info( + "The access token is refreshed hence skipping the rest post process handler tasks. Retrying again." 
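Aside: the new `add_preprocess_handler_batch` / `add_postprocess_handler_batch` helpers take an iterable of `(method, args, output)` triples and fan out to the existing single-handler methods. A self-contained mock of that fan-out; the method names in the sample triples are illustrative:

# --- illustrative sketch (not part of the patch) ---
class DemoTask:
    """Minimal stand-in showing the batch-to-single-handler fan-out."""

    def __init__(self):
        self.handlers = []

    def add_postprocess_handler(self, method, args, output=None):
        self.handlers.append((method, args, output))

    def add_postprocess_handler_batch(self, handlers):
        for method, args, output in handlers:
            self.add_postprocess_handler(method, args, output)

task = DemoTask()
task.add_postprocess_handler_batch((
    ("json_path", ["{{ __response__.body }}", "$.items"], "records"),
    ("std_output", ["{{ records }}"], None),
))
assert len(task.handlers) == 2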
+ ) + return + logger.debug("Execute handlers finished successfully.") def _pre_process(self, context): - self._execute_handlers(self._skip_pre_conditions, - self._pre_process_handler, - context, - 'pre') + self._execute_handlers( + self._skip_pre_conditions, self._pre_process_handler, context, "pre" + ) def _post_process(self, context): - self._execute_handlers(self._skip_post_conditions, - self._post_process_handler, - context, - 'post') + self._execute_handlers( + self._skip_post_conditions, self._post_process_handler, context, "post" + ) @abstractmethod def perform(self, context): @@ -231,43 +268,43 @@ class CCESplitTask(BaseTask): OUTPUT_KEY = "__cce_split_result__" def __init__(self, name): - super(CCESplitTask, self).__init__(name) + super().__init__(name) self._process_handler = None self._source = None def configure_split(self, method, source, output, separator=None): arguments = [source, output, separator] self._source = source - self._process_handler = ProcessHandler(method, arguments, - CCESplitTask.OUTPUT_KEY) + self._process_handler = ProcessHandler( + method, arguments, CCESplitTask.OUTPUT_KEY + ) def perform(self, context): - logger.debug('Task=%s start to run', self) + logger.debug("Task=%s start to run", self) try: self._pre_process(context) except StopCCEIteration: - logger.info('Task=%s exits in pre_process stage', self) + logger.info("Task=%s exits in pre_process stage", self) yield context return if not self._process_handler: - logger.info('Task=%s has no split method', self) + logger.info("Task=%s has no split method", self) raise CCESplitError try: invoke_results = self._process_handler.execute(context) - except: + except Exception: logger.exception("Task=%s encountered exception", self) raise CCESplitError - if not invoke_results or not \ - invoke_results.get(CCESplitTask.OUTPUT_KEY): + if not invoke_results or not invoke_results.get(CCESplitTask.OUTPUT_KEY): raise CCESplitError for invoke_result in invoke_results[CCESplitTask.OUTPUT_KEY]: new_context = copy.deepcopy(context) new_context.update(invoke_result) yield new_context - logger.debug('Task=%s finished', self) + logger.debug("Task=%s finished", self) class CCEHTTPRequestTask(BaseTask): @@ -279,8 +316,20 @@ class CCEHTTPRequestTask(BaseTask): from context when executing. """ - def __init__(self, request, name, meta_config=None, task_config=None): - super(CCEHTTPRequestTask, self).__init__(name) + def __init__(self, request, name, meta_config=None, task_config=None, **kwargs): + """ + :param verify: Absolute path to server certificate, otherwise uses + requests' default certificate to verify server's TLS certificate. + Explicitly set it to False to not verify TLS certificate. + :type verify: ``string or bool`` + :param custom_func: Custom error code handling for HTTP codes: + the function should accept `request`, `response` and `logger` parameters + To let the library handle the status code, return a non-list object + To handle status code using custom logic, return (response, bool). 
+ Bool decides whether to break or continue the code flow + :type custom_func: ``function`` + """ + super().__init__(name) self._request = RequestTemplate(request) self._stop_conditions = ConditionGroup() self._proxy_info = None @@ -290,16 +339,20 @@ def __init__(self, request, name, meta_config=None, task_config=None): self._task_config = task_config self._meta_config = meta_config + self._http_client = None self._authorizer = None self._stopped = threading.Event() self._stop_signal_received = False + if kwargs.get("custom_func"): + self.custom_handle_status_code = kwargs["custom_func"] + self.requests_verify = kwargs.get("verify", True) def stop(self, block=False, timeout=30): """ Stop current task. """ if self._stopped.is_set(): - logger.info('Task=%s is not running, cannot stop it.', self) + logger.info("Task=%s is not running, cannot stop it.", self) return self._stop_signal_received = True @@ -307,11 +360,11 @@ def stop(self, block=False, timeout=30): return if not self._stopped.wait(timeout): - logger.info('Waiting for stop task %s timeout', self) + logger.info("Waiting for stop task %s timeout", self) def _check_if_stop_needed(self): if self._stop_signal_received: - logger.info('Stop task signal received, stopping task %s.', self) + logger.info("Stop task signal received, stopping task %s.", self) self._stopped.set() return True return False @@ -344,10 +397,10 @@ def set_auth(self, auth_type, settings): :type settings: ``dict`` """ if not auth_type: - raise ValueError('Invalid auth type={}'.format(auth_type)) + raise ValueError(f"Invalid auth type={auth_type}") authorizer_cls = _AUTH_TYPES.get(auth_type.lower()) if not authorizer_cls: - raise ValueError('Unsupported auth type={}'.format(auth_type)) + raise ValueError(f"Unsupported auth type={auth_type}") self._authorizer = authorizer_cls(settings) def set_iteration_count(self, count): @@ -364,8 +417,10 @@ def set_iteration_count(self, count): except ValueError: self._max_iteration_count = defaults.max_iteration_count logger.warning( - 'Invalid iteration count: %s, using default max iteration count: %s', - count, self._max_iteration_count) + "Invalid iteration count: %s, using default max iteration count: %s", + count, + self._max_iteration_count, + ) def add_stop_condition(self, method, input): """ @@ -389,53 +444,61 @@ def configure_checkpoint(self, name, content): :type content: ``dict`` """ if not name or not name.strip(): - raise ValueError('Invalid checkpoint name: "{}"'.format(name)) + raise ValueError(f'Invalid checkpoint name: "{name}"') if not content: - raise ValueError('Invalid checkpoint content: {}'.format(content)) + raise ValueError(f"Invalid checkpoint content: {content}") self._checkpointer = CheckpointManagerAdapter( namespaces=name, content=content, meta_config=self._meta_config, - task_config=self._task_config + task_config=self._task_config, ) def _should_exit(self, done_count, context): if 0 < self._max_iteration_count <= done_count: - logger.info('Iteration count reached %s', self._max_iteration_count) + logger.info("Iteration count reached %s", self._max_iteration_count) return True if self._stop_conditions.is_meet(context): - logger.info('Stop conditions are met') + logger.info("Stop conditions are met") return True return False - @staticmethod - def _send_request(client, request): + def _send_request(self, request): try: - response = client.send(request) + response = self._http_client.send(request) except HTTPError as error: logger.exception( - 'Error occurred in request url=%s method=%s reason=%s', - 
request.url, request.method, error.reason + "Error occurred in request url=%s method=%s reason=%s", + request.url, + request.method, + error.reason, ) return None, True status = response.status_code if status in defaults.success_statuses: - if not (response.body or '').strip(): + if not (response.body or "").strip(): logger.info( - 'The response body of request which url=%s and' - ' method=%s is empty, status=%s.', - request.url, request.method, status + "The response body of request which url=%s and" + " method=%s is empty, status=%s.", + request.url, + request.method, + status, ) return None, True return response, False - error_log = ('The response status=%s for request which url=%s and' - ' method=%s.') % ( - status, request.url, request.method - ) + if "custom_handle_status_code" in dir(self): + returned_items = self.custom_handle_status_code(request, response, logger) + if isinstance(returned_items, (list, tuple)): + return returned_items[0], returned_items[1] + + error_log = ( + "The response status=%s for request which url=%s and" + " method=%s and message=%s " + ) % (status, request.url, request.method, response.body) if status in defaults.warning_statuses: logger.warning(error_log) @@ -446,33 +509,38 @@ def _send_request(client, request): def _persist_checkpoint(self, context): if not self._checkpointer: - logger.debug('Checkpoint is not configured. Skip persisting checkpoint.') + logger.debug("Checkpoint is not configured. Skip persisting checkpoint.") return try: self._checkpointer.save(context) except Exception: - logger.exception('Error while persisting checkpoint') + logger.exception("Error while persisting checkpoint") else: - logger.debug('Checkpoint has been updated successfully.') + logger.debug("Checkpoint has been updated successfully.") def _load_checkpoint(self, ctx): if not self._checkpointer: - logger.debug('Checkpoint is not configured. Skip loading checkpoint.') + logger.debug("Checkpoint is not configured. 
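Aside: the `custom_func` hook documented in the constructor above receives `(request, response, logger)`; returning a `(response, need_exit)` pair takes over status handling, while any non-list/tuple return value defers to the default logic. A hedged example of such a handler, under the assumption that `(None, True)` ends the iteration cleanly as it does for empty success responses:

# --- illustrative sketch (not part of the patch) ---
def tolerate_404(request, response, logger):
    """Treat 404 as 'no more data' instead of an error; defer the rest."""
    if response.status_code == 404:
        logger.info("No records at %s, stopping cleanly", request.url)
        return None, True  # (response, need_exit): end iteration, no error
    return None  # non-tuple return: fall through to default handling

# wired in at construction time, per the docstring above:
# task = CCEHTTPRequestTask(request, name, custom_func=tolerate_404)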
Skip loading checkpoint.") return {} return self._checkpointer.load(ctx=ctx) def _prepare_http_client(self, ctx): proxy = self._proxy_info.render(ctx) if self._proxy_info else None - return HttpClient(proxy) + self._http_client = HttpClient(proxy, self.requests_verify) + + def _flush_checkpoint(self): + if self._checkpointer: + # Flush checkpoint cache to disk + self._checkpointer.close() def perform(self, context): - logger.info('Starting to perform task=%s', self) + logger.info("Starting to perform task=%s", self) - client = self._prepare_http_client(context) + self._prepare_http_client(context) done_count = 0 context.update(self._load_checkpoint(context)) - update_source = False if context.get('source') else True + update_source = False if context.get("source") else True self._request.reset() while True: @@ -481,6 +549,9 @@ def perform(self, context): except StopCCEIteration: logger.info("Task=%s exits in pre_process stage", self) break + except QuitJobError: + self._flush_checkpoint() + raise if self._check_if_stop_needed(): break @@ -489,23 +560,28 @@ def perform(self, context): if self._authorizer: self._authorizer(r.headers, context) - response, need_exit = self._send_request(client, r) + response, need_exit = self._send_request(r) context[_RESPONSE_KEY] = response if need_exit: - logger.info('Task=%s need been terminated due to request response', self) + logger.info( + "Task=%s need been terminated due to request response", self + ) break if self._check_if_stop_needed(): break if update_source: - context['source'] = r.url.split('?')[0] + context["source"] = r.url.split("?")[0] try: self._post_process(context) except StopCCEIteration: logger.info("Task=%s exits in post_process stage", self) break + except QuitJobError: + self._flush_checkpoint() + raise self._persist_checkpoint(context) @@ -515,12 +591,10 @@ def perform(self, context): done_count += 1 if self._should_exit(done_count, context): break - if update_source and context.get('source'): - del context['source'] + if update_source and context.get("source"): + del context["source"] yield context self._stopped.set() - if self._checkpointer: - # Flush checkpoint cache to disk - self._checkpointer.close() - logger.info('Perform task=%s finished', self) + self._flush_checkpoint() + logger.info("Perform task=%s finished", self) diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/core/template.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/core/template.py index 8cef6e90..fe8c1d4c 100755 --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/core/template.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/core/template.py @@ -1,6 +1,22 @@ -from jinja2 import Template +# +# Copyright 2021 Splunk Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# import re +from jinja2 import Template + # This pattern matches the template with only one token inside like "{{ # token1}}", "{{ token2 }" PATTERN = re.compile(r"^\{\{\s*(\w+)\s*\}\}$") @@ -14,7 +30,7 @@ def translate_internal(context): match = re.match(PATTERN, _origin_template) if match: context_var = context.get(match.groups()[0]) - return context_var if context_var else '' + return context_var if context_var else "" return _template.render(context) return translate_internal diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/splunktacollectorlib/__init__.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/splunktacollectorlib/__init__.py index e69de29b..72d45097 100755 --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/splunktacollectorlib/__init__.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/splunktacollectorlib/__init__.py @@ -0,0 +1,15 @@ +# +# Copyright 2021 Splunk Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/splunktacollectorlib/cloud_connect_mod_input.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/splunktacollectorlib/cloud_connect_mod_input.py index 380288e3..3af567a3 100755 --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/splunktacollectorlib/cloud_connect_mod_input.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/splunktacollectorlib/cloud_connect_mod_input.py @@ -1,18 +1,29 @@ -from future import standard_library -standard_library.install_aliases() +# +# Copyright 2021 Splunk Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
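For reference, `compile_template` special-cases templates made of exactly one token, such as "{{ token1 }}": the variable's value is returned as-is, preserving its type, and a missing variable becomes the empty string; everything else goes through normal Jinja2 rendering. A compact reconstruction:

# --- illustrative sketch (not part of the patch) ---
import re

from jinja2 import Template

PATTERN = re.compile(r"^\{\{\s*(\w+)\s*\}\}$")

def compile_template_demo(source):
    template = Template(source)

    def translate(context):
        match = PATTERN.match(source)
        if match:
            value = context.get(match.group(1))
            return value if value else ""  # single token: keep original type
        return template.render(context)

    return translate

render = compile_template_demo("{{ items }}")
assert render({"items": [1, 2]}) == [1, 2]  # a list survives un-stringified
assert render({}) == ""                     # missing token -> empty string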
+# import configparser import os.path as op +from ..common.lib_util import get_app_root_dir, get_main_file, get_mod_input_script_name from .data_collection import ta_mod_input as ta_input from .ta_cloud_connect_client import TACloudConnectClient as CollectorCls -from ..common.lib_util import ( - get_main_file, get_app_root_dir, get_mod_input_script_name -) def _load_options_from_inputs_spec(app_root, stanza_name): - input_spec_file = 'inputs.conf.spec' - file_path = op.join(app_root, 'README', input_spec_file) + input_spec_file = "inputs.conf.spec" + file_path = op.join(app_root, "README", input_spec_file) if not op.isfile(file_path): raise RuntimeError("README/%s doesn't exist" % input_spec_file) @@ -20,7 +31,7 @@ def _load_options_from_inputs_spec(app_root, stanza_name): parser = configparser.RawConfigParser(allow_no_value=True) parser.read(file_path) options = list(parser.defaults().keys()) - stanza_prefix = '%s://' % stanza_name + stanza_prefix = "%s://" % stanza_name stanza_exist = False for section in parser.sections(): @@ -34,21 +45,24 @@ def _load_options_from_inputs_spec(app_root, stanza_name): def _find_ucc_global_config_json(app_root, ucc_config_filename): """Find UCC config file from all possible directories""" - candidates = ['local', 'default', 'bin', - op.join('appserver', 'static', 'js', 'build')] + candidates = [ + "local", + "default", + "bin", + op.join("appserver", "static", "js", "build"), + ] for candidate in candidates: file_path = op.join(app_root, candidate, ucc_config_filename) if op.isfile(file_path): return file_path raise RuntimeError( - 'Unable to load %s from [%s]' - % (ucc_config_filename, ','.join(candidates)) + "Unable to load {} from [{}]".format(ucc_config_filename, ",".join(candidates)) ) def _get_cloud_connect_config_json(script_name): - config_file_name = '.'.join([script_name, 'cc.json']) + config_file_name = ".".join([script_name, "cc.json"]) return op.join(op.dirname(get_main_file()), config_file_name) @@ -58,9 +72,7 @@ def run(single_instance=False): cce_config_file = _get_cloud_connect_config_json(script_name) app_root = get_app_root_dir() - ucc_config_path = _find_ucc_global_config_json( - app_root, 'globalConfig.json' - ) + ucc_config_path = _find_ucc_global_config_json(app_root, "globalConfig.json") schema_params = _load_options_from_inputs_spec(app_root, script_name) ta_input.main( @@ -69,5 +81,5 @@ def run(single_instance=False): log_suffix=script_name, cc_json_file=cce_config_file, schema_para_list=schema_params, - single_instance=single_instance + single_instance=single_instance, ) diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/splunktacollectorlib/common/__init__.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/splunktacollectorlib/common/__init__.py index 4230727e..4c58c45a 100755 --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/splunktacollectorlib/common/__init__.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/splunktacollectorlib/common/__init__.py @@ -1,5 +1,20 @@ -import json +# +# Copyright 2021 Splunk Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
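Aside: `_load_options_from_inputs_spec` parses `README/inputs.conf.spec` as an INI file, collecting global keys from `parser.defaults()` and per-input keys from the `<scheme>://` stanza. The same parsing against an inline spec; the stanza and key names below are illustrative:

# --- illustrative sketch (not part of the patch) ---
import configparser

SPEC = """[example_input://]
api_url =
poll_interval =
"""

parser = configparser.RawConfigParser(allow_no_value=True)
parser.read_string(SPEC)

options = list(parser.defaults())
for section in parser.sections():
    if section.startswith("example_input://"):
        options.extend(parser.options(section))

assert options == ["api_url", "poll_interval"]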
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# import hashlib +import json def load_schema_file(schema_file): @@ -49,4 +64,3 @@ class UCCException(Exception): """ pass - diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/splunktacollectorlib/common/log.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/splunktacollectorlib/common/log.py index 4d1ecf03..826e5f29 100755 --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/splunktacollectorlib/common/log.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/splunktacollectorlib/common/log.py @@ -1,6 +1,21 @@ +# +# Copyright 2021 Splunk Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# import logging -from ...splunktalib.common import log as stclog -import six + +from solnlib import log def set_log_level(log_level): @@ -8,33 +23,39 @@ def set_log_level(log_level): Set log level. """ - if isinstance(log_level, six.string_types): + if isinstance(log_level, str): if log_level.upper() == "DEBUG": - stclog.Logs().set_level(logging.DEBUG) + log.Logs().set_level(logging.DEBUG) elif log_level.upper() == "INFO": - stclog.Logs().set_level(logging.INFO) + log.Logs().set_level(logging.INFO) elif log_level.upper() == "WARN": - stclog.Logs().set_level(logging.WARN) + log.Logs().set_level(logging.WARN) elif log_level.upper() == "ERROR": - stclog.Logs().set_level(logging.ERROR) + log.Logs().set_level(logging.ERROR) elif log_level.upper() == "WARNING": - stclog.Logs().set_level(logging.WARNING) + log.Logs().set_level(logging.WARNING) elif log_level.upper() == "CRITICAL": - stclog.Logs().set_level(logging.CRITICAL) + log.Logs().set_level(logging.CRITICAL) else: - stclog.Logs().set_level(logging.INFO) + log.Logs().set_level(logging.INFO) elif isinstance(log_level, int): - if log_level in [logging.DEBUG, logging.INFO, logging.ERROR, - logging.WARN, logging.WARNING, logging.CRITICAL]: - stclog.Logs().set_level(log_level) + if log_level in [ + logging.DEBUG, + logging.INFO, + logging.ERROR, + logging.WARN, + logging.WARNING, + logging.CRITICAL, + ]: + log.Logs().set_level(log_level) else: - stclog.Logs().set_level(logging.INFO) + log.Logs().set_level(logging.INFO) else: - stclog.Logs().set_level(logging.INFO) + log.Logs().set_level(logging.INFO) # Global logger -logger = stclog.Logs().get_logger("cloud_connect_engine") +logger = log.Logs().get_logger("cloud_connect_engine") def reset_logger(name): @@ -42,9 +63,5 @@ def reset_logger(name): Reset logger. 
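Aside: `set_log_level` above maps level names onto `logging` constants through a long if/elif ladder, defaulting to INFO for anything unrecognized. The same decision table as a dict lookup, behaviour preserved:

# --- illustrative sketch (not part of the patch) ---
import logging

_LEVELS = {
    "DEBUG": logging.DEBUG, "INFO": logging.INFO, "WARN": logging.WARN,
    "WARNING": logging.WARNING, "ERROR": logging.ERROR, "CRITICAL": logging.CRITICAL,
}

def normalize_log_level(log_level):
    """Resolve a str or int level; anything unrecognized becomes INFO."""
    if isinstance(log_level, str):
        return _LEVELS.get(log_level.upper(), logging.INFO)
    if isinstance(log_level, int) and log_level in _LEVELS.values():
        return log_level
    return logging.INFO

assert normalize_log_level("warn") == logging.WARNING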
""" - stclog.reset_logger(name) - global logger - logger = stclog.Logs().get_logger(name) - - + logger = log.Logs().get_logger(name) diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/splunktacollectorlib/common/rwlock.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/splunktacollectorlib/common/rwlock.py index 74b3063b..e91ba85d 100755 --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/splunktacollectorlib/common/rwlock.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/splunktacollectorlib/common/rwlock.py @@ -1,12 +1,26 @@ +# +# Copyright 2021 Splunk Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# """ This module provides Read-Write lock. """ -from builtins import object import threading -class _ReadLocker(object): +class _ReadLocker: def __init__(self, lock): self.lock = lock @@ -18,7 +32,7 @@ def __exit__(self, exc_type, exc_val, exc_tb): return False -class _WriteLocker(object): +class _WriteLocker: def __init__(self, lock): self.lock = lock @@ -30,8 +44,8 @@ def __exit__(self, exc_type, exc_val, exc_tb): return False -class RWLock(object): - """ Simple Read-Write lock. +class RWLock: + """Simple Read-Write lock. Allow multiple read but only one writing concurrently. """ @@ -69,4 +83,3 @@ def reader_lock(self): @property def writer_lock(self): return _WriteLocker(self) - diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/splunktacollectorlib/common/schema_meta.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/splunktacollectorlib/common/schema_meta.py index ec6ce889..1a7f1fa1 100755 --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/splunktacollectorlib/common/schema_meta.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/splunktacollectorlib/common/schema_meta.py @@ -1,7 +1,21 @@ - -FIELD_PRODUCT = '_product' -FIELD_REST_NAMESPACE = '_rest_namespace' -FIELD_REST_PREFIX = '_rest_prefix' -FIELD_PROTOCOL_VERSION = '_protocol_version' -FIELD_VERSION = '_version' -FIELD_ENCRYPTION_FORMATTER = '_encryption_formatter' +# +# Copyright 2021 Splunk Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +FIELD_PRODUCT = "_product" +FIELD_REST_NAMESPACE = "_rest_namespace" +FIELD_REST_PREFIX = "_rest_prefix" +FIELD_PROTOCOL_VERSION = "_protocol_version" +FIELD_VERSION = "_version" +FIELD_ENCRYPTION_FORMATTER = "_encryption_formatter" diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/splunktacollectorlib/config.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/splunktacollectorlib/config.py index 590ed1e5..a88c75c5 100755 --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/splunktacollectorlib/config.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/splunktacollectorlib/config.py @@ -1,27 +1,33 @@ +# +# Copyright 2021 Splunk Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# """UCC Config Module This is for load/save configuration in UCC server or TA. The load/save action is based on specified schema. """ - -from __future__ import absolute_import - -from future import standard_library -standard_library.install_aliases() -from builtins import str -from builtins import range -from builtins import object import json import logging -import traceback import time -import six +import traceback +from urllib.parse import quote -from ..splunktalib.rest import splunkd_request, code_to_msg -from ..splunktalib.common import util as sc_util +from solnlib import utils +from splunktalib.rest import code_to_msg, splunkd_request -from .common import log as stulog from .common import UCCException -from urllib.parse import quote +from .common import log as stulog LOGGING_STOPPED = False @@ -36,7 +42,7 @@ def stop_logging(): LOGGING_STOPPED = True -def log(msg, msgx='', level=logging.INFO, need_tb=False): +def log(msg, msgx="", level=logging.INFO, need_tb=False): """ Logging in UCC Config Module. 
:param msg: message content @@ -49,42 +55,45 @@ def log(msg, msgx='', level=logging.INFO, need_tb=False): if LOGGING_STOPPED: return - msgx = ' - ' + msgx if msgx else '' - content = 'UCC Config Module: %s%s' % (msg, msgx) + msgx = " - " + msgx if msgx else "" + content = f"UCC Config Module: {msg}{msgx}" if need_tb: - stack = ''.join(traceback.format_stack()) - content = '%s\r\n%s' % (content, stack) + stack = "".join(traceback.format_stack()) + content = f"{content}\r\n{stack}" stulog.logger.log(level, content, exc_info=1) class ConfigException(UCCException): - """Exception for UCC Config Exception - """ + """Exception for UCC Config Exception""" + pass -class Config(object): - """UCC Config Module - """ +class Config: + """UCC Config Module""" # Placeholder stands for any field - FIELD_PLACEHOLDER = '*' + FIELD_PLACEHOLDER = "*" # Head of non-processing endpoint - NON_PROC_ENDPOINT = '#' + NON_PROC_ENDPOINT = "#" # Some meta fields in UCC Config schema - META_FIELDS = ('_product', '_rest_namespace', '_rest_prefix', - '_protocol_version', '_version', - '_encryption_formatter') + META_FIELDS = ( + "_product", + "_rest_namespace", + "_rest_prefix", + "_protocol_version", + "_version", + "_encryption_formatter", + ) # Default Values for Meta fields META_FIELDS_DEFAULT = { - '_encryption_formatter': '', + "_encryption_formatter": "", } - def __init__(self, splunkd_uri, session_key, schema, - user='nobody', app='-'): + def __init__(self, splunkd_uri, session_key, schema, user="nobody", app="-"): """ :param splunkd_uri: the root uri of Splunk server, like https://127.0.0.1:8089 @@ -94,7 +103,7 @@ def __init__(self, splunkd_uri, session_key, schema, :param app: namespace of the resources requested :return: """ - self.splunkd_uri = splunkd_uri.strip('/') + self.splunkd_uri = splunkd_uri.strip("/") self.session_key = session_key self.user, self.app = user, app self._parse_schema(schema) @@ -107,27 +116,27 @@ def load(self): """ log('"load" method in', level=logging.DEBUG) - ret = {meta_field: getattr(self, meta_field) - for meta_field in Config.META_FIELDS} + ret = { + meta_field: getattr(self, meta_field) for meta_field in Config.META_FIELDS + } for ep_id, ep in self._endpoints.items(): - data = {'output_mode': 'json', '--cred--': '1'} + data = {"output_mode": "json", "--cred--": "1"} retries = 4 waiting_time = [1, 2, 2] for retry in range(retries): - resp, cont = splunkd_request( + response = splunkd_request( splunkd_uri=self.make_uri(ep_id), session_key=self.session_key, data=data, - retry=3 + retry=3, ) - - if resp is None or resp.status != 200: - msg = 'Fail to load endpoint "{ep_id}" - {err}' \ - ''.format(ep_id=ep_id, - err=code_to_msg(resp, cont) - if resp else cont) + cont = response.text + if response is None or response.status_code != 200: + msg = 'Fail to load endpoint "{ep_id}" - {err}' "".format( + ep_id=ep_id, err=code_to_msg(response) + ) log(msg, level=logging.ERROR, need_tb=True) raise ConfigException(msg) @@ -135,7 +144,7 @@ def load(self): ret[ep_id] = self._parse_content(ep_id, cont) except ConfigException as exc: log(exc, level=logging.WARNING, need_tb=True) - if retry < retries-1: + if retry < retries - 1: time.sleep(waiting_time[retry]) else: break @@ -146,8 +155,9 @@ def load(self): log('"load" method out', level=logging.DEBUG) return ret - def update_items(self, endpoint_id, item_names, field_names, data, - raise_if_failed=False): + def update_items( + self, endpoint_id, item_names, field_names, data, raise_if_failed=False + ): """Update items in specified endpoint with 
given fields in data :param endpoint_id: endpoint id in schema, the key name in schema :param item_names: a list of item name @@ -168,40 +178,49 @@ def update_items(self, endpoint_id, item_names, field_names, data, If raise_if_failed is True, it will exist with an exception on any updating failed. """ - log('"update_items" method in', - msgx='endpoint_id=%s, item_names=%s, field_names=%s' - % (endpoint_id, item_names, field_names), - level=logging.DEBUG) - - assert endpoint_id in self._endpoints, \ - 'Unexpected endpoint id in given schema - {ep_id}' \ - ''.format(ep_id=endpoint_id) + log( + '"update_items" method in', + msgx="endpoint_id=%s, item_names=%s, field_names=%s" + % (endpoint_id, item_names, field_names), + level=logging.DEBUG, + ) + + assert ( + endpoint_id in self._endpoints + ), "Unexpected endpoint id in given schema - {ep_id}" "".format( + ep_id=endpoint_id + ) item_names_failed = [] for item_name in item_names: item_data = data.get(item_name, {}) - item_data = {field_name: self.dump_value(endpoint_id, - item_name, - field_name, - item_data[field_name]) - for field_name in field_names - if field_name in item_data} + item_data = { + field_name: self.dump_value( + endpoint_id, item_name, field_name, item_data[field_name] + ) + for field_name in field_names + if field_name in item_data + } if not item_data: continue item_uri = self.make_uri(endpoint_id, item_name=item_name) - resp, cont = splunkd_request(splunkd_uri=item_uri, - session_key=self.session_key, - data=item_data, - method="POST", - retry=3 - ) - if resp is None or resp.status not in (200, 201): - msg = 'Fail to update item "{item}" in endpoint "{ep_id}"' \ - ' - {err}'.format(ep_id=endpoint_id, - item=item_name, - err=code_to_msg(resp, cont) - if resp else cont) + response = splunkd_request( + splunkd_uri=item_uri, + session_key=self.session_key, + data=item_data, + method="POST", + retry=3, + ) + if response is None or response.status_code not in (200, 201): + msg = ( + 'Fail to update item "{item}" in endpoint "{ep_id}"' + " - {err}".format( + ep_id=endpoint_id, + item=item_name, + err=code_to_msg(response), + ) + ) log(msg, level=logging.ERROR) if raise_if_failed: raise ConfigException(msg) @@ -216,28 +235,39 @@ def make_uri(self, endpoint_id, item_name=None): :param item_name: item name for given endpoint. 
None for listing all :return: """ - endpoint = self._endpoints[endpoint_id]['endpoint'] - ep_full = endpoint[1:].strip('/') \ - if endpoint.startswith(Config.NON_PROC_ENDPOINT) else \ - '{admin_match}/{protocol_version}/{endpoint}' \ - ''.format(admin_match=self._rest_namespace, - protocol_version=self._protocol_version, - endpoint=(self._rest_prefix + - self._endpoints[endpoint_id]['endpoint'])) - ep_uri = None if endpoint_id not in self._endpoints else \ - '{splunkd_uri}/servicesNS/{user}/{app}/{endpoint_full}' \ - ''.format(splunkd_uri=self.splunkd_uri, - user=self.user, - app=self.app, - endpoint_full=ep_full - ) - - url = ep_uri if item_name is None else "{ep_uri}/{item_name}"\ - .format(ep_uri=ep_uri, item_name=quote(item_name)) + endpoint = self._endpoints[endpoint_id]["endpoint"] + ep_full = ( + endpoint[1:].strip("/") + if endpoint.startswith(Config.NON_PROC_ENDPOINT) + else "{admin_match}/{protocol_version}/{endpoint}" + "".format( + admin_match=self._rest_namespace, + protocol_version=self._protocol_version, + endpoint=(self._rest_prefix + self._endpoints[endpoint_id]["endpoint"]), + ) + ) + ep_uri = ( + None + if endpoint_id not in self._endpoints + else "{splunkd_uri}/servicesNS/{user}/{app}/{endpoint_full}" + "".format( + splunkd_uri=self.splunkd_uri, + user=self.user, + app=self.app, + endpoint_full=ep_full, + ) + ) + + url = ( + ep_uri + if item_name is None + else "{ep_uri}/{item_name}".format( + ep_uri=ep_uri, item_name=quote(item_name) + ) + ) if item_name is None: - url += '?count=-1' - log('"make_uri" method', msgx='url=%s' % url, - level=logging.DEBUG) + url += "?count=-1" + log('"make_uri" method', msgx="url=%s" % url, level=logging.DEBUG) return url def _parse_content(self, endpoint_id, content): @@ -245,49 +275,59 @@ def _parse_content(self, endpoint_id, content): :param content: a JSON string returned from REST. 
""" try: - content = json.loads(content)['entry'] - ret = {ent['name']: ent['content'] for ent in content} + content = json.loads(content)["entry"] + ret = {ent["name"]: ent["content"] for ent in content} except Exception as exc: - msg = 'Fail to parse content from endpoint_id=%s' \ - ' - %s' % (endpoint_id, exc) + msg = "Fail to parse content from endpoint_id=%s" " - %s" % ( + endpoint_id, + exc, + ) log(msg, level=logging.ERROR, need_tb=True) raise ConfigException(msg) - ret = {name: {key: self.load_value(endpoint_id, name, key, val) - for key, val in ent.items() - if not key.startswith('eai:')} - for name, ent in ret.items()} + ret = { + name: { + key: self.load_value(endpoint_id, name, key, val) + for key, val in ent.items() + if not key.startswith("eai:") + } + for name, ent in ret.items() + } return ret def _parse_schema(self, ucc_config_schema): try: ucc_config_schema = json.loads(ucc_config_schema) except ValueError: - msg = 'Invalid JSON content of schema' + msg = "Invalid JSON content of schema" log(msg, level=logging.ERROR, need_tb=True) raise ConfigException(msg) except Exception as exc: log(exc, level=logging.ERROR, need_tb=True) raise ConfigException(exc) - ucc_config_schema.update({key: val for key, val in - Config.META_FIELDS_DEFAULT.items() - if key not in ucc_config_schema}) + ucc_config_schema.update( + { + key: val + for key, val in Config.META_FIELDS_DEFAULT.items() + if key not in ucc_config_schema + } + ) for field in Config.META_FIELDS: - assert field in ucc_config_schema and \ - isinstance(ucc_config_schema[field], six.string_types), \ - 'Missing or invalid field "%s" in given schema' % field + assert field in ucc_config_schema and isinstance( + ucc_config_schema[field], str + ), ('Missing or invalid field "%s" in given schema' % field) setattr(self, field, ucc_config_schema[field]) self._endpoints = {} for key, val in ucc_config_schema.items(): - if key.startswith('_'): + if key.startswith("_"): continue - assert isinstance(val, dict), \ + assert isinstance(val, dict), ( 'The schema of endpoint "%s" should be dict' % key - assert 'endpoint' in val, \ - 'The endpoint "%s" has no endpoint entry' % key + ) + assert "endpoint" in val, 'The endpoint "%s" has no endpoint entry' % key self._endpoints[key] = val @@ -298,58 +338,68 @@ def _check_protocol_version(self): """ if not self._protocol_version: return - if not self._protocol_version.startswith('1.'): - raise ConfigException('Unsupported protocol version "%s" ' - 'in given schema' % self._protocol_version) + if not self._protocol_version.startswith("1."): + raise ConfigException( + 'Unsupported protocol version "%s" ' + "in given schema" % self._protocol_version + ) def load_value(self, endpoint_id, item_name, fname, fval): field_type = self._get_field_type(endpoint_id, item_name, fname) - if field_type == '': + if field_type == "": return fval try: field_type = field_type.lower() - if field_type == 'bool': - return True if sc_util.is_true(fval) else False - elif field_type == 'int': + if field_type == "bool": + return True if utils.is_true(fval) else False + elif field_type == "int": return int(fval) - elif field_type == 'json': + elif field_type == "json": return json.loads(fval) except Exception as exc: - msg = 'Fail to load value of "{type_name}" - ' \ - 'endpoint={endpoint}, item={item}, field={field}' \ - ''.format(type_name=field_type, - endpoint=endpoint_id, - item=item_name, - field=fname) + msg = ( + 'Fail to load value of "{type_name}" - ' + "endpoint={endpoint}, item={item}, field={field}" + "".format( 
+ type_name=field_type, + endpoint=endpoint_id, + item=item_name, + field=fname, + ) + ) log(msg, msgx=str(exc), level=logging.WARNING, need_tb=True) raise ConfigException(msg) def dump_value(self, endpoint_id, item_name, fname, fval): field_type = self._get_field_type(endpoint_id, item_name, fname) - if field_type == '': + if field_type == "": return fval try: field_type = field_type.lower() - if field_type == 'bool': + if field_type == "bool": return str(fval).lower() - elif field_type == 'json': + elif field_type == "json": return json.dumps(fval) else: return fval except Exception as exc: - msg = 'Fail to dump value of "{type_name}" - ' \ - 'endpoint={endpoint}, item={item}, field={field}' \ - ''.format(type_name=field_type, - endpoint=endpoint_id, - item=item_name, - field=fname) + msg = ( + 'Fail to dump value of "{type_name}" - ' + "endpoint={endpoint}, item={item}, field={field}" + "".format( + type_name=field_type, + endpoint=endpoint_id, + item=item_name, + field=fname, + ) + ) log(msg, msgx=str(exc), level=logging.ERROR, need_tb=True) raise ConfigException(msg) def _get_field_type(self, endpoint_id, item_name, fname): - field_types = self._endpoints[endpoint_id].get('field_types', {}) + field_types = self._endpoints[endpoint_id].get("field_types", {}) if item_name in field_types: fields = field_types[item_name] elif Config.FIELD_PLACEHOLDER in field_types: @@ -357,14 +407,18 @@ def _get_field_type(self, endpoint_id, item_name, fname): else: fields = {} - field_type = fields.get(fname, '') - if field_type not in ('', 'bool', 'int', 'json'): - msg = 'Unsupported type "{type_name}" for value in schema - ' \ - 'endpoint={endpoint}, item={item}, field={field}' \ - ''.format(type_name=field_type, - endpoint=endpoint_id, - item=item_name, - field=fname) + field_type = fields.get(fname, "") + if field_type not in ("", "bool", "int", "json"): + msg = ( + 'Unsupported type "{type_name}" for value in schema - ' + "endpoint={endpoint}, item={item}, field={field}" + "".format( + type_name=field_type, + endpoint=endpoint_id, + item=item_name, + field=fname, + ) + ) log(msg, level=logging.ERROR, need_tb=True) raise ConfigException(msg) return field_type diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/splunktacollectorlib/data_collection/__init__.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/splunktacollectorlib/data_collection/__init__.py index bb35ee15..e1dc0ca1 100755 --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/splunktacollectorlib/data_collection/__init__.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/splunktacollectorlib/data_collection/__init__.py @@ -1 +1,16 @@ -__version__ = "1.0.2" \ No newline at end of file +# +# Copyright 2021 Splunk Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
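The config.py hunks above change the splunkd_request contract: instead of unpacking a (response, content) pair, the caller now receives a single requests-style Response object, or None once retries are exhausted, and must check it before touching `.status_code` or `.text`. A minimal sketch of that calling pattern, using a hypothetical `fetch_endpoint` helper in place of splunktalib's `splunkd_request`:

```python
# Sketch only: illustrates the response-handling contract assumed by the
# refactored Config.load(); fetch_endpoint is a hypothetical stand-in for
# splunktalib's splunkd_request, which is assumed to return a
# requests.Response on success and None once its retries are exhausted.
from typing import Optional

import requests


def fetch_endpoint(url: str, session_key: str) -> Optional[requests.Response]:
    try:
        return requests.get(
            url,
            headers={"Authorization": f"Splunk {session_key}"},
            params={"output_mode": "json", "count": -1},
            timeout=30,
        )
    except requests.RequestException:
        return None


def load_endpoint(url: str, session_key: str) -> dict:
    response = fetch_endpoint(url, session_key)
    # Check for None *before* reading response.status_code or response.text.
    if response is None or response.status_code != 200:
        raise RuntimeError(f"Fail to load endpoint {url}")
    # Only on the success path is it safe to parse the body, mirroring
    # the {name: content} shape that _parse_content extracts.
    return {ent["name"]: ent["content"] for ent in response.json()["entry"]}
```

The ordering is the whole point: the body is read only after the guard, which is also the fix applied to the `load()` hunk above.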
+# +__version__ = "1.0.2" diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/splunktacollectorlib/data_collection/ta_checkpoint_manager.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/splunktacollectorlib/data_collection/ta_checkpoint_manager.py index 74bd80e6..4e41db97 100755 --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/splunktacollectorlib/data_collection/ta_checkpoint_manager.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/splunktacollectorlib/data_collection/ta_checkpoint_manager.py @@ -1,15 +1,30 @@ -from builtins import object +# +# Copyright 2021 Splunk Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# import json import re +from solnlib.utils import is_true +from splunktalib import state_store as ss + +from ..common import log as stulog from . import ta_consts as c from . import ta_helper as th -from ..common import log as stulog -from ...splunktalib import state_store as ss -from ...splunktalib.common.util import is_true -class TACheckPointMgr(object): +class TACheckPointMgr: SEPARATOR = "_" * 3 # FIXME We'd better move all default values together @@ -21,52 +36,48 @@ def __init__(self, meta_config, task_config): self._store = self._create_state_store( meta_config, task_config.get(c.checkpoint_storage_type), - task_config[c.appname] + task_config[c.appname], ) def _create_state_store(self, meta_config, storage_type, app_name): - stulog.logger.debug('Got checkpoint storage type=%s', storage_type) + stulog.logger.debug("Got checkpoint storage type=%s", storage_type) if storage_type == c.checkpoint_kv_storage: collection_name = self._get_collection_name() stulog.logger.debug( - 'Creating KV state store, collection name=%s', collection_name + "Creating KV state store, collection name=%s", collection_name ) return ss.get_state_store( meta_config, appname=app_name, collection_name=collection_name, - use_kv_store=True + use_kv_store=True, ) use_cache_file = self._use_cache_file() - max_cache_seconds = \ - self._get_max_cache_seconds() if use_cache_file else None + max_cache_seconds = self._get_max_cache_seconds() if use_cache_file else None stulog.logger.debug( - 'Creating file state store, use_cache_file=%s, max_cache_seconds=%s', - use_cache_file, max_cache_seconds + "Creating file state store, use_cache_file=%s, max_cache_seconds=%s", + use_cache_file, + max_cache_seconds, ) return ss.get_state_store( - meta_config, - app_name, - use_cache_file=use_cache_file, - max_cache_seconds=max_cache_seconds + meta_config, app_name, use_cached_store=use_cache_file ) def _get_collection_name(self): collection = self._task_config.get(c.collection_name) - collection = collection.strip() if collection else '' + collection = collection.strip() if collection else "" if not collection: input_name = self._task_config[c.mod_input_name] stulog.logger.info( - 'Collection name="%s" is empty, set it to "%s"', - collection, input_name + 'Collection name="%s" is empty, set it to "%s"', collection, 
input_name ) collection = input_name - return re.sub(r'[^\w]+', '_', collection) + return re.sub(r"[^\w]+", "_", collection) def _use_cache_file(self): # TODO Move the default value outside code @@ -74,22 +85,21 @@ def _use_cache_file(self): if use_cache_file: stulog.logger.info( "Stanza=%s using cached file store to create checkpoint", - self._task_config[c.stanza_name] + self._task_config[c.stanza_name], ) return use_cache_file def _get_max_cache_seconds(self): default = self._DEFAULT_MAX_CACHE_SECONDS - seconds = self._task_config.get( - c.max_cache_seconds, default - ) + seconds = self._task_config.get(c.max_cache_seconds, default) try: seconds = int(seconds) except ValueError: stulog.logger.warning( "The max_cache_seconds '%s' is not a valid integer," " so set this variable to default value %s", - seconds, default + seconds, + default, ) seconds = default else: @@ -100,7 +110,9 @@ def _get_max_cache_seconds(self): stulog.logger.warning( "The max_cache_seconds (%s) is expected in range[1,%s]," " set it to %s", - seconds, maximum, adjusted + seconds, + maximum, + adjusted, ) seconds = adjusted return seconds @@ -111,20 +123,31 @@ def get_ckpt_key(self, namespaces=None): def get_ckpt(self, namespaces=None, show_namespaces=False): key, namespaces = self.get_ckpt_key(namespaces) raw_checkpoint = self._store.get_state(key) - stulog.logger.info("Get checkpoint key='%s' value='%s'", - key, json.dumps(raw_checkpoint)) + stulog.logger.debug( + "Get checkpoint key='%s' value='%s'", key, json.dumps(raw_checkpoint) + ) if not show_namespaces and raw_checkpoint: return raw_checkpoint.get("data") return raw_checkpoint + def delete_if_exists(self, namespaces=None): + """Return true if exist and deleted else False""" + key, _ = self._key_formatter(namespaces) + try: + self._store.delete_state(key) + return True + except Exception: + return False + def update_ckpt(self, ckpt, namespaces=None): if not ckpt: stulog.logger.warning("Checkpoint expect to be not empty.") return key, namespaces = self.get_ckpt_key(namespaces) value = {"namespaces": namespaces, "data": ckpt} - stulog.logger.info("Update checkpoint key='%s' value='%s'", - key, json.dumps(value)) + stulog.logger.info( + "Update checkpoint key='%s' value='%s'", key, json.dumps(value) + ) self._store.update_state(key, value) def remove_ckpt(self, namespaces=None): @@ -133,8 +156,11 @@ def remove_ckpt(self, namespaces=None): def _key_formatter(self, namespaces=None): if not namespaces: - stulog.logger.info('Namespaces is empty, using stanza name instead.') - namespaces = [self._task_config[c.stanza_name]] + stanza = self._task_config[c.stanza_name] + stulog.logger.info( + f"Namespaces is empty, using stanza name {stanza} instead." + ) + namespaces = [stanza] key_str = TACheckPointMgr.SEPARATOR.join(namespaces) hashed_file = th.format_name_for_file(key_str) stulog.logger.info("raw_file='%s' hashed_file='%s'", key_str, hashed_file) @@ -143,6 +169,6 @@ def _key_formatter(self, namespaces=None): def close(self, key=None): try: self._store.close(key) - stulog.logger.info('Closed state store successfully. key=%s', key) + stulog.logger.info("Closed state store successfully. key=%s", key) except Exception: - stulog.logger.exception('Error closing state store. key=%s', key) + stulog.logger.exception("Error closing state store. 
key=%s", key) diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/splunktacollectorlib/data_collection/ta_config.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/splunktacollectorlib/data_collection/ta_config.py index 25fd8431..4083b305 100755 --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/splunktacollectorlib/data_collection/ta_config.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/splunktacollectorlib/data_collection/ta_config.py @@ -1,24 +1,44 @@ -from __future__ import absolute_import -from builtins import object +# +# Copyright 2021 Splunk Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# import os.path as op import socket +from solnlib import server_info +from splunktalib import modinput as modinput +from splunktalib.common import util + +from ..common import log as stulog from . import ta_consts as c from . import ta_helper as th -from ..common import log as stulog -from ...splunktalib import modinput as modinput -from ...splunktalib import splunk_cluster as sc -from ...splunktalib.common import util # methods can be overrided by subclass : process_task_configs -class TaConfig(object): +class TaConfig: _current_hostname = socket.gethostname() _appname = util.get_appname_from_path(op.abspath(__file__)) - def __init__(self, meta_config, client_schema, log_suffix=None, - stanza_name=None, input_type=None, - single_instance=True): + def __init__( + self, + meta_config, + client_schema, + log_suffix=None, + stanza_name=None, + input_type=None, + single_instance=True, + ): self._meta_config = meta_config self._stanza_name = stanza_name self._input_type = input_type @@ -26,8 +46,9 @@ def __init__(self, meta_config, client_schema, log_suffix=None, self._single_instance = single_instance self._task_configs = [] self._client_schema = client_schema - self._server_info = sc.ServerInfo(meta_config[c.server_uri], - meta_config[c.session_key]) + self._server_info = server_info.ServerInfo.from_server_uri( + meta_config[c.server_uri], meta_config[c.session_key] + ) self._all_conf_contents = {} self._get_division_settings = {} self.set_logging() @@ -50,14 +71,18 @@ def get_task_configs(self): def get_all_conf_contents(self): if self._all_conf_contents: - return self._all_conf_contents.get(c.inputs), \ - self._all_conf_contents.get(c.all_configs), \ - self._all_conf_contents.get(c.global_settings) + return ( + self._all_conf_contents.get(c.inputs), + self._all_conf_contents.get(c.all_configs), + self._all_conf_contents.get(c.global_settings), + ) inputs, configs, global_settings = th.get_all_conf_contents( self._meta_config[c.server_uri], self._meta_config[c.session_key], - self._client_schema, self._input_type) + self._client_schema, + self._input_type, + ) self._all_conf_contents[c.inputs] = inputs self._all_conf_contents[c.all_configs] = configs self._all_conf_contents[c.global_settings] = global_settings @@ -66,8 +91,9 @@ def get_all_conf_contents(self): def set_logging(self): # The default 
logger name is "cloud_connect_engine" if self._stanza_name and self._log_suffix: - logger_name = self._log_suffix + "_" + th.format_name_for_file( - self._stanza_name) + logger_name = ( + self._log_suffix + "_" + th.format_name_for_file(self._stanza_name) + ) stulog.reset_logger(logger_name) inputs, configs, global_settings = self.get_all_conf_contents() log_level = "INFO" @@ -76,8 +102,8 @@ def set_logging(self): log_level = item["loglevel"] break stulog.set_log_level(log_level) - stulog.logger.info("Set log_level={}".format(log_level)) - stulog.logger.info("Start {} task".format(self._stanza_name)) + stulog.logger.info(f"Set log_level={log_level}") + stulog.logger.info(f"Start {self._stanza_name} task") def get_input_type(self): return self._input_type @@ -92,7 +118,8 @@ def _get_checkpoint_storage_type(self, config): if cs_type not in (c.checkpoint_auto, c.checkpoint_file): stulog.logger.warning( "Checkpoint storage type='%s' is invalid, change it to '%s'", - cs_type, c.checkpoint_auto + cs_type, + c.checkpoint_auto, ) cs_type = c.checkpoint_auto @@ -101,7 +128,7 @@ def _get_checkpoint_storage_type(self, config): "Checkpoint storage type is '%s' and instance is " "search head, set checkpoint storage type to '%s'.", c.checkpoint_auto, - c.checkpoint_kv_storage + c.checkpoint_kv_storage, ) cs_type = c.checkpoint_kv_storage return cs_type @@ -111,15 +138,15 @@ def _load_task_configs(self): if self._input_type: inputs = inputs.get(self._input_type) if not self._single_instance: - inputs = [input for input in inputs if - input[c.name] == self._stanza_name] + inputs = [input for input in inputs if input[c.name] == self._stanza_name] all_task_configs = [] for input in inputs: task_config = {} task_config.update(input) task_config[c.configs] = configs - task_config[c.settings] = \ - {item[c.name]: item for item in global_settings["settings"]} + task_config[c.settings] = { + item[c.name]: item for item in global_settings["settings"] + } if self.is_single_instance(): collection_interval = "collection_interval" task_config[c.interval] = task_config.get(collection_interval) @@ -127,11 +154,12 @@ def _load_task_configs(self): if task_config[c.interval] <= 0: raise ValueError( "The interval value {} is invalid." 
- " It should be a positive integer".format( - task_config[c.interval])) + " It should be a positive integer".format(task_config[c.interval]) + ) - task_config[c.checkpoint_storage_type] = \ - self._get_checkpoint_storage_type(task_config) + task_config[c.checkpoint_storage_type] = self._get_checkpoint_storage_type( + task_config + ) task_config[c.appname] = TaConfig._appname task_config[c.mod_input_name] = self._input_type @@ -146,12 +174,19 @@ def process_task_configs(self, task_configs): pass -def create_ta_config(settings, config_cls=TaConfig, log_suffix=None, - single_instance=True): +def create_ta_config( + settings, config_cls=TaConfig, log_suffix=None, single_instance=True +): meta_config, configs = modinput.get_modinput_configs_from_stdin() stanza_name = None input_type = None if configs and "://" in configs[0].get("name", ""): input_type, stanza_name = configs[0].get("name").split("://", 1) - return config_cls(meta_config, settings, log_suffix, stanza_name, - input_type, single_instance=single_instance) + return config_cls( + meta_config, + settings, + log_suffix, + stanza_name, + input_type, + single_instance=single_instance, + ) diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/splunktacollectorlib/data_collection/ta_consts.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/splunktacollectorlib/data_collection/ta_consts.py index 187ce0f7..c0620bd9 100755 --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/splunktacollectorlib/data_collection/ta_consts.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/splunktacollectorlib/data_collection/ta_consts.py @@ -1,3 +1,18 @@ +# +# Copyright 2021 Splunk Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
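Between TaConfig._get_checkpoint_storage_type and TACheckPointMgr._create_state_store above, checkpoint storage resolves from three values ("auto", "kv_store", "file"), with "auto" promoted to the KV store on a search head so checkpoints replicate across the cluster. A standalone sketch of that decision; the constant names mirror ta_consts.py, the helper itself is illustrative:

```python
# Sketch only: mirrors the checkpoint-storage selection shown in the
# ta_config.py / ta_checkpoint_manager.py hunks above. The string values
# match ta_consts.py; resolve_storage_type is an illustrative helper.
CHECKPOINT_AUTO = "auto"
CHECKPOINT_KV_STORAGE = "kv_store"
CHECKPOINT_FILE = "file"


def resolve_storage_type(configured: str, is_search_head: bool) -> str:
    """Return the effective checkpoint storage type for one input stanza."""
    if configured not in (CHECKPOINT_AUTO, CHECKPOINT_FILE):
        # Invalid values fall back to 'auto', as the warning path in the diff does.
        configured = CHECKPOINT_AUTO
    if configured == CHECKPOINT_AUTO and is_search_head:
        # File checkpoints do not replicate across a search head cluster,
        # so 'auto' is promoted to the KV store there.
        return CHECKPOINT_KV_STORAGE
    return configured


assert resolve_storage_type("bogus", is_search_head=True) == "kv_store"
assert resolve_storage_type("file", is_search_head=True) == "file"
```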
+# server_uri = "server_uri" session_key = "session_key" version = "version" @@ -22,7 +37,7 @@ input_data = "input_data" interval = "interval" data = "data" -batch_size = 'batch_size' +batch_size = "batch_size" time_fmt = "%Y-%m-%dT%H:%M:%S" utc_time_fmt = "%Y-%m-%dT%H:%M:%S.%fZ" @@ -30,9 +45,9 @@ checkpoint_storage_type = "builtin_system_checkpoint_storage_type" # Possible values for checkpoint storage type -checkpoint_auto = 'auto' -checkpoint_kv_storage = 'kv_store' -checkpoint_file = 'file' +checkpoint_auto = "auto" +checkpoint_kv_storage = "kv_store" +checkpoint_file = "file" # For cache file use_cache_file = "builtin_system_use_cache_file" diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/splunktacollectorlib/data_collection/ta_data_client.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/splunktacollectorlib/data_collection/ta_data_client.py index f553ac67..65fbf139 100755 --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/splunktacollectorlib/data_collection/ta_data_client.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/splunktacollectorlib/data_collection/ta_data_client.py @@ -1,30 +1,44 @@ #!/usr/bin/python -from builtins import next -from builtins import object +# +# Copyright 2021 Splunk Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# from . import ta_checkpoint_manager as cp from . 
import ta_data_collector as tdc -def build_event(host=None, - source=None, - sourcetype=None, - time=None, - index=None, - raw_data="", - is_unbroken=False, - is_done=False): +def build_event( + host=None, + source=None, + sourcetype=None, + time=None, + index=None, + raw_data="", + is_unbroken=False, + is_done=False, +): if is_unbroken is False and is_done is True: - raise Exception('is_unbroken=False is_done=True is invalid') - return tdc.event_tuple._make([host, source, sourcetype, time, index, - raw_data, is_unbroken, is_done]) + raise Exception("is_unbroken=False is_done=True is invalid") + return tdc.event_tuple._make( + [host, source, sourcetype, time, index, raw_data, is_unbroken, is_done] + ) -class TaDataClient(object): - def __init__(self, - meta_config, - task_config, - checkpoint_mgr=None, - event_writer=None): +class TaDataClient: + def __init__( + self, meta_config, task_config, checkpoint_mgr=None, event_writer=None + ): self._meta_config = meta_config self._task_config = task_config self._checkpoint_mgr = checkpoint_mgr @@ -41,24 +55,24 @@ def get(self): raise StopIteration -def create_data_collector(dataloader, - tconfig, - meta_configs, - task_config, - data_client_cls, - checkpoint_cls=None): +def create_data_collector( + dataloader, tconfig, meta_configs, task_config, data_client_cls, checkpoint_cls=None +): checkpoint_manager_cls = checkpoint_cls or cp.TACheckPointMgr - return tdc.TADataCollector(tconfig, meta_configs, task_config, - checkpoint_manager_cls, data_client_cls, - dataloader) + return tdc.TADataCollector( + tconfig, + meta_configs, + task_config, + checkpoint_manager_cls, + data_client_cls, + dataloader, + ) def client_adatper(job_func): class TaDataClientAdapter(TaDataClient): - def __init__(self, all_conf_contents, meta_config, task_config, - chp_mgr): - super(TaDataClientAdapter, self).__init__(meta_config, task_config, - chp_mgr) + def __init__(self, all_conf_contents, meta_config, task_config, chp_mgr): + super().__init__(meta_config, task_config, chp_mgr) self._execute_times = 0 self._gen = job_func(self._task_config, chp_mgr) @@ -68,7 +82,7 @@ def stop(self): """ # normaly base class just set self._stop as True - super(TaDataClientAdapter, self).stop() + super().stop() def get(self): """ diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/splunktacollectorlib/data_collection/ta_data_collector.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/splunktacollectorlib/data_collection/ta_data_collector.py index 2c2c4eed..7d323d6d 100755 --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/splunktacollectorlib/data_collection/ta_data_collector.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/splunktacollectorlib/data_collection/ta_data_collector.py @@ -1,49 +1,83 @@ #!/usr/bin/python -from __future__ import absolute_import -from builtins import object +# +# Copyright 2021 Splunk Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
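build_event above rejects exactly one combination, is_unbroken=False together with is_done=True, because a done marker only makes sense as the terminator of an unbroken event stream. A short, self-contained usage sketch of the same function and event_tuple as defined in these hunks (the sample values are illustrative):

```python
# Sketch only: build_event and event_tuple are copied from the diff above;
# the sample payloads are made up for illustration.
from collections import namedtuple

event_tuple = namedtuple(
    "Event",
    ["host", "source", "sourcetype", "time", "index",
     "raw_data", "is_unbroken", "is_done"],
)


def build_event(host=None, source=None, sourcetype=None, time=None,
                index=None, raw_data="", is_unbroken=False, is_done=False):
    if is_unbroken is False and is_done is True:
        raise Exception("is_unbroken=False is_done=True is invalid")
    return event_tuple._make(
        [host, source, sourcetype, time, index, raw_data, is_unbroken, is_done]
    )


# A broken (self-contained) event: no done marker needed.
evt = build_event(sourcetype="minemeld_feed", raw_data='{"indicator": "1.2.3.4"}')

# An unbroken stream: intermediate chunks, then a final chunk with is_done=True.
chunk = build_event(raw_data="partial...", is_unbroken=True)
final = build_event(raw_data="end", is_unbroken=True, is_done=True)
```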
+#
 import threading
 import time
 from collections import namedtuple
 
-from . import ta_consts as c
+from splunktalib.common import util as scu
+
 from ..common import log as stulog
-from ...splunktalib.common import util as scu
-
-evt_fmt = ("<stream><event><host>{0}</host>"
-           "<source><![CDATA[{1}]]></source>"
-           "<sourcetype><![CDATA[{2}]]></sourcetype>"
-           "<time>{3}</time>"
-           "<index>{4}</index><data>"
-           "<![CDATA[{5}]]></data></event></stream>")
-
-unbroken_evt_fmt = ("<stream>"
-                    "<event unbroken=\"1\">"
-                    "<host>{0}</host>"
-                    "<source><![CDATA[{1}]]></source>"
-                    "<sourcetype><![CDATA[{2}]]></sourcetype>"
-                    "<time>{3}</time>"
-                    "<index>{4}</index>"
-                    "<data><![CDATA[{5}]]></data>"
-                    "{6}"
-                    "</event>"
-                    "</stream>")
-
-event_tuple = namedtuple('Event',
-                         ['host', 'source', 'sourcetype', 'time', 'index',
-                          'raw_data', 'is_unbroken', 'is_done'])
-
-
-class TADataCollector(object):
-    def __init__(self, tconfig, meta_config, task_config,
-                 checkpoint_manager_cls, data_client_cls, data_loader):
+from . import ta_consts as c
+
+evt_fmt = (
+    "<stream><event><host>{0}</host>"
+    "<source><![CDATA[{1}]]></source>"
+    "<sourcetype><![CDATA[{2}]]></sourcetype>"
+    "<time>{3}</time>"
+    "<index>{4}</index><data>"
+    "<![CDATA[{5}]]></data></event></stream>"
+)
+
+unbroken_evt_fmt = (
+    "<stream>"
+    '<event unbroken="1">'
+    "<host>{0}</host>"
+    "<source><![CDATA[{1}]]></source>"
+    "<sourcetype><![CDATA[{2}]]></sourcetype>"
+    "<time>{3}</time>"
+    "<index>{4}</index>"
+    "<data><![CDATA[{5}]]></data>"
+    "{6}"
+    "</event>"
+    "</stream>"
+)
+
+event_tuple = namedtuple(
+    "Event",
+    [
+        "host",
+        "source",
+        "sourcetype",
+        "time",
+        "index",
+        "raw_data",
+        "is_unbroken",
+        "is_done",
+    ],
+)
+
+
+class TADataCollector:
+    def __init__(
+        self,
+        tconfig,
+        meta_config,
+        task_config,
+        checkpoint_manager_cls,
+        data_client_cls,
+        data_loader,
+    ):
         self._lock = threading.Lock()
         self._ta_config = tconfig
         self._meta_config = meta_config
         self._task_config = task_config
         self._stopped = False
         self._p = self._get_logger_prefix()
-        self._checkpoint_manager = checkpoint_manager_cls(meta_config,
-                                                          task_config)
+        self._checkpoint_manager = checkpoint_manager_cls(meta_config, task_config)
         self.data_client_cls = data_client_cls
         self._data_loader = data_loader
         self._client = None
@@ -58,8 +92,7 @@ def get_interval(self):
         return self._task_config[c.interval]
 
     def _get_logger_prefix(self):
-        pairs = ['{}="{}"'.format(c.stanza_name, self._task_config[
-            c.stanza_name])]
+        pairs = [f'{c.stanza_name}="{self._task_config[c.stanza_name]}"']
         return "[{}]".format(" ".join(pairs))
 
     def stop(self):
@@ -80,38 +113,49 @@ def _build_event(self, events):
             assert event.raw_data, "the raw data of events is empty"
             if event.is_unbroken:
                 evt = unbroken_evt_fmt.format(
-                    event.host or "", event.source or "", event.sourcetype or
-                    "", event.time or "", event.index or "",
-                    scu.escape_cdata(event.raw_data), "<done/>" if
-                    event.is_done else "")
+                    event.host or "",
+                    event.source or "",
+                    event.sourcetype or "",
+                    event.time or "",
+                    event.index or "",
+                    scu.escape_cdata(event.raw_data),
+                    "<done/>" if event.is_done else "",
+                )
             else:
-                evt = evt_fmt.format(event.host or "", event.source or "",
-                                     event.sourcetype or "", event.time or "",
-                                     event.index or "",
-                                     scu.escape_cdata(event.raw_data))
+                evt = evt_fmt.format(
+                    event.host or "",
+                    event.source or "",
+                    event.sourcetype or "",
+                    event.time or "",
+                    event.index or "",
+                    scu.escape_cdata(event.raw_data),
+                )
             evts.append(evt)
         return evts
 
     def _create_data_client(self):
-        return self.data_client_cls(self._meta_config,
-                                    self._task_config,
-                                    self._checkpoint_manager,
-                                    self._data_loader.get_event_writer())
+        return self.data_client_cls(
+            self._meta_config,
+            self._task_config,
+            self._checkpoint_manager,
+            self._data_loader.get_event_writer(),
+        )
 
     def index_data(self):
         if self._lock.locked():
             stulog.logger.debug(
                 "Last round of stanza={} is not done yet".format(
-                    self._task_config[c.stanza_name]))
+                    self._task_config[c.stanza_name]
+                )
+            )
             return
         with self._lock:
             try:
                 self._do_safe_index()
                 self._checkpoint_manager.close()
             except Exception:
-                stulog.logger.exception("{} Failed to index data"
-                                        .format(self._p))
-            stulog.logger.info("{} End of indexing data".format(self._p))
+                stulog.logger.exception(f"{self._p} Failed to index
data") + stulog.logger.info(f"{self._p} End of indexing data") if not self._ta_config.is_single_instance(): self._data_loader.tear_down() @@ -119,9 +163,10 @@ def _write_events(self, events): evts = self._build_event(events) if evts: if not self._data_loader.write_events(evts): - stulog.logger.info("{} the event queue is closed and the " - "received data will be discarded".format( - self._p)) + stulog.logger.info( + "{} the event queue is closed and the " + "received data will be discarded".format(self._p) + ) return False return True @@ -136,10 +181,10 @@ def _do_safe_index(self): if not self._write_events(events): break except StopIteration: - stulog.logger.info("{} Finished this round".format(self._p)) + stulog.logger.info(f"{self._p} Finished this round") return except Exception: - stulog.logger.exception("{} Failed to get msg".format(self._p)) + stulog.logger.exception(f"{self._p} Failed to get msg") break # in case encounter exception or fail to write events if not self._stopped: diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/splunktacollectorlib/data_collection/ta_data_loader.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/splunktacollectorlib/data_collection/ta_data_loader.py index 2a1eb848..3ad8d5d2 100755 --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/splunktacollectorlib/data_collection/ta_data_loader.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/splunktacollectorlib/data_collection/ta_data_loader.py @@ -1,22 +1,35 @@ +# +# Copyright 2021 Splunk Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# """ Data Loader main entry point """ - - -from future import standard_library -standard_library.install_aliases() -from builtins import object -import queue -import os.path as op import configparser +import os.path as op +import queue + +from solnlib import log +from solnlib import timer_queue as tq +from splunktalib.concurrent import concurrent_executor as ce +from splunktalib.schedule import job as sjob -from ...splunktalib.concurrent import concurrent_executor as ce -from ...splunktalib import timer_queue as tq -from ...splunktalib.schedule import job as sjob -from ...splunktalib.common import log +# Global logger +logger = log.Logs().get_logger("util") -class TADataLoader(object): +class TADataLoader: """ Data Loader boots all underlying facilities to handle data collection """ @@ -48,7 +61,7 @@ def run(self, jobs): self._executor.start() self._timer_queue.start() self._scheduler.start() - log.logger.info("TADataLoader started.") + logger.info("TADataLoader started.") def _enqueue_io_job(job): job_props = job.get_props() @@ -56,8 +69,7 @@ def _enqueue_io_job(job): self.run_io_jobs((real_job,)) for job in jobs: - j = sjob.Job(_enqueue_io_job, {"real_job": job}, - job.get_interval()) + j = sjob.Job(_enqueue_io_job, {"real_job": job}, job.get_interval()) self._scheduler.add_jobs((j,)) self._wait_for_tear_down() @@ -66,10 +78,10 @@ def _enqueue_io_job(job): job.stop() self._scheduler.tear_down() - self._timer_queue.tear_down() + self._timer_queue.stop() self._executor.tear_down() self._event_writer.tear_down() - log.logger.info("DataLoader stopped.") + logger.info("DataLoader stopped.") def _wait_for_tear_down(self): wakeup_q = self._wakeup_queue @@ -80,13 +92,13 @@ def _wait_for_tear_down(self): pass else: if go_exit: - log.logger.info("DataLoader got stop signal") + logger.info("DataLoader got stop signal") self._stopped = True break def tear_down(self): self._wakeup_queue.put(True) - log.logger.info("DataLoader is going to stop.") + logger.info("DataLoader is going to stop.") def stopped(self): return self._stopped @@ -102,8 +114,7 @@ def run_compute_job_async(self, func, args=(), kwargs={}, callback=None): @return: AsyncResult """ - return self._executor.run_compute_func_async(func, args, - kwargs, callback) + return self._executor.run_compute_func_async(func, args, kwargs, callback) def add_timer(self, callback, when, interval): return self._timer_queue.add_timer(callback, when, interval) @@ -120,12 +131,11 @@ def get_event_writer(self): @staticmethod def _read_default_settings(): cur_dir = op.dirname(op.abspath(__file__)) - setting_file = op.join(cur_dir,"../../","splunktalib", "setting.conf") + setting_file = op.join(cur_dir, "../../../", "splunktalib", "setting.conf") parser = configparser.ConfigParser() parser.read(setting_file) settings = {} - keys = ("process_size", "thread_min_size", "thread_max_size", - "task_queue_size") + keys = ("process_size", "thread_min_size", "thread_max_size", "task_queue_size") for option in keys: try: settings[option] = parser.get("global", option) @@ -136,20 +146,19 @@ def _read_default_settings(): settings[option] = int(settings[option]) except ValueError: settings[option] = -1 - log.logger.debug("settings: %s", settings) + logger.debug("settings: %s", settings) return settings -class GlobalDataLoader(object): - """ Singleton, inited when started""" +class GlobalDataLoader: + """Singleton, inited when started""" __instance = None @staticmethod def get_data_loader(scheduler, writer): if GlobalDataLoader.__instance is None: - 
GlobalDataLoader.__instance = TADataLoader( - scheduler, writer) + GlobalDataLoader.__instance = TADataLoader(scheduler, writer) return GlobalDataLoader.__instance @staticmethod @@ -162,8 +171,8 @@ def create_data_loader(): create a data loader with default event_writer, job_scheudler """ - from ...splunktalib import event_writer as ew - from ...splunktalib.schedule import scheduler as sched + from splunktalib import event_writer as ew + from splunktalib.schedule import scheduler as sched writer = ew.EventWriter() scheduler = sched.Scheduler() diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/splunktacollectorlib/data_collection/ta_helper.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/splunktacollectorlib/data_collection/ta_helper.py index e245f319..bb9ddfa2 100755 --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/splunktacollectorlib/data_collection/ta_helper.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/splunktacollectorlib/data_collection/ta_helper.py @@ -1,26 +1,35 @@ -import six -from builtins import object +# +# Copyright 2021 Splunk Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# import hashlib import json import os.path as op import re from calendar import timegm from datetime import datetime +from functools import lru_cache -import sys -if sys.version_info[0] >= 3: - from functools import lru_cache -else: - from functools32 import lru_cache - +from splunktalib.common import util from splunktaucclib.global_config import GlobalConfig, GlobalConfigSchema -from . import ta_consts as c + from ...splunktacollectorlib import config as sc -from ...splunktalib.common import util +from . 
import ta_consts as c def utc2timestamp(human_time): - regex1 = "\d{4}-\d{2}-\d{2}.\d{2}:\d{2}:\d{2}" + regex1 = r"\d{4}-\d{2}-\d{2}.\d{2}:\d{2}:\d{2}" match = re.search(regex1, human_time) if match: formated = match.group() @@ -30,7 +39,7 @@ def utc2timestamp(human_time): strped_time = datetime.strptime(formated, c.time_fmt) timestamp = timegm(strped_time.utctimetuple()) - regex2 = "\d{4}-\d{2}-\d{2}.\d{2}:\d{2}:\d{2}(\.\d+)" + regex2 = r"\d{4}-\d{2}-\d{2}.\d{2}:\d{2}:\d{2}(\.\d+)" match = re.search(regex2, human_time) if match: timestamp += float(match.group(1)) @@ -46,17 +55,15 @@ def get_md5(data): :return: """ assert data is not None, "The input cannot be None" - if isinstance(data, six.string_types): - return hashlib.sha256(data.encode('utf-8')).hexdigest() + if isinstance(data, str): + return hashlib.sha256(data.encode("utf-8")).hexdigest() elif isinstance(data, (list, tuple, dict)): - return hashlib.sha256(json.dumps(data).encode('utf-8')).hexdigest() + return hashlib.sha256(json.dumps(data).encode("utf-8")).hexdigest() def get_all_conf_contents(server_uri, sessionkey, settings, input_type=None): schema = GlobalConfigSchema(settings) - global_config = GlobalConfig( - server_uri, sessionkey, schema - ) + global_config = GlobalConfig(server_uri, sessionkey, schema) inputs = global_config.inputs.load(input_type=input_type) configs = global_config.configs.load() settings = global_config.settings.load() @@ -65,10 +72,10 @@ def get_all_conf_contents(server_uri, sessionkey, settings, input_type=None): @lru_cache(maxsize=64) def format_name_for_file(name): - return hashlib.sha256(name.encode('utf-8')).hexdigest() + return hashlib.sha256(name.encode("utf-8")).hexdigest() -class ConfigSchemaHandler(object): +class ConfigSchemaHandler: _app_name = util.get_appname_from_path(op.abspath(__file__)) # Division schema keys. 
TYPE = "type" @@ -79,12 +86,13 @@ class ConfigSchemaHandler(object): SEPARATOR = "separator" def __init__(self, meta_configs, client_schema): - self._config = sc.Config(splunkd_uri=meta_configs[c.server_uri], - session_key=meta_configs[c.session_key], - schema=json.dumps(client_schema[ - c.config]), - user="nobody", - app=ConfigSchemaHandler._app_name) + self._config = sc.Config( + splunkd_uri=meta_configs[c.server_uri], + session_key=meta_configs[c.session_key], + schema=json.dumps(client_schema[c.config]), + user="nobody", + app=ConfigSchemaHandler._app_name, + ) self._client_schema = client_schema self._all_conf_contents = {} self._load_conf_contents() @@ -107,7 +115,8 @@ def _divide_settings(self): division_settings = dict() for division_endpoint, division_contents in division_schema.items(): division_settings[division_endpoint] = self._process_division( - division_endpoint, division_contents) + division_endpoint, division_contents + ) return division_settings def _load_conf_contents(self): @@ -118,26 +127,32 @@ def _process_division(self, division_endpoint, division_contents): assert isinstance(division_contents, dict) for division_key, division_value in division_contents.items(): try: - assert self.TYPE in division_value and \ - division_value[self.TYPE] in \ - [self.TYPE_SINGLE, self.TYPE_MULTI] and \ - self.SEPARATOR in division_value if \ - division_value[self.TYPE] == self.TYPE_MULTI else True + assert ( + self.TYPE in division_value + and division_value[self.TYPE] in [self.TYPE_SINGLE, self.TYPE_MULTI] + and self.SEPARATOR in division_value + if division_value[self.TYPE] == self.TYPE_MULTI + else True + ) except Exception: raise Exception("Invalid division schema") - division_metrics.append(DivisionRule(division_endpoint, - division_key, - division_value[self.TYPE], - division_value.get( - self.SEPARATOR, - ), - division_value.get( - self.REFER, - ))) + division_metrics.append( + DivisionRule( + division_endpoint, + division_key, + division_value[self.TYPE], + division_value.get( + self.SEPARATOR, + ), + division_value.get( + self.REFER, + ), + ) + ) return division_metrics -class DivisionRule(object): +class DivisionRule: def __init__(self, endpoint, metric, type, separator, refer): self._endpoint = endpoint self._metric = metric diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/splunktacollectorlib/data_collection/ta_mod_input.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/splunktacollectorlib/data_collection/ta_mod_input.py index 42c3711b..654d7b39 100755 --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/splunktacollectorlib/data_collection/ta_mod_input.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/splunktacollectorlib/data_collection/ta_mod_input.py @@ -1,27 +1,41 @@ #!/usr/bin/python +# +# Copyright 2021 Splunk Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
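ta_helper.format_name_for_file above is what the checkpoint manager and the per-stanza logger suffix both rely on: any namespace or stanza name is reduced to a fixed-width, filesystem-safe sha256 digest, memoized with lru_cache because it runs on every checkpoint access. The same idea in isolation; SEPARATOR mirrors TACheckPointMgr.SEPARATOR, checkpoint_key is an illustrative helper:

```python
# Sketch only: same hashing scheme as ta_helper.format_name_for_file
# (sha256 of the UTF-8 name, memoized). SEPARATOR matches
# TACheckPointMgr.SEPARATOR in the diff; checkpoint_key is illustrative.
import hashlib
from functools import lru_cache

SEPARATOR = "_" * 3


@lru_cache(maxsize=64)
def format_name_for_file(name: str) -> str:
    return hashlib.sha256(name.encode("utf-8")).hexdigest()


def checkpoint_key(namespaces) -> str:
    """Join namespaces the way _key_formatter does, then hash for a filename."""
    return format_name_for_file(SEPARATOR.join(namespaces))


# Same input -> same checkpoint file, regardless of characters in the stanza name.
assert checkpoint_key(("minemeld_feed://corp", "inputs")) == checkpoint_key(
    ("minemeld_feed://corp", "inputs")
)
```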
+#
 """
 This is the main entry point for My TA
 """
-from __future__ import print_function
-
-from builtins import str
 import os.path as op
 import platform
 import sys
 import time
 
+from solnlib import file_monitor as fm
+from solnlib import orphan_process_monitor as opm
+from solnlib import utils
+from splunktalib import modinput
+from splunktalib.common import util as sc_util
+
+from ...common.lib_util import get_app_root_dir, get_mod_input_script_name
+from ..common import load_schema_file as ld
+from ..common import log as stulog
 from . import ta_checkpoint_manager as cpmgr
 from . import ta_config as tc
 from . import ta_data_client as tdc
 from . import ta_data_loader as dl
-from ..common import load_schema_file as ld
-from ..common import log as stulog
-from ...common.lib_util import get_app_root_dir, get_mod_input_script_name
-from ...splunktalib import file_monitor as fm
-from ...splunktalib import modinput
-from ...splunktalib import orphan_process_monitor as opm
-from ...splunktalib.common import util as utils
 
 utils.remove_http_proxy_env_vars()
 
@@ -29,17 +43,22 @@
 
 
 def do_scheme(
-        mod_input_name,
-        schema_para_list=None,
-        single_instance=True,
+    mod_input_name,
+    schema_para_list=None,
+    single_instance=True,
 ):
     """
     Feed splunkd the TA's scheme
 
     """
     builtin_names = {
-        "name", "index", "sourcetype", "host", "source",
-        "disabled", "interval"
+        "name",
+        "index",
+        "sourcetype",
+        "host",
+        "source",
+        "disabled",
+        "interval",
     }
 
     param_string_list = []
@@ -57,13 +76,18 @@ def do_scheme(
       <required_on_edit>0</required_on_edit>
       <required_on_create>0</required_on_create>
-    """.format(param=param)
+    """.format(
+                param=param
+            )
         )
 
-    description = ("Go to the add-on's configuration UI and configure"
-                   " modular inputs under the Inputs menu.")
+    description = (
+        "Go to the add-on's configuration UI and configure"
+        " modular inputs under the Inputs menu."
+    )
 
-    print("""
+    print(
+        """
    <title>{data_input_title}</title>
    <description>{description}</description>
@@ -80,11 +104,12 @@
    """.format(
-        single_instance=(str(single_instance)).lower(),
-        data_input_title=mod_input_name,
-        param_str=''.join(param_string_list),
-        description=description,
-    ))
+            single_instance=(str(single_instance)).lower(),
+            data_input_title=mod_input_name,
+            param_str="".join(param_string_list),
+            description=description,
+        )
+    )
 
 
 def _setup_signal_handler(data_loader, ta_short_name):
@@ -94,11 +119,11 @@ def _setup_signal_handler(data_loader, ta_short_name):
     """
 
     def _handle_exit(signum, frame):
-        stulog.logger.info("{} receives exit signal".format(ta_short_name))
+        stulog.logger.info(f"{ta_short_name} receives exit signal")
         if data_loader is not None:
             data_loader.tear_down()
 
-    utils.handle_tear_down_signals(_handle_exit)
+    utils.handle_teardown_signals(_handle_exit)
 
 
 def _handle_file_changes(data_loader):
@@ -107,8 +132,7 @@ def _handle_file_changes(data_loader):
     """
 
    def _handle_refresh(changed_files):
-        stulog.logger.info("Detect {} changed, reboot itself".format(
-            changed_files))
+        stulog.logger.info(f"Detect {changed_files} changed, reboot itself")
         data_loader.tear_down()
 
     return _handle_refresh
@@ -127,18 +151,25 @@ def _get_conf_files(settings):
     return [op.join(ta_dir, "local", f) for f in file_list]
 
 
-def run(collector_cls, settings, checkpoint_cls=None, config_cls=None,
-        log_suffix=None, single_instance=True, cc_json_file=None):
+def run(
+    collector_cls,
+    settings,
+    checkpoint_cls=None,
+    config_cls=None,
+    log_suffix=None,
+    single_instance=True,
+    cc_json_file=None,
+):
     """
     Main loop.
Run this TA forever """ ta_short_name = settings["meta"]["name"].lower() # This is for stdout flush - utils.disable_stdout_buffer() + sc_util.disable_stdout_buffer() # http://bugs.python.org/issue7980 - time.strptime('2016-01-01', '%Y-%m-%d') + time.strptime("2016-01-01", "%Y-%m-%d") loader = dl.create_data_loader() @@ -147,8 +178,9 @@ def run(collector_cls, settings, checkpoint_cls=None, config_cls=None, # monitor files to reboot try: - monitor = fm.FileMonitor(_handle_file_changes(loader), - _get_conf_files(settings)) + monitor = fm.FileMonitor( + _handle_file_changes(loader), _get_conf_files(settings) + ) loader.add_timer(monitor.check_changes, time.time(), 10) except Exception: stulog.logger.exception("Fail to add files for monitoring") @@ -157,8 +189,9 @@ def run(collector_cls, settings, checkpoint_cls=None, config_cls=None, orphan_checker = opm.OrphanProcessChecker(loader.tear_down) loader.add_timer(orphan_checker.check_orphan, time.time(), 1) - tconfig = tc.create_ta_config(settings, config_cls or tc.TaConfig, - log_suffix, single_instance=single_instance) + tconfig = tc.create_ta_config( + settings, config_cls or tc.TaConfig, log_suffix, single_instance=single_instance + ) task_configs = tconfig.get_task_configs() if not task_configs: @@ -169,17 +202,19 @@ def run(collector_cls, settings, checkpoint_cls=None, config_cls=None, if tconfig.is_shc_member(): # Don't support SHC env - stulog.logger.error("This host is in search head cluster environment , " - "will exit.") + stulog.logger.error( + "This host is in search head cluster environment , " "will exit." + ) return # In this case, use file for checkpoint - if _is_checkpoint_dir_length_exceed_limit(tconfig, - meta_config["checkpoint_dir"]): - stulog.logger.error("The length of the checkpoint directory path: '{}' " - "is too long. The max length we support is {}", - meta_config["checkpoint_dir"], - __CHECKPOINT_DIR_MAX_LEN__) + if _is_checkpoint_dir_length_exceed_limit(tconfig, meta_config["checkpoint_dir"]): + stulog.logger.error( + "The length of the checkpoint directory path: '{}' " + "is too long. 
The max length we support is {}", + meta_config["checkpoint_dir"], + __CHECKPOINT_DIR_MAX_LEN__, + ) return jobs = [ @@ -189,18 +224,20 @@ def run(collector_cls, settings, checkpoint_cls=None, config_cls=None, meta_config, task_config, collector_cls, - checkpoint_cls=checkpoint_cls or cpmgr.TACheckPointMgr + checkpoint_cls=checkpoint_cls or cpmgr.TACheckPointMgr, ) for task_config in task_configs - ] + ] loader.run(jobs) def _is_checkpoint_dir_length_exceed_limit(config, checkpoint_dir): - return platform.system() == 'Windows' \ - and not config.is_search_head() \ - and len(checkpoint_dir) >= __CHECKPOINT_DIR_MAX_LEN__ + return ( + platform.system() == "Windows" + and not config.is_search_head() + and len(checkpoint_dir) >= __CHECKPOINT_DIR_MAX_LEN__ + ) def validate_config(): @@ -223,14 +260,14 @@ def usage(): def main( - collector_cls, - schema_file_path, - log_suffix="modinput", - checkpoint_cls=None, - config_cls=None, - cc_json_file=None, - schema_para_list=None, - single_instance=True + collector_cls, + schema_file_path, + log_suffix="modinput", + checkpoint_cls=None, + config_cls=None, + cc_json_file=None, + schema_para_list=None, + single_instance=True, ): """ Main entry point @@ -248,7 +285,7 @@ def main( do_scheme( mod_input_name=mod_input_name, schema_para_list=schema_para_list, - single_instance=single_instance + single_instance=single_instance, ) elif args[1] == "--validate-arguments": sys.exit(validate_config()) @@ -265,10 +302,9 @@ def main( config_cls=config_cls, log_suffix=log_suffix, single_instance=single_instance, - cc_json_file=cc_json_file + cc_json_file=cc_json_file, ) except Exception: - stulog.logger.exception( - "{} task encounter exception".format(mod_input_name)) - stulog.logger.info("End {} task".format(mod_input_name)) + stulog.logger.exception(f"{mod_input_name} task encounter exception") + stulog.logger.info(f"End {mod_input_name} task") sys.exit(0) diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/splunktacollectorlib/splunk_ta_import_declare.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/splunktacollectorlib/splunk_ta_import_declare.py index 7290cfd8..9aa813c7 100755 --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/splunktacollectorlib/splunk_ta_import_declare.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/splunktacollectorlib/splunk_ta_import_declare.py @@ -1,17 +1,30 @@ +# +# Copyright 2021 Splunk Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# """ This module is used to filter and reload PATH. 
""" import os -import sys import re +import sys ta_name = os.path.basename(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) -ta_lib_name = re.sub("[^\w]+", "_", ta_name.lower()) +ta_lib_name = re.sub(r"[^\w]+", "_", ta_name.lower()) assert ta_name or ta_name == "package", "TA name is None or package" pattern = re.compile(r"[\\/]etc[\\/]apps[\\/][^\\/]+[\\/]bin[\\/]?$") new_paths = [path for path in sys.path if not pattern.search(path) or ta_name in path] new_paths.insert(0, os.path.sep.join([os.path.dirname(__file__), ta_lib_name])) sys.path = new_paths - - diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/splunktacollectorlib/ta_cloud_connect_client.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/splunktacollectorlib/ta_cloud_connect_client.py index 4e5df1ca..42d2dcb6 100755 --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/splunktacollectorlib/ta_cloud_connect_client.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/cloudconnectlib/splunktacollectorlib/ta_cloud_connect_client.py @@ -1,33 +1,40 @@ -from .data_collection.ta_data_client import TaDataClient +# +# Copyright 2021 Splunk Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from ..common.log import set_cc_logger from ..splunktacollectorlib.common import log as stulog from ..splunktacollectorlib.data_collection import ta_consts as c -from ..common.log import set_cc_logger +from .data_collection.ta_data_client import TaDataClient class TACloudConnectClient(TaDataClient): - def __init__(self, - meta_config, - task_config, - checkpoint_mgr=None, - event_writer=None - ): - super(TACloudConnectClient, self).__init__(meta_config, - task_config, - checkpoint_mgr, - event_writer) + def __init__( + self, meta_config, task_config, checkpoint_mgr=None, event_writer=None + ): + super().__init__(meta_config, task_config, checkpoint_mgr, event_writer) self._set_log() self._cc_config_file = self._meta_config["cc_json_file"] - from ..core.pipemgr import PipeManager from ..client import CloudConnectClient as Client + from ..core.pipemgr import PipeManager + self._pipe_mgr = PipeManager(event_writer=event_writer) - self._client = Client(self._task_config, self._cc_config_file, - checkpoint_mgr) + self._client = Client(self._task_config, self._cc_config_file, checkpoint_mgr) def _set_log(self): - pairs = ['{}="{}"'.format(c.stanza_name, self._task_config[ - c.stanza_name])] - set_cc_logger(stulog.logger, - logger_prefix="[{}]".format(" ".join(pairs))) + pairs = [f'{c.stanza_name}="{self._task_config[c.stanza_name]}"'] + set_cc_logger(stulog.logger, logger_prefix="[{}]".format(" ".join(pairs))) def is_stopped(self): return self._stop diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/decorator-4.4.2.dist-info/INSTALLER b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/decorator-4.4.2.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ 
b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/decorator-4.4.2.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/decorator-4.4.2.dist-info/LICENSE.txt b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/decorator-4.4.2.dist-info/LICENSE.txt new file mode 100644 index 00000000..b0ade048 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/decorator-4.4.2.dist-info/LICENSE.txt @@ -0,0 +1,26 @@ +Copyright (c) 2005-2018, Michele Simionato +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + Redistributions in bytecode form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in + the documentation and/or other materials provided with the + distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS +OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR +TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE +USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH +DAMAGE. 
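The sys.path filtering in splunk_ta_import_declare.py above is easier to see in isolation. A minimal standalone sketch of the same logic, with made-up Splunk paths standing in for the values the real module derives from its own location::

    import os
    import re

    ta_name = "Splunk_TA_paloalto"  # hypothetical; the real module derives this from __file__
    # Non-word characters collapse to "_" so the name is a valid package directory.
    ta_lib_name = re.sub(r"[^\w]+", "_", ta_name.lower())

    # Drop other apps' .../etc/apps/<app>/bin directories from sys.path,
    # keeping only entries that belong to this TA.
    pattern = re.compile(r"[\\/]etc[\\/]apps[\\/][^\\/]+[\\/]bin[\\/]?$")
    paths = [
        "/opt/splunk/etc/apps/Splunk_TA_paloalto/bin",  # kept: contains ta_name
        "/opt/splunk/etc/apps/other_app/bin",           # dropped by the pattern
        "/usr/lib/python3.9/site-packages",             # kept: not an apps/bin dir
    ]
    new_paths = [p for p in paths if not pattern.search(p) or ta_name in p]
    # The TA's bundled library directory goes first so its packages win imports.
    new_paths.insert(0, os.path.sep.join([paths[0], ta_lib_name]))
    print(new_paths)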
diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/decorator-4.4.2.dist-info/METADATA b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/decorator-4.4.2.dist-info/METADATA new file mode 100644 index 00000000..fd12277a --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/decorator-4.4.2.dist-info/METADATA @@ -0,0 +1,131 @@ +Metadata-Version: 2.1 +Name: decorator +Version: 4.4.2 +Summary: Decorators for Humans +Home-page: https://github.com/micheles/decorator +Author: Michele Simionato +Author-email: michele.simionato@gmail.com +License: new BSD License +Keywords: decorators generic utility +Platform: All +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Natural Language :: English +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.2 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Topic :: Software Development :: Libraries +Classifier: Topic :: Utilities +Requires-Python: >=2.6, !=3.0.*, !=3.1.* + +Decorators for Humans +===================== + +The goal of the decorator module is to make it easy to define +signature-preserving function decorators and decorator factories. +It also includes an implementation of multiple dispatch and other niceties +(please check the docs). It is released under a two-clauses +BSD license, i.e. basically you can do whatever you want with it but I am not +responsible. + +Installation +------------- + +If you are lazy, just perform + + ``$ pip install decorator`` + +which will install just the module on your system. + +If you prefer to install the full distribution from source, including +the documentation, clone the `GitHub repo`_ or download the tarball_, unpack it and run + + ``$ pip install .`` + +in the main directory, possibly as superuser. + +.. _tarball: https://pypi.org/project/decorator/#files +.. _GitHub repo: https://github.com/micheles/decorator + +Testing +-------- + +If you have the source code installation you can run the tests with + + `$ python src/tests/test.py -v` + +or (if you have setuptools installed) + + `$ python setup.py test` + +Notice that you may run into trouble if in your system there +is an older version of the decorator module; in such a case remove the +old version. It is safe even to copy the module `decorator.py` over +an existing one, since we kept backward-compatibility for a long time. + +Repository +--------------- + +The project is hosted on GitHub. You can look at the source here: + + https://github.com/micheles/decorator + +Documentation +--------------- + +The documentation has been moved to https://github.com/micheles/decorator/blob/master/docs/documentation.md + +From there you can get a PDF version by simply using the print +functionality of your browser. 
+ +Here is the documentation for previous versions of the module: + +https://github.com/micheles/decorator/blob/4.3.2/docs/tests.documentation.rst +https://github.com/micheles/decorator/blob/4.2.1/docs/tests.documentation.rst +https://github.com/micheles/decorator/blob/4.1.2/docs/tests.documentation.rst +https://github.com/micheles/decorator/blob/4.0.0/documentation.rst +https://github.com/micheles/decorator/blob/3.4.2/documentation.rst + +For the impatient +----------------- + +Here is an example of how to define a family of decorators tracing slow +operations: + +.. code-block:: python + + from decorator import decorator + + @decorator + def warn_slow(func, timelimit=60, *args, **kw): + t0 = time.time() + result = func(*args, **kw) + dt = time.time() - t0 + if dt > timelimit: + logging.warn('%s took %d seconds', func.__name__, dt) + else: + logging.info('%s took %d seconds', func.__name__, dt) + return result + + @warn_slow # warn if it takes more than 1 minute + def preprocess_input_files(inputdir, tempdir): + ... + + @warn_slow(timelimit=600) # warn if it takes more than 10 minutes + def run_calculation(tempdir, outdir): + ... + +Enjoy! + + diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/decorator-4.4.2.dist-info/RECORD b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/decorator-4.4.2.dist-info/RECORD new file mode 100644 index 00000000..23ed232f --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/decorator-4.4.2.dist-info/RECORD @@ -0,0 +1,8 @@ +decorator-4.4.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +decorator-4.4.2.dist-info/LICENSE.txt,sha256=_RFmDKvwUyCCxFcGhi-vwpSQfsf44heBgkCkmZgGeC4,1309 +decorator-4.4.2.dist-info/METADATA,sha256=RYLh5Qy8XzYOcgCT6RsI_cTXG_PE1QvoAVT-u2vus80,4168 +decorator-4.4.2.dist-info/RECORD,, +decorator-4.4.2.dist-info/WHEEL,sha256=h_aVn5OB2IERUjMbi2pucmR_zzWJtk303YXvhh60NJ8,110 +decorator-4.4.2.dist-info/pbr.json,sha256=AL84oUUWQHwkd8OCPhLRo2NJjU5MDdmXMqRHv-posqs,47 +decorator-4.4.2.dist-info/top_level.txt,sha256=Kn6eQjo83ctWxXVyBMOYt0_YpjRjBznKYVuNyuC_DSI,10 +decorator.py,sha256=aQ8Ozc-EK26xBTOXVR5A-8Szgx99_bhaexZSGNn38Yc,17222 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/decorator-4.4.2.dist-info/WHEEL b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/decorator-4.4.2.dist-info/WHEEL new file mode 100644 index 00000000..78e6f69d --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/decorator-4.4.2.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.33.4) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/decorator-4.4.2.dist-info/pbr.json b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/decorator-4.4.2.dist-info/pbr.json new file mode 100644 index 00000000..cd045997 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/decorator-4.4.2.dist-info/pbr.json @@ -0,0 +1 @@ +{"is_release": false, "git_version": "8608a46"} \ No newline at end of file diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/decorator-4.4.2.dist-info/top_level.txt b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/decorator-4.4.2.dist-info/top_level.txt new file mode 100644 index 00000000..3fe18a4d --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/decorator-4.4.2.dist-info/top_level.txt @@ -0,0 +1 @@ +decorator diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/decorator-5.1.1.dist-info/INSTALLER 
b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/decorator-5.1.1.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/decorator-5.1.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/decorator-5.1.1.dist-info/LICENSE.txt b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/decorator-5.1.1.dist-info/LICENSE.txt new file mode 100644 index 00000000..b0ade048 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/decorator-5.1.1.dist-info/LICENSE.txt @@ -0,0 +1,26 @@ +Copyright (c) 2005-2018, Michele Simionato +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + Redistributions in bytecode form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in + the documentation and/or other materials provided with the + distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS +OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR +TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE +USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH +DAMAGE. diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/decorator-5.1.1.dist-info/METADATA b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/decorator-5.1.1.dist-info/METADATA new file mode 100644 index 00000000..df407f80 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/decorator-5.1.1.dist-info/METADATA @@ -0,0 +1,127 @@ +Metadata-Version: 2.1 +Name: decorator +Version: 5.1.1 +Summary: Decorators for Humans +Home-page: https://github.com/micheles/decorator +Author: Michele Simionato +Author-email: michele.simionato@gmail.com +License: new BSD License +Keywords: decorators generic utility +Platform: All +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Natural Language :: English +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Topic :: Software Development :: Libraries +Classifier: Topic :: Utilities +Requires-Python: >=3.5 + +Decorators for Humans +===================== + +The goal of the decorator module is to make it easy to define +signature-preserving function decorators and decorator factories. 
+It also includes an implementation of multiple dispatch and other niceties +(please check the docs). It is released under a two-clauses +BSD license, i.e. basically you can do whatever you want with it but I am not +responsible. + +Installation +------------- + +If you are lazy, just perform + + ``$ pip install decorator`` + +which will install just the module on your system. + +If you prefer to install the full distribution from source, including +the documentation, clone the `GitHub repo`_ or download the tarball_, unpack it and run + + ``$ pip install .`` + +in the main directory, possibly as superuser. + +.. _tarball: https://pypi.org/project/decorator/#files +.. _GitHub repo: https://github.com/micheles/decorator + +Testing +-------- + +If you have the source code installation you can run the tests with + + `$ python src/tests/test.py -v` + +or (if you have setuptools installed) + + `$ python setup.py test` + +Notice that you may run into trouble if in your system there +is an older version of the decorator module; in such a case remove the +old version. It is safe even to copy the module `decorator.py` over +an existing one, since we kept backward-compatibility for a long time. + +Repository +--------------- + +The project is hosted on GitHub. You can look at the source here: + + https://github.com/micheles/decorator + +Documentation +--------------- + +The documentation has been moved to https://github.com/micheles/decorator/blob/master/docs/documentation.md + +From there you can get a PDF version by simply using the print +functionality of your browser. + +Here is the documentation for previous versions of the module: + +https://github.com/micheles/decorator/blob/4.3.2/docs/tests.documentation.rst +https://github.com/micheles/decorator/blob/4.2.1/docs/tests.documentation.rst +https://github.com/micheles/decorator/blob/4.1.2/docs/tests.documentation.rst +https://github.com/micheles/decorator/blob/4.0.0/documentation.rst +https://github.com/micheles/decorator/blob/3.4.2/documentation.rst + +For the impatient +----------------- + +Here is an example of how to define a family of decorators tracing slow +operations: + +.. code-block:: python + + from decorator import decorator + + @decorator + def warn_slow(func, timelimit=60, *args, **kw): + t0 = time.time() + result = func(*args, **kw) + dt = time.time() - t0 + if dt > timelimit: + logging.warn('%s took %d seconds', func.__name__, dt) + else: + logging.info('%s took %d seconds', func.__name__, dt) + return result + + @warn_slow # warn if it takes more than 1 minute + def preprocess_input_files(inputdir, tempdir): + ... + + @warn_slow(timelimit=600) # warn if it takes more than 10 minutes + def run_calculation(tempdir, outdir): + ... + +Enjoy! 
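A short sketch of the property that sets this module apart from plain functools.wraps, assuming the decorator package is installed (the timed and add names are made up for illustration)::

    import inspect
    import time

    from decorator import decorator

    @decorator
    def timed(func, *args, **kw):
        # Generic caller: run func and report the elapsed time.
        start = time.time()
        try:
            return func(*args, **kw)
        finally:
            print('%s took %.3f seconds' % (func.__name__, time.time() - start))

    @timed
    def add(x, y=1):
        return x + y

    print(add(2, 3))               # prints the timing line, then 5
    print(inspect.signature(add))  # (x, y=1) -- the signature survives wrapping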
+ + diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/decorator-5.1.1.dist-info/RECORD b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/decorator-5.1.1.dist-info/RECORD new file mode 100644 index 00000000..2115358c --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/decorator-5.1.1.dist-info/RECORD @@ -0,0 +1,8 @@ +decorator-5.1.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +decorator-5.1.1.dist-info/LICENSE.txt,sha256=_RFmDKvwUyCCxFcGhi-vwpSQfsf44heBgkCkmZgGeC4,1309 +decorator-5.1.1.dist-info/METADATA,sha256=XAr2zbYpRxCkcPbsmg1oaiS5ea7mhTq-j-wb0XjuVho,3955 +decorator-5.1.1.dist-info/RECORD,, +decorator-5.1.1.dist-info/WHEEL,sha256=ewwEueio1C2XeHTvT17n8dZUJgOvyCWCt0WVNLClP9o,92 +decorator-5.1.1.dist-info/pbr.json,sha256=AL84oUUWQHwkd8OCPhLRo2NJjU5MDdmXMqRHv-posqs,47 +decorator-5.1.1.dist-info/top_level.txt,sha256=Kn6eQjo83ctWxXVyBMOYt0_YpjRjBznKYVuNyuC_DSI,10 +decorator.py,sha256=el5cAEgoTEpRQN65tOxGhElue-CccMv0xol-J2MwOc0,16752 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/decorator-5.1.1.dist-info/WHEEL b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/decorator-5.1.1.dist-info/WHEEL new file mode 100644 index 00000000..5bad85fd --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/decorator-5.1.1.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/decorator-5.1.1.dist-info/pbr.json b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/decorator-5.1.1.dist-info/pbr.json new file mode 100644 index 00000000..cd045997 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/decorator-5.1.1.dist-info/pbr.json @@ -0,0 +1 @@ +{"is_release": false, "git_version": "8608a46"} \ No newline at end of file diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/decorator-5.1.1.dist-info/top_level.txt b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/decorator-5.1.1.dist-info/top_level.txt new file mode 100644 index 00000000..3fe18a4d --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/decorator-5.1.1.dist-info/top_level.txt @@ -0,0 +1 @@ +decorator diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/decorator.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/decorator.py old mode 100755 new mode 100644 index 78d227f3..2479b6f7 --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/decorator.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/decorator.py @@ -1,6 +1,6 @@ # ######################### LICENSE ############################ # -# Copyright (c) 2005-2018, Michele Simionato +# Copyright (c) 2005-2021, Michele Simionato # All rights reserved. # Redistribution and use in source and binary forms, with or without @@ -28,55 +28,26 @@ # DAMAGE. """ -Decorator module, see http://pypi.python.org/pypi/decorator +Decorator module, see +https://github.com/micheles/decorator/blob/master/docs/documentation.md for the documentation. 
""" -from __future__ import print_function - import re import sys import inspect import operator import itertools -import collections - -__version__ = '4.4.1' - -if sys.version >= '3': - from inspect import getfullargspec - - def get_init(cls): - return cls.__init__ -else: - FullArgSpec = collections.namedtuple( - 'FullArgSpec', 'args varargs varkw defaults ' - 'kwonlyargs kwonlydefaults annotations') - - def getfullargspec(f): - "A quick and dirty replacement for getfullargspec for Python 2.X" - return FullArgSpec._make(inspect.getargspec(f) + ([], None, {})) - - def get_init(cls): - return cls.__init__.__func__ - -try: - iscoroutinefunction = inspect.iscoroutinefunction -except AttributeError: - # let's assume there are no coroutine functions in old Python - def iscoroutinefunction(f): - return False -try: - from inspect import isgeneratorfunction -except ImportError: - # assume no generator function in old Python versions - def isgeneratorfunction(caller): - return False +from contextlib import _GeneratorContextManager +from inspect import getfullargspec, iscoroutinefunction, isgeneratorfunction +__version__ = '5.1.1' DEF = re.compile(r'\s*def\s*([_\w][_\w\d]*)\s*\(') +POS = inspect.Parameter.POSITIONAL_OR_KEYWORD +EMPTY = inspect.Parameter.empty -# basic functionality +# this is not used anymore in the core, but kept for backward compatibility class FunctionMaker(object): """ An object with the ability to create functions with a given signature. @@ -100,7 +71,7 @@ def __init__(self, func=None, name=None, signature=None, self.name = '_lambda_' self.doc = func.__doc__ self.module = func.__module__ - if inspect.isfunction(func): + if inspect.isroutine(func): argspec = getfullargspec(func) self.annotations = getattr(func, '__annotations__', {}) for a in ('args', 'varargs', 'varkw', 'defaults', 'kwonlyargs', @@ -143,7 +114,9 @@ def __init__(self, func=None, name=None, signature=None, raise TypeError('You are decorating a non function: %s' % func) def update(self, func, **kw): - "Update the signature of func with the data in self" + """ + Update the signature of func with the data in self + """ func.__name__ = self.name func.__doc__ = getattr(self, 'doc', None) func.__dict__ = getattr(self, 'dict', {}) @@ -160,7 +133,9 @@ def update(self, func, **kw): func.__dict__.update(kw) def make(self, src_templ, evaldict=None, addsource=False, **attrs): - "Make a new function from a given template and update the signature" + """ + Make a new function from a given template and update the signature + """ src = src_templ % vars(self) # expand name and signature evaldict = evaldict or {} mo = DEF.search(src) @@ -179,8 +154,7 @@ def make(self, src_templ, evaldict=None, addsource=False, **attrs): # Ensure each generated function has a unique filename for profilers # (such as cProfile) that depend on the tuple of (, # , ) being unique. - filename = '<%s:decorator-gen-%d>' % ( - __file__, next(self._compile_count)) + filename = '' % next(self._compile_count) try: code = compile(src, filename, 'single') exec(code, evaldict) @@ -222,105 +196,127 @@ def create(cls, obj, body, evaldict, defaults=None, return self.make(body, evaldict, addsource, **attrs) -def decorate(func, caller, extras=()): +def fix(args, kwargs, sig): """ - decorate(func, caller) decorates a function using a caller. - If the caller is a generator function, the resulting function - will be a generator function. 
+ Fix args and kwargs to be consistent with the signature """ - evaldict = dict(_call_=caller, _func_=func) - es = '' - for i, extra in enumerate(extras): - ex = '_e%d_' % i - evaldict[ex] = extra - es += ex + ', ' - - if '3.5' <= sys.version < '3.6': - # with Python 3.5 isgeneratorfunction returns True for all coroutines - # however we know that it is NOT possible to have a generator - # coroutine in python 3.5: PEP525 was not there yet - generatorcaller = isgeneratorfunction( - caller) and not iscoroutinefunction(caller) - else: - generatorcaller = isgeneratorfunction(caller) - if generatorcaller: - fun = FunctionMaker.create( - func, "for res in _call_(_func_, %s%%(shortsignature)s):\n" - " yield res" % es, evaldict, __wrapped__=func) + ba = sig.bind(*args, **kwargs) + ba.apply_defaults() # needed for test_dan_schult + return ba.args, ba.kwargs + + +def decorate(func, caller, extras=(), kwsyntax=False): + """ + Decorates a function/generator/coroutine using a caller. + If kwsyntax is True calling the decorated functions with keyword + syntax will pass the named arguments inside the ``kw`` dictionary, + even if such argument are positional, similarly to what functools.wraps + does. By default kwsyntax is False and the the arguments are untouched. + """ + sig = inspect.signature(func) + if iscoroutinefunction(caller): + async def fun(*args, **kw): + if not kwsyntax: + args, kw = fix(args, kw, sig) + return await caller(func, *(extras + args), **kw) + elif isgeneratorfunction(caller): + def fun(*args, **kw): + if not kwsyntax: + args, kw = fix(args, kw, sig) + for res in caller(func, *(extras + args), **kw): + yield res else: - fun = FunctionMaker.create( - func, "return _call_(_func_, %s%%(shortsignature)s)" % es, - evaldict, __wrapped__=func) - if hasattr(func, '__qualname__'): - fun.__qualname__ = func.__qualname__ + def fun(*args, **kw): + if not kwsyntax: + args, kw = fix(args, kw, sig) + return caller(func, *(extras + args), **kw) + fun.__name__ = func.__name__ + fun.__doc__ = func.__doc__ + fun.__wrapped__ = func + fun.__signature__ = sig + fun.__qualname__ = func.__qualname__ + # builtin functions like defaultdict.__setitem__ lack many attributes + try: + fun.__defaults__ = func.__defaults__ + except AttributeError: + pass + try: + fun.__kwdefaults__ = func.__kwdefaults__ + except AttributeError: + pass + try: + fun.__annotations__ = func.__annotations__ + except AttributeError: + pass + try: + fun.__module__ = func.__module__ + except AttributeError: + pass + try: + fun.__dict__.update(func.__dict__) + except AttributeError: + pass return fun -def decorator(caller, _func=None): - """decorator(caller) converts a caller function into a decorator""" +def decoratorx(caller): + """ + A version of "decorator" implemented via "exec" and not via the + Signature object. Use this if you are want to preserve the `.__code__` + object properties (https://github.com/micheles/decorator/issues/129). 
+ """ + def dec(func): + return FunctionMaker.create( + func, + "return _call_(_func_, %(shortsignature)s)", + dict(_call_=caller, _func_=func), + __wrapped__=func, __qualname__=func.__qualname__) + return dec + + +def decorator(caller, _func=None, kwsyntax=False): + """ + decorator(caller) converts a caller function into a decorator + """ if _func is not None: # return a decorated function # this is obsolete behavior; you should use decorate instead - return decorate(_func, caller) + return decorate(_func, caller, (), kwsyntax) # else return a decorator function - defaultargs, defaults = '', () - if inspect.isclass(caller): - name = caller.__name__.lower() - doc = 'decorator(%s) converts functions/generators into ' \ - 'factories of %s objects' % (caller.__name__, caller.__name__) - elif inspect.isfunction(caller): - if caller.__name__ == '': - name = '_lambda_' + sig = inspect.signature(caller) + dec_params = [p for p in sig.parameters.values() if p.kind is POS] + + def dec(func=None, *args, **kw): + na = len(args) + 1 + extras = args + tuple(kw.get(p.name, p.default) + for p in dec_params[na:] + if p.default is not EMPTY) + if func is None: + return lambda func: decorate(func, caller, extras, kwsyntax) else: - name = caller.__name__ - doc = caller.__doc__ - nargs = caller.__code__.co_argcount - ndefs = len(caller.__defaults__ or ()) - defaultargs = ', '.join(caller.__code__.co_varnames[nargs-ndefs:nargs]) - if defaultargs: - defaultargs += ',' - defaults = caller.__defaults__ - else: # assume caller is an object with a __call__ method - name = caller.__class__.__name__.lower() - doc = caller.__call__.__doc__ - evaldict = dict(_call=caller, _decorate_=decorate) - dec = FunctionMaker.create( - '%s(func, %s)' % (name, defaultargs), - 'if func is None: return lambda func: _decorate_(func, _call, (%s))\n' - 'return _decorate_(func, _call, (%s))' % (defaultargs, defaultargs), - evaldict, doc=doc, module=caller.__module__, __wrapped__=caller) - if defaults: - dec.__defaults__ = (None,) + defaults + return decorate(func, caller, extras, kwsyntax) + dec.__signature__ = sig.replace(parameters=dec_params) + dec.__name__ = caller.__name__ + dec.__doc__ = caller.__doc__ + dec.__wrapped__ = caller + dec.__qualname__ = caller.__qualname__ + dec.__kwdefaults__ = getattr(caller, '__kwdefaults__', None) + dec.__dict__.update(caller.__dict__) return dec # ####################### contextmanager ####################### # -try: # Python >= 3.2 - from contextlib import _GeneratorContextManager -except ImportError: # Python >= 2.5 - from contextlib import GeneratorContextManager as _GeneratorContextManager - class ContextManager(_GeneratorContextManager): - def __call__(self, func): - """Context manager decorator""" - return FunctionMaker.create( - func, "with _self_: return _func_(%(shortsignature)s)", - dict(_self_=self, _func_=func), __wrapped__=func) + def __init__(self, g, *a, **k): + _GeneratorContextManager.__init__(self, g, a, k) + def __call__(self, func): + def caller(f, *a, **k): + with self.__class__(self.func, *self.args, **self.kwds): + return f(*a, **k) + return decorate(func, caller) -init = getfullargspec(_GeneratorContextManager.__init__) -n_args = len(init.args) -if n_args == 2 and not init.varargs: # (self, genobj) Python 2.7 - def __init__(self, g, *a, **k): - return _GeneratorContextManager.__init__(self, g(*a, **k)) - ContextManager.__init__ = __init__ -elif n_args == 2 and init.varargs: # (self, gen, *a, **k) Python 3.4 - pass -elif n_args == 4: # (self, gen, args, kwds) Python 3.5 - 
def __init__(self, g, *a, **k): - return _GeneratorContextManager.__init__(self, g, a, k) - ContextManager.__init__ = __init__ _contextmanager = decorator(ContextManager) diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/defusedxml-0.7.1.dist-info/INSTALLER b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/defusedxml-0.7.1.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/defusedxml-0.7.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/defusedxml-0.7.1.dist-info/LICENSE b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/defusedxml-0.7.1.dist-info/LICENSE new file mode 100644 index 00000000..311690c6 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/defusedxml-0.7.1.dist-info/LICENSE @@ -0,0 +1,49 @@ +PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 +-------------------------------------------- + +1. This LICENSE AGREEMENT is between the Python Software Foundation +("PSF"), and the Individual or Organization ("Licensee") accessing and +otherwise using this software ("Python") in source or binary form and +its associated documentation. + +2. Subject to the terms and conditions of this License Agreement, PSF +hereby grants Licensee a nonexclusive, royalty-free, world-wide +license to reproduce, analyze, test, perform and/or display publicly, +prepare derivative works, distribute, and otherwise use Python +alone or in any derivative version, provided, however, that PSF's +License Agreement and PSF's notice of copyright, i.e., "Copyright (c) +2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 Python Software Foundation; +All Rights Reserved" are retained in Python alone or in any derivative +version prepared by Licensee. + +3. In the event Licensee prepares a derivative work that is based on +or incorporates Python or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python. + +4. PSF is making Python available to Licensee on an "AS IS" +basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. Nothing in this License Agreement shall be deemed to create any +relationship of agency, partnership, or joint venture between PSF and +Licensee. This License Agreement does not grant permission to use PSF +trademarks or trade name in a trademark sense to endorse or promote +products or services of Licensee, or any third party. + +8. By copying, installing or otherwise using Python, Licensee +agrees to be bound by the terms and conditions of this License +Agreement. 
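The rewritten decorate() in decorator.py above normalizes call arguments by binding them against the wrapped function's signature (the new fix() helper) rather than by generating source code. A small sketch of that binding step on its own, with a made-up function f::

    import inspect

    def f(a, b=2, *args, c=3, **kw):
        return a, b, args, c, kw

    sig = inspect.signature(f)
    ba = sig.bind(1, c=4, d=5)
    ba.apply_defaults()  # fills in omitted defaults, here b=2
    print(ba.args)    # (1, 2)
    print(ba.kwargs)  # {'c': 4, 'd': 5}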
+ diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/defusedxml-0.7.1.dist-info/METADATA b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/defusedxml-0.7.1.dist-info/METADATA new file mode 100644 index 00000000..f916e891 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/defusedxml-0.7.1.dist-info/METADATA @@ -0,0 +1,978 @@ +Metadata-Version: 2.1 +Name: defusedxml +Version: 0.7.1 +Summary: XML bomb protection for Python stdlib modules +Home-page: https://github.com/tiran/defusedxml +Author: Christian Heimes +Author-email: christian@python.org +Maintainer: Christian Heimes +Maintainer-email: christian@python.org +License: PSFL +Download-URL: https://pypi.python.org/pypi/defusedxml +Keywords: xml bomb DoS +Platform: all +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Python Software Foundation License +Classifier: Natural Language :: English +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Topic :: Text Processing :: Markup :: XML +Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.* + +=================================================== +defusedxml -- defusing XML bombs and other exploits +=================================================== + +.. image:: https://img.shields.io/pypi/v/defusedxml.svg + :target: https://pypi.org/project/defusedxml/ + :alt: Latest Version + +.. image:: https://img.shields.io/pypi/pyversions/defusedxml.svg + :target: https://pypi.org/project/defusedxml/ + :alt: Supported Python versions + +.. image:: https://travis-ci.org/tiran/defusedxml.svg?branch=master + :target: https://travis-ci.org/tiran/defusedxml + :alt: Travis CI + +.. image:: https://codecov.io/github/tiran/defusedxml/coverage.svg?branch=master + :target: https://codecov.io/github/tiran/defusedxml?branch=master + :alt: codecov + +.. image:: https://img.shields.io/pypi/dm/defusedxml.svg + :target: https://pypistats.org/packages/defusedxml + :alt: PyPI downloads + +.. image:: https://img.shields.io/badge/code%20style-black-000000.svg + :target: https://github.com/psf/black + :alt: Code style: black + +.. + + "It's just XML, what could probably go wrong?" + +Christian Heimes + +Synopsis +======== + +The results of an attack on a vulnerable XML library can be fairly dramatic. +With just a few hundred **Bytes** of XML data an attacker can occupy several +**Gigabytes** of memory within **seconds**. An attacker can also keep +CPUs busy for a long time with a small to medium size request. Under some +circumstances it is even possible to access local files on your +server, to circumvent a firewall, or to abuse services to rebound attacks to +third parties. + +The attacks use and abuse less common features of XML and its parsers. The +majority of developers are unacquainted with features such as processing +instructions and entity expansions that XML inherited from SGML. At best +they know about ``<!DOCTYPE>`` from experience with HTML but they are not +aware that a document type definition (DTD) can generate an HTTP request +or load a file from the file system.
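A minimal sketch of what this defusing looks like in practice, using the defusedxml calls documented further down in this README (the sample document is made up)::

    from defusedxml import DTDForbidden
    from defusedxml.ElementTree import fromstring

    # A document carrying a DTD; with forbid_dtd=True it is rejected outright.
    doc = '<!DOCTYPE root [ <!ENTITY e "boom"> ]><root>&e;</root>'
    try:
        fromstring(doc, forbid_dtd=True)
    except DTDForbidden as exc:
        print('rejected:', exc)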
+ +None of the issues is new. They have been known for a long time. Billion +laughs was first reported in 2003. Nevertheless some XML libraries and +applications are still vulnerable and even heavy users of XML are +surprised by these features. It's hard to say whom to blame for the +situation. It's too short sighted to shift all blame on XML parsers and +XML libraries for using insecure default settings. After all they +properly implement XML specifications. Application developers must not rely +that a library is always configured for security and potential harmful data +by default. + + +.. contents:: Table of Contents + :depth: 2 + + +Attack vectors +============== + +billion laughs / exponential entity expansion +--------------------------------------------- + +The `Billion Laughs`_ attack -- also known as exponential entity expansion -- +uses multiple levels of nested entities. The original example uses 9 levels +of 10 expansions in each level to expand the string ``lol`` to a string of +3 * 10 :sup:`9` bytes, hence the name "billion laughs". The resulting string +occupies 3 GB (2.79 GiB) of memory; intermediate strings require additional +memory. Because most parsers don't cache the intermediate step for every +expansion it is repeated over and over again. It increases the CPU load even +more. + +An XML document of just a few hundred bytes can disrupt all services on a +machine within seconds. + +Example XML:: + + + + + + ]> + &d; + + +quadratic blowup entity expansion +--------------------------------- + +A quadratic blowup attack is similar to a `Billion Laughs`_ attack; it abuses +entity expansion, too. Instead of nested entities it repeats one large entity +with a couple of thousand chars over and over again. The attack isn't as +efficient as the exponential case but it avoids triggering countermeasures of +parsers against heavily nested entities. Some parsers limit the depth and +breadth of a single entity but not the total amount of expanded text +throughout an entire XML document. + +A medium-sized XML document with a couple of hundred kilobytes can require a +couple of hundred MB to several GB of memory. When the attack is combined +with some level of nested expansion an attacker is able to achieve a higher +ratio of success. + +:: + + + ]> + &a;&a;&a;... repeat + + +external entity expansion (remote) +---------------------------------- + +Entity declarations can contain more than just text for replacement. They can +also point to external resources by public identifiers or system identifiers. +System identifiers are standard URIs. When the URI is a URL (e.g. a +``http://`` locator) some parsers download the resource from the remote +location and embed them into the XML document verbatim. + +Simple example of a parsed external entity:: + + + ]> + + +The case of parsed external entities works only for valid XML content. The +XML standard also supports unparsed external entities with a +``NData declaration``. + +External entity expansion opens the door to plenty of exploits. An attacker +can abuse a vulnerable XML library and application to rebound and forward +network requests with the IP address of the server. It highly depends +on the parser and the application what kind of exploit is possible. For +example: + +* An attacker can circumvent firewalls and gain access to restricted + resources as all the requests are made from an internal and trustworthy + IP address, not from the outside. 
+* An attacker can abuse a service to attack, spy on or DoS your servers but + also third party services. The attack is disguised with the IP address of + the server and the attacker is able to utilize the high bandwidth of a big + machine. +* An attacker can exhaust additional resources on the machine, e.g. with + requests to a service that doesn't respond or responds with very large + files. +* An attacker may gain knowledge, when, how often and from which IP address + an XML document is accessed. +* An attacker could send mail from inside your network if the URL handler + supports ``smtp://`` URIs. + + +external entity expansion (local file) +-------------------------------------- + +External entities with references to local files are a sub-case of external +entity expansion. It's listed as an extra attack because it deserves extra +attention. Some XML libraries such as lxml disable network access by default +but still allow entity expansion with local file access by default. Local +files are either referenced with a ``file://`` URL or by a file path (either +relative or absolute). + +An attacker may be able to access and download all files that can be read by +the application process. This may include critical configuration files, too. + +:: + + + ]> + + + +DTD retrieval +------------- + +This case is similar to external entity expansion, too. Some XML libraries +like Python's xml.dom.pulldom retrieve document type definitions from remote +or local locations. Several attack scenarios from the external entity case +apply to this issue as well. + +:: + + + + + + text + + + +Python XML Libraries +==================== + +.. csv-table:: vulnerabilities and features + :header: "kind", "sax", "etree", "minidom", "pulldom", "xmlrpc", "lxml", "genshi" + :widths: 24, 7, 8, 8, 7, 8, 8, 8 + :stub-columns: 0 + + "billion laughs", "**True**", "**True**", "**True**", "**True**", "**True**", "False (1)", "False (5)" + "quadratic blowup", "**True**", "**True**", "**True**", "**True**", "**True**", "**True**", "False (5)" + "external entity expansion (remote)", "**True**", "False (3)", "False (4)", "**True**", "false", "False (1)", "False (5)" + "external entity expansion (local file)", "**True**", "False (3)", "False (4)", "**True**", "false", "**True**", "False (5)" + "DTD retrieval", "**True**", "False", "False", "**True**", "false", "False (1)", "False" + "gzip bomb", "False", "False", "False", "False", "**True**", "**partly** (2)", "False" + "xpath support (7)", "False", "False", "False", "False", "False", "**True**", "False" + "xsl(t) support (7)", "False", "False", "False", "False", "False", "**True**", "False" + "xinclude support (7)", "False", "**True** (6)", "False", "False", "False", "**True** (6)", "**True**" + "C library", "expat", "expat", "expat", "expat", "expat", "libxml2", "expat" + +1. Lxml is protected against billion laughs attacks and doesn't do network + lookups by default. +2. libxml2 and lxml are not directly vulnerable to gzip decompression bombs + but they don't protect you against them either. +3. xml.etree doesn't expand entities and raises a ParserError when an entity + occurs. +4. minidom doesn't expand entities and simply returns the unexpanded entity + verbatim. +5. genshi.input of genshi 0.6 doesn't support entity expansion and raises a + ParserError when an entity occurs. +6. Library has (limited) XInclude support but requires an additional step to + process inclusion. +7. 
These are features but they may introduce exploitable holes, see + `Other things to consider`_ + + +Settings in standard library +---------------------------- + + +xml.sax.handler Features +........................ + +feature_external_ges (http://xml.org/sax/features/external-general-entities) + disables external entity expansion + +feature_external_pes (http://xml.org/sax/features/external-parameter-entities) + the option is ignored and doesn't modify any functionality + +DOM xml.dom.xmlbuilder.Options +.............................. + +external_parameter_entities + ignored + +external_general_entities + ignored + +external_dtd_subset + ignored + +entities + unsure + + +defusedxml +========== + +The `defusedxml package`_ (`defusedxml on PyPI`_) +contains several Python-only workarounds and fixes +for denial of service and other vulnerabilities in Python's XML libraries. +In order to benefit from the protection you just have to import and use the +listed functions / classes from the right defusedxml module instead of the +original module. Merely `defusedxml.xmlrpc`_ is implemented as monkey patch. + +Instead of:: + + >>> from xml.etree.ElementTree import parse + >>> et = parse(xmlfile) + +alter code to:: + + >>> from defusedxml.ElementTree import parse + >>> et = parse(xmlfile) + +Additionally the package has an **untested** function to monkey patch +all stdlib modules with ``defusedxml.defuse_stdlib()``. + +All functions and parser classes accept three additional keyword arguments. +They return either the same objects as the original functions or compatible +subclasses. + +forbid_dtd (default: False) + disallow XML with a ```` processing instruction and raise a + *DTDForbidden* exception when a DTD processing instruction is found. + +forbid_entities (default: True) + disallow XML with ```` declarations inside the DTD and raise an + *EntitiesForbidden* exception when an entity is declared. + +forbid_external (default: True) + disallow any access to remote or local resources in external entities + or DTD and raising an *ExternalReferenceForbidden* exception when a DTD + or entity references an external resource. + + +defusedxml (package) +-------------------- + +DefusedXmlException, DTDForbidden, EntitiesForbidden, +ExternalReferenceForbidden, NotSupportedError + +defuse_stdlib() (*experimental*) + + +defusedxml.cElementTree +----------------------- + +**NOTE** ``defusedxml.cElementTree`` is deprecated and will be removed in a +future release. Import from ``defusedxml.ElementTree`` instead. + +parse(), iterparse(), fromstring(), XMLParser + + +defusedxml.ElementTree +----------------------- + +parse(), iterparse(), fromstring(), XMLParser + + +defusedxml.expatreader +---------------------- + +create_parser(), DefusedExpatParser + + +defusedxml.sax +-------------- + +parse(), parseString(), make_parser() + + +defusedxml.expatbuilder +----------------------- + +parse(), parseString(), DefusedExpatBuilder, DefusedExpatBuilderNS + + +defusedxml.minidom +------------------ + +parse(), parseString() + + +defusedxml.pulldom +------------------ + +parse(), parseString() + + +defusedxml.xmlrpc +----------------- + +The fix is implemented as monkey patch for the stdlib's xmlrpc package (3.x) +or xmlrpclib module (2.x). The function `monkey_patch()` enables the fixes, +`unmonkey_patch()` removes the patch and puts the code in its former state. + +The monkey patch protects against XML related attacks as well as +decompression bombs and excessively large requests or responses. 
The default +setting is 30 MB for requests, responses and gzip decompression. You can +modify the default by changing the module variable `MAX_DATA`. A value of +`-1` disables the limit. + + +defusedxml.lxml +--------------- + +**DEPRECATED** The module is deprecated and will be removed in a future +release. + +The module acts as an *example* how you could protect code that uses +lxml.etree. It implements a custom Element class that filters out +Entity instances, a custom parser factory and a thread local storage for +parser instances. It also has a check_docinfo() function which inspects +a tree for internal or external DTDs and entity declarations. In order to +check for entities lxml > 3.0 is required. + +parse(), fromstring() +RestrictedElement, GlobalParserTLS, getDefaultParser(), check_docinfo() + + +defusedexpat +============ + +The `defusedexpat package`_ (`defusedexpat on PyPI`_) +comes with binary extensions and a +`modified expat`_ library instead of the standard `expat parser`_. It's +basically a stand-alone version of the patches for Python's standard +library C extensions. + +Modifications in expat +---------------------- + +new definitions:: + + XML_BOMB_PROTECTION + XML_DEFAULT_MAX_ENTITY_INDIRECTIONS + XML_DEFAULT_MAX_ENTITY_EXPANSIONS + XML_DEFAULT_RESET_DTD + +new XML_FeatureEnum members:: + + XML_FEATURE_MAX_ENTITY_INDIRECTIONS + XML_FEATURE_MAX_ENTITY_EXPANSIONS + XML_FEATURE_IGNORE_DTD + +new XML_Error members:: + + XML_ERROR_ENTITY_INDIRECTIONS + XML_ERROR_ENTITY_EXPANSION + +new API functions:: + + int XML_GetFeature(XML_Parser parser, + enum XML_FeatureEnum feature, + long *value); + int XML_SetFeature(XML_Parser parser, + enum XML_FeatureEnum feature, + long value); + int XML_GetFeatureDefault(enum XML_FeatureEnum feature, + long *value); + int XML_SetFeatureDefault(enum XML_FeatureEnum feature, + long value); + +XML_FEATURE_MAX_ENTITY_INDIRECTIONS + Limit the amount of indirections that are allowed to occur during the + expansion of a nested entity. A counter starts when an entity reference + is encountered. It resets after the entity is fully expanded. The limit + protects the parser against exponential entity expansion attacks (aka + billion laughs attack). When the limit is exceeded the parser stops and + fails with `XML_ERROR_ENTITY_INDIRECTIONS`. + A value of 0 disables the protection. + + Supported range + 0 .. UINT_MAX + Default + 40 + +XML_FEATURE_MAX_ENTITY_EXPANSIONS + Limit the total length of all entity expansions throughout the entire + document. The lengths of all entities are accumulated in a parser variable. + The setting protects against quadratic blowup attacks (lots of expansions + of a large entity declaration). When the sum of all entities exceeds + the limit, the parser stops and fails with `XML_ERROR_ENTITY_EXPANSION`. + A value of 0 disables the protection. + + Supported range + 0 .. UINT_MAX + Default + 8 MiB + +XML_FEATURE_RESET_DTD + Reset all DTD information after the block has been parsed. When + the flag is set (default: false) all DTD information after the + endDoctypeDeclHandler has been called. The flag can be set inside the + endDoctypeDeclHandler. Without DTD information any entity reference in + the document body leads to `XML_ERROR_UNDEFINED_ENTITY`. 
+ + Supported range + 0, 1 + Default + 0 + + +How to avoid XML vulnerabilities +================================ + +Best practices +-------------- + +* Don't allow DTDs +* Don't expand entities +* Don't resolve externals +* Limit parse depth +* Limit total input size +* Limit parse time +* Favor a SAX or iterparse-like parser for potential large data +* Validate and properly quote arguments to XSL transformations and + XPath queries +* Don't use XPath expression from untrusted sources +* Don't apply XSL transformations that come untrusted sources + +(based on Brad Hill's `Attacking XML Security`_) + + +Other things to consider +======================== + +XML, XML parsers and processing libraries have more features and possible +issue that could lead to DoS vulnerabilities or security exploits in +applications. I have compiled an incomplete list of theoretical issues that +need further research and more attention. The list is deliberately pessimistic +and a bit paranoid, too. It contains things that might go wrong under daffy +circumstances. + + +attribute blowup / hash collision attack +---------------------------------------- + +XML parsers may use an algorithm with quadratic runtime O(n :sup:`2`) to +handle attributes and namespaces. If it uses hash tables (dictionaries) to +store attributes and namespaces the implementation may be vulnerable to +hash collision attacks, thus reducing the performance to O(n :sup:`2`) again. +In either case an attacker is able to forge a denial of service attack with +an XML document that contains thousands upon thousands of attributes in +a single node. + +I haven't researched yet if expat, pyexpat or libxml2 are vulnerable. + + +decompression bomb +------------------ + +The issue of decompression bombs (aka `ZIP bomb`_) apply to all XML libraries +that can parse compressed XML stream like gzipped HTTP streams or LZMA-ed +files. For an attacker it can reduce the amount of transmitted data by three +magnitudes or more. Gzip is able to compress 1 GiB zeros to roughly 1 MB, +lzma is even better:: + + $ dd if=/dev/zero bs=1M count=1024 | gzip > zeros.gz + $ dd if=/dev/zero bs=1M count=1024 | lzma -z > zeros.xy + $ ls -sh zeros.* + 1020K zeros.gz + 148K zeros.xy + +None of Python's standard XML libraries decompress streams except for +``xmlrpclib``. The module is vulnerable +to decompression bombs. + +lxml can load and process compressed data through libxml2 transparently. +libxml2 can handle even very large blobs of compressed data efficiently +without using too much memory. But it doesn't protect applications from +decompression bombs. A carefully written SAX or iterparse-like approach can +be safe. + + +Processing Instruction +---------------------- + +`PI`_'s like:: + + + +may impose more threats for XML processing. It depends if and how a +processor handles processing instructions. The issue of URL retrieval with +network or local file access apply to processing instructions, too. + + +Other DTD features +------------------ + +`DTD`_ has more features like ````. I haven't researched how +these features may be a security threat. + + +XPath +----- + +XPath statements may introduce DoS vulnerabilities. Code should never execute +queries from untrusted sources. An attacker may also be able to create an XML +document that makes certain XPath queries costly or resource hungry. + + +XPath injection attacks +----------------------- + +XPath injeciton attacks pretty much work like SQL injection attacks. 
+Arguments to XPath queries must be quoted and validated properly, especially +when they are taken from the user. The page `Avoid the dangers of XPath injection`_ +list some ramifications of XPath injections. + +Python's standard library doesn't have XPath support. Lxml supports +parameterized XPath queries which does proper quoting. You just have to use +its xpath() method correctly:: + + # DON'T + >>> tree.xpath("/tag[@id='%s']" % value) + + # instead do + >>> tree.xpath("/tag[@id=$tagid]", tagid=name) + + +XInclude +-------- + +`XML Inclusion`_ is another way to load and include external files:: + + + + + +This feature should be disabled when XML files from an untrusted source are +processed. Some Python XML libraries and libxml2 support XInclude but don't +have an option to sandbox inclusion and limit it to allowed directories. + + +XMLSchema location +------------------ + +A validating XML parser may download schema files from the information in a +``xsi:schemaLocation`` attribute. + +:: + + + + + +XSL Transformation +------------------ + +You should keep in mind that XSLT is a Turing complete language. Never +process XSLT code from unknown or untrusted source! XSLT processors may +allow you to interact with external resources in ways you can't even imagine. +Some processors even support extensions that allow read/write access to file +system, access to JRE objects or scripting with Jython. + +Example from `Attacking XML Security`_ for Xalan-J:: + + + + + + + + + + + +Related CVEs +============ + +CVE-2013-1664 + Unrestricted entity expansion induces DoS vulnerabilities in Python XML + libraries (XML bomb) + +CVE-2013-1665 + External entity expansion in Python XML libraries inflicts potential + security flaws and DoS vulnerabilities + + +Other languages / frameworks +============================= + +Several other programming languages and frameworks are vulnerable as well. A +couple of them are affected by the fact that libxml2 up to 2.9.0 has no +protection against quadratic blowup attacks. Most of them have potential +dangerous default settings for entity expansion and external entities, too. + +Perl +---- + +Perl's XML::Simple is vulnerable to quadratic entity expansion and external +entity expansion (both local and remote). + + +Ruby +---- + +Ruby's REXML document parser is vulnerable to entity expansion attacks +(both quadratic and exponential) but it doesn't do external entity +expansion by default. In order to counteract entity expansion you have to +disable the feature:: + + REXML::Document.entity_expansion_limit = 0 + +libxml-ruby and hpricot don't expand entities in their default configuration. + + +PHP +--- + +PHP's SimpleXML API is vulnerable to quadratic entity expansion and loads +entities from local and remote resources. The option ``LIBXML_NONET`` disables +network access but still allows local file access. ``LIBXML_NOENT`` seems to +have no effect on entity expansion in PHP 5.4.6. + + +C# / .NET / Mono +---------------- + +Information in `XML DoS and Defenses (MSDN)`_ suggest that .NET is +vulnerable with its default settings. The article contains code snippets +how to create a secure XML reader:: + + XmlReaderSettings settings = new XmlReaderSettings(); + settings.ProhibitDtd = false; + settings.MaxCharactersFromEntities = 1024; + settings.XmlResolver = null; + XmlReader reader = XmlReader.Create(stream, settings); + + +Java +---- + +Untested. 
The documentation of Xerces and its `Xerces SecurityMananger`_ +sounds like Xerces is also vulnerable to billion laugh attacks with its +default settings. It also does entity resolving when an +``org.xml.sax.EntityResolver`` is configured. I'm not yet sure about the +default setting here. + +Java specialists suggest to have a custom builder factory:: + + DocumentBuilderFactory builderFactory = DocumentBuilderFactory.newInstance(); + builderFactory.setXIncludeAware(False); + builderFactory.setExpandEntityReferences(False); + builderFactory.setFeature(XMLConstants.FEATURE_SECURE_PROCESSING, True); + # either + builderFactory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", True); + # or if you need DTDs + builderFactory.setFeature("http://xml.org/sax/features/external-general-entities", False); + builderFactory.setFeature("http://xml.org/sax/features/external-parameter-entities", False); + builderFactory.setFeature("http://apache.org/xml/features/nonvalidating/load-external-dtd", False); + builderFactory.setFeature("http://apache.org/xml/features/nonvalidating/load-dtd-grammar", False); + + +TODO +==== + +* DOM: Use xml.dom.xmlbuilder options for entity handling +* SAX: take feature_external_ges and feature_external_pes (?) into account +* test experimental monkey patching of stdlib modules +* improve documentation + + +License +======= + +Copyright (c) 2013-2017 by Christian Heimes + +Licensed to PSF under a Contributor Agreement. + +See https://www.python.org/psf/license for licensing details. + + +Acknowledgements +================ + +Brett Cannon (Python Core developer) + review and code cleanup + +Antoine Pitrou (Python Core developer) + code review + +Aaron Patterson, Ben Murphy and Michael Koziarski (Ruby community) + Many thanks to Aaron, Ben and Michael from the Ruby community for their + report and assistance. + +Thierry Carrez (OpenStack) + Many thanks to Thierry for his report to the Python Security Response + Team on behalf of the OpenStack security team. + +Carl Meyer (Django) + Many thanks to Carl for his report to PSRT on behalf of the Django security + team. + +Daniel Veillard (libxml2) + Many thanks to Daniel for his insight and assistance with libxml2. + +semantics GmbH (https://www.semantics.de/) + Many thanks to my employer semantics for letting me work on the issue + during working hours as part of semantics's open source initiative. + + +References +========== + +* `XML DoS and Defenses (MSDN)`_ +* `Billion Laughs`_ on Wikipedia +* `ZIP bomb`_ on Wikipedia +* `Configure SAX parsers for secure processing`_ +* `Testing for XML Injection`_ + +.. _defusedxml package: https://github.com/tiran/defusedxml +.. _defusedxml on PyPI: https://pypi.python.org/pypi/defusedxml +.. _defusedexpat package: https://github.com/tiran/defusedexpat +.. _defusedexpat on PyPI: https://pypi.python.org/pypi/defusedexpat +.. _modified expat: https://github.com/tiran/expat +.. _expat parser: http://expat.sourceforge.net/ +.. _Attacking XML Security: https://www.isecpartners.com/media/12976/iSEC-HILL-Attacking-XML-Security-bh07.pdf +.. _Billion Laughs: https://en.wikipedia.org/wiki/Billion_laughs +.. _XML DoS and Defenses (MSDN): https://msdn.microsoft.com/en-us/magazine/ee335713.aspx +.. _ZIP bomb: https://en.wikipedia.org/wiki/Zip_bomb +.. _DTD: https://en.wikipedia.org/wiki/Document_Type_Definition +.. _PI: https://en.wikipedia.org/wiki/Processing_Instruction +.. _Avoid the dangers of XPath injection: http://www.ibm.com/developerworks/xml/library/x-xpathinjection/index.html +.. 
_Configure SAX parsers for secure processing: http://www.ibm.com/developerworks/xml/library/x-tipcfsx/index.html +.. _Testing for XML Injection: https://www.owasp.org/index.php/Testing_for_XML_Injection_(OWASP-DV-008) +.. _Xerces SecurityManager: https://xerces.apache.org/xerces2-j/javadocs/xerces2/org/apache/xerces/util/SecurityManager.html +.. _XML Inclusion: https://www.w3.org/TR/xinclude/#include_element + +Changelog +========= + +defusedxml 0.7.1 +---------------- + +*Release date: 08-Mar-2021* + +- Fix regression ``defusedxml.ElementTree.ParseError`` (#63) + The ``ParseError`` exception is now the same class object as + ``xml.etree.ElementTree.ParseError`` again. + + +defusedxml 0.7.0 +---------------- + +*Release date: 04-Mar-2021* + +- No changes + + +defusedxml 0.7.0rc2 +------------------- + +*Release date: 12-Jan-2021* + +- Re-add and deprecate ``defusedxml.cElementTree`` +- Use GitHub Actions instead of TravisCI +- Restore ``ElementTree`` attribute of ``xml.etree`` module after patching + + +defusedxml 0.7.0rc1 +------------------- + +*Release date: 04-May-2020* + +- Add support for Python 3.9 +- ``defusedxml.cElementTree`` is not available with Python 3.9. +- Python 2 is deprecated. Support for Python 2 will be removed in 0.8.0. + + +defusedxml 0.6.0 +---------------- + +*Release date: 17-Apr-2019* + +- Increase test coverage. +- Add badges to README. + + +defusedxml 0.6.0rc1 +------------------- + +*Release date: 14-Apr-2019* + +- Test on Python 3.7 stable and 3.8-dev +- Drop support for Python 3.4 +- No longer pass *html* argument to XMLParse. It has been deprecated and + ignored for a long time. The DefusedXMLParser still takes an *html* argument. + A deprecation warning is issued when the argument is False and a TypeError + when it's True. +- defusedxml now fails early when the pyexpat stdlib module is not available or + broken. +- defusedxml.ElementTree.__all__ now lists ParseError as a public attribute. +- The defusedxml.ElementTree and defusedxml.cElementTree modules had a typo + and used XMLParse instead of XMLParser as an alias for DefusedXMLParser. + Both the old and the fixed name are now available. + + +defusedxml 0.5.0 +---------------- + +*Release date: 07-Feb-2017* + +- No changes + + +defusedxml 0.5.0.rc1 +-------------------- + +*Release date: 28-Jan-2017* + +- Add compatibility with Python 3.6 +- Drop support for Python 2.6, 3.1, 3.2, 3.3 +- Fix lxml tests (XMLSyntaxError: Detected an entity reference loop) + + +defusedxml 0.4.1 +---------------- + +*Release date: 28-Mar-2013* + +- Add more demo exploits, e.g. python_external.py and Xalan XSLT demos. +- Improve documentation. + + +defusedxml 0.4 +-------------- + +*Release date: 25-Feb-2013* + +- As per http://seclists.org/oss-sec/2013/q1/340 please REJECT + CVE-2013-0278, CVE-2013-0279 and CVE-2013-0280 and use CVE-2013-1664, + CVE-2013-1665 for OpenStack/etc. +- Add missing parser_list argument to sax.make_parser(). The argument is + ignored, though. (thanks to Florian Apolloner) +- Add demo exploit for external entity attack on Python's SAX parser, XML-RPC + and WebDAV. 
+ + +defusedxml 0.3 +-------------- + +*Release date: 19-Feb-2013* + +- Improve documentation + + +defusedxml 0.2 +-------------- + +*Release date: 15-Feb-2013* + +- Rename ExternalEntitiesForbidden to ExternalReferenceForbidden +- Rename defusedxml.lxml.check_dtd() to check_docinfo() +- Unify argument names in callbacks +- Add arguments and formatted representation to exceptions +- Add forbid_external argument to all functions and classes +- More tests +- LOTS of documentation +- Add example code for other languages (Ruby, Perl, PHP) and parsers (Genshi) +- Add protection against XML and gzip attacks to xmlrpclib + +defusedxml 0.1 +-------------- + +*Release date: 08-Feb-2013* + +- Initial and internal release for PSRT review + + diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/defusedxml-0.7.1.dist-info/RECORD b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/defusedxml-0.7.1.dist-info/RECORD new file mode 100644 index 00000000..0a2fa9c6 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/defusedxml-0.7.1.dist-info/RECORD @@ -0,0 +1,17 @@ +defusedxml-0.7.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +defusedxml-0.7.1.dist-info/LICENSE,sha256=uAzp2oxCofkQeWJ_u-K_JyEK4Qig_-Xwd9WwjgdsJMg,2409 +defusedxml-0.7.1.dist-info/METADATA,sha256=Np0872SHDa-En7pxHLjQWn7-PI2asPdjrcNAef43i7E,32518 +defusedxml-0.7.1.dist-info/RECORD,, +defusedxml-0.7.1.dist-info/WHEEL,sha256=kGT74LWyRUZrL4VgLh6_g12IeVl_9u9ZVhadrgXZUEY,110 +defusedxml-0.7.1.dist-info/top_level.txt,sha256=QGHa90F50pVKhWSFlERI0jtSKtqDiGyfeZX7dQNZAAw,11 +defusedxml/ElementTree.py,sha256=GLSqpCz58oXGPGyzf_HylsPS9_dcGVP5SN4dK7yvyPw,4640 +defusedxml/__init__.py,sha256=RczeaVJG64p2Fgy1jlCzbuRdchEPnEaCBrxgk8JJ_pM,1444 +defusedxml/cElementTree.py,sha256=PpaKMh3rU29sY8amAK4fzHQKl8gcAYD0h1LCoW62Rtk,1449 +defusedxml/common.py,sha256=3d26jNW4fNXzgjWhvUfs83Afiz5EVxFDupQbugkSMZc,4036 +defusedxml/expatbuilder.py,sha256=b4Q05vsBMJ5StkiTFf4my2rGGo1gZyEl_hC5MeFTOAA,3732 +defusedxml/expatreader.py,sha256=KOpSrwkSvj5SGOY9pTXOM26Dnz00rsJt33WueVvzpvc,2196 +defusedxml/lxml.py,sha256=HW-LFKdrfMRzHdi0Vcucq4-n8yz7v_OQwEQWFg1JQYA,4940 +defusedxml/minidom.py,sha256=3QcgygVwJqcWDQ3IZ2iol8zsH4cx3BRX70SPcd0bG2g,1884 +defusedxml/pulldom.py,sha256=DYj2D2lc7xoxZ38gfzujXmdznd8ovzDqGFXqyXbtxjk,1170 +defusedxml/sax.py,sha256=-SF08Msc2mWEYAMw62pJ5FMwWccOctFSnQwDLYLLlVE,1477 +defusedxml/xmlrpc.py,sha256=7rZQey3tqXcc1hrrM3RprOICU6fiFny9B9l4nmTioxA,5364 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/defusedxml-0.7.1.dist-info/WHEEL b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/defusedxml-0.7.1.dist-info/WHEEL new file mode 100644 index 00000000..ef99c6cf --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/defusedxml-0.7.1.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.34.2) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/defusedxml-0.7.1.dist-info/top_level.txt b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/defusedxml-0.7.1.dist-info/top_level.txt new file mode 100644 index 00000000..36969f2c --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/defusedxml-0.7.1.dist-info/top_level.txt @@ -0,0 +1 @@ +defusedxml diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/defusedxml/ElementTree.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/defusedxml/ElementTree.py new file mode 100644 index 00000000..5ba765f1 --- /dev/null +++ 
b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/defusedxml/ElementTree.py @@ -0,0 +1,154 @@ +# defusedxml +# +# Copyright (c) 2013 by Christian Heimes +# Licensed to PSF under a Contributor Agreement. +# See https://www.python.org/psf/license for licensing details. +"""Defused xml.etree.ElementTree facade +""" +from __future__ import print_function, absolute_import + +import sys +import warnings +from xml.etree.ElementTree import ParseError +from xml.etree.ElementTree import TreeBuilder as _TreeBuilder +from xml.etree.ElementTree import parse as _parse +from xml.etree.ElementTree import tostring + +from .common import PY3 + +if PY3: + import importlib +else: + from xml.etree.ElementTree import XMLParser as _XMLParser + from xml.etree.ElementTree import iterparse as _iterparse + + +from .common import ( + DTDForbidden, + EntitiesForbidden, + ExternalReferenceForbidden, + _generate_etree_functions, +) + +__origin__ = "xml.etree.ElementTree" + + +def _get_py3_cls(): + """Python 3.3 hides the pure Python code but defusedxml requires it. + + The code is based on test.support.import_fresh_module(). + """ + pymodname = "xml.etree.ElementTree" + cmodname = "_elementtree" + + pymod = sys.modules.pop(pymodname, None) + cmod = sys.modules.pop(cmodname, None) + + sys.modules[cmodname] = None + try: + pure_pymod = importlib.import_module(pymodname) + finally: + # restore module + sys.modules[pymodname] = pymod + if cmod is not None: + sys.modules[cmodname] = cmod + else: + sys.modules.pop(cmodname, None) + # restore attribute on original package + etree_pkg = sys.modules["xml.etree"] + if pymod is not None: + etree_pkg.ElementTree = pymod + elif hasattr(etree_pkg, "ElementTree"): + del etree_pkg.ElementTree + + _XMLParser = pure_pymod.XMLParser + _iterparse = pure_pymod.iterparse + # patch pure module to use ParseError from C extension + pure_pymod.ParseError = ParseError + + return _XMLParser, _iterparse + + +if PY3: + _XMLParser, _iterparse = _get_py3_cls() + + +_sentinel = object() + + +class DefusedXMLParser(_XMLParser): + def __init__( + self, + html=_sentinel, + target=None, + encoding=None, + forbid_dtd=False, + forbid_entities=True, + forbid_external=True, + ): + # Python 2.x old style class + _XMLParser.__init__(self, target=target, encoding=encoding) + if html is not _sentinel: + # the 'html' argument has been deprecated and ignored in all + # supported versions of Python. Python 3.8 finally removed it. + if html: + raise TypeError("'html=True' is no longer supported.") + else: + warnings.warn( + "'html' keyword argument is no longer supported. 
Pass " + "in arguments as keyword arguments.", + category=DeprecationWarning, + ) + + self.forbid_dtd = forbid_dtd + self.forbid_entities = forbid_entities + self.forbid_external = forbid_external + if PY3: + parser = self.parser + else: + parser = self._parser + if self.forbid_dtd: + parser.StartDoctypeDeclHandler = self.defused_start_doctype_decl + if self.forbid_entities: + parser.EntityDeclHandler = self.defused_entity_decl + parser.UnparsedEntityDeclHandler = self.defused_unparsed_entity_decl + if self.forbid_external: + parser.ExternalEntityRefHandler = self.defused_external_entity_ref_handler + + def defused_start_doctype_decl(self, name, sysid, pubid, has_internal_subset): + raise DTDForbidden(name, sysid, pubid) + + def defused_entity_decl( + self, name, is_parameter_entity, value, base, sysid, pubid, notation_name + ): + raise EntitiesForbidden(name, value, base, sysid, pubid, notation_name) + + def defused_unparsed_entity_decl(self, name, base, sysid, pubid, notation_name): + # expat 1.2 + raise EntitiesForbidden(name, None, base, sysid, pubid, notation_name) # pragma: no cover + + def defused_external_entity_ref_handler(self, context, base, sysid, pubid): + raise ExternalReferenceForbidden(context, base, sysid, pubid) + + +# aliases +# XMLParse is a typo, keep it for backwards compatibility +XMLTreeBuilder = XMLParse = XMLParser = DefusedXMLParser + +parse, iterparse, fromstring = _generate_etree_functions( + DefusedXMLParser, _TreeBuilder, _parse, _iterparse +) +XML = fromstring + + +__all__ = [ + "ParseError", + "XML", + "XMLParse", + "XMLParser", + "XMLTreeBuilder", + "fromstring", + "iterparse", + "parse", + "tostring", +] diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/defusedxml/__init__.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/defusedxml/__init__.py new file mode 100644 index 00000000..4b5a2300 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/defusedxml/__init__.py @@ -0,0 +1,67 @@ +# defusedxml +# +# Copyright (c) 2013 by Christian Heimes +# Licensed to PSF under a Contributor Agreement. +# See https://www.python.org/psf/license for licensing details. +"""Defuse XML bomb denial of service vulnerabilities +""" +from __future__ import print_function, absolute_import + +import warnings + +from .common import ( + DefusedXmlException, + DTDForbidden, + EntitiesForbidden, + ExternalReferenceForbidden, + NotSupportedError, + _apply_defusing, +) + + +def defuse_stdlib(): + """Monkey patch and defuse all stdlib packages + + :warning: The monkey patch is an EXPERIMETNAL feature. + """ + defused = {} + + with warnings.catch_warnings(): + from . import cElementTree + from . import ElementTree + from . import minidom + from . import pulldom + from . import sax + from . import expatbuilder + from . import expatreader + from . 
import xmlrpc + + xmlrpc.monkey_patch() + defused[xmlrpc] = None + + defused_mods = [ + cElementTree, + ElementTree, + minidom, + pulldom, + sax, + expatbuilder, + expatreader, + ] + + for defused_mod in defused_mods: + stdlib_mod = _apply_defusing(defused_mod) + defused[defused_mod] = stdlib_mod + + return defused + + +__version__ = "0.7.1" + +__all__ = [ + "DefusedXmlException", + "DTDForbidden", + "EntitiesForbidden", + "ExternalReferenceForbidden", + "NotSupportedError", +] diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/defusedxml/cElementTree.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/defusedxml/cElementTree.py new file mode 100644 index 00000000..84670c68 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/defusedxml/cElementTree.py @@ -0,0 +1,62 @@ +# defusedxml +# +# Copyright (c) 2013 by Christian Heimes +# Licensed to PSF under a Contributor Agreement. +# See https://www.python.org/psf/license for licensing details. +"""Defused xml.etree.cElementTree +""" +from __future__ import absolute_import + +import warnings + +from .common import _generate_etree_functions + +from xml.etree.cElementTree import TreeBuilder as _TreeBuilder +from xml.etree.cElementTree import parse as _parse +from xml.etree.cElementTree import tostring + +# iterparse from ElementTree! +from xml.etree.ElementTree import iterparse as _iterparse + +# This module is an alias for ElementTree just like xml.etree.cElementTree +from .ElementTree import ( + XML, + XMLParse, + XMLParser, + XMLTreeBuilder, + fromstring, + iterparse, + parse, + tostring, + DefusedXMLParser, + ParseError, +) + +__origin__ = "xml.etree.cElementTree" + + +warnings.warn( + "defusedxml.cElementTree is deprecated, import from defusedxml.ElementTree instead.", + category=DeprecationWarning, + stacklevel=2, +) + +# XMLParse is a typo, keep it for backwards compatibility +XMLTreeBuilder = XMLParse = XMLParser = DefusedXMLParser + +parse, iterparse, fromstring = _generate_etree_functions( + DefusedXMLParser, _TreeBuilder, _parse, _iterparse +) +XML = fromstring + +__all__ = [ + "ParseError", + "XML", + "XMLParse", + "XMLParser", + "XMLTreeBuilder", + "fromstring", + "iterparse", + "parse", + "tostring", +] diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/defusedxml/common.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/defusedxml/common.py new file mode 100644 index 00000000..5ceda1fb --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/defusedxml/common.py @@ -0,0 +1,129 @@ +# defusedxml +# +# Copyright (c) 2013 by Christian Heimes +# Licensed to PSF under a Contributor Agreement. +# See https://www.python.org/psf/license for licensing details. 
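+# +# Editor's sketch (not part of upstream defusedxml): the exception classes +# defined below are what callers typically catch when parsing untrusted +# input, e.g.: +# +#     from defusedxml import ElementTree as DET +#     from defusedxml import EntitiesForbidden +#     try: +#         root = DET.fromstring(untrusted_xml)  # 'untrusted_xml' is a hypothetical string +#     except EntitiesForbidden: +#         ...  # reject the document instead of expanding its entities 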
+"""Common constants, exceptions and helpe functions +""" +import sys +import xml.parsers.expat + +PY3 = sys.version_info[0] == 3 + +# Fail early when pyexpat is not installed correctly +if not hasattr(xml.parsers.expat, "ParserCreate"): + raise ImportError("pyexpat") # pragma: no cover + + +class DefusedXmlException(ValueError): + """Base exception""" + + def __repr__(self): + return str(self) + + +class DTDForbidden(DefusedXmlException): + """Document type definition is forbidden""" + + def __init__(self, name, sysid, pubid): + super(DTDForbidden, self).__init__() + self.name = name + self.sysid = sysid + self.pubid = pubid + + def __str__(self): + tpl = "DTDForbidden(name='{}', system_id={!r}, public_id={!r})" + return tpl.format(self.name, self.sysid, self.pubid) + + +class EntitiesForbidden(DefusedXmlException): + """Entity definition is forbidden""" + + def __init__(self, name, value, base, sysid, pubid, notation_name): + super(EntitiesForbidden, self).__init__() + self.name = name + self.value = value + self.base = base + self.sysid = sysid + self.pubid = pubid + self.notation_name = notation_name + + def __str__(self): + tpl = "EntitiesForbidden(name='{}', system_id={!r}, public_id={!r})" + return tpl.format(self.name, self.sysid, self.pubid) + + +class ExternalReferenceForbidden(DefusedXmlException): + """Resolving an external reference is forbidden""" + + def __init__(self, context, base, sysid, pubid): + super(ExternalReferenceForbidden, self).__init__() + self.context = context + self.base = base + self.sysid = sysid + self.pubid = pubid + + def __str__(self): + tpl = "ExternalReferenceForbidden(system_id='{}', public_id={})" + return tpl.format(self.sysid, self.pubid) + + +class NotSupportedError(DefusedXmlException): + """The operation is not supported""" + + +def _apply_defusing(defused_mod): + assert defused_mod is sys.modules[defused_mod.__name__] + stdlib_name = defused_mod.__origin__ + __import__(stdlib_name, {}, {}, ["*"]) + stdlib_mod = sys.modules[stdlib_name] + stdlib_names = set(dir(stdlib_mod)) + for name, obj in vars(defused_mod).items(): + if name.startswith("_") or name not in stdlib_names: + continue + setattr(stdlib_mod, name, obj) + return stdlib_mod + + +def _generate_etree_functions(DefusedXMLParser, _TreeBuilder, _parse, _iterparse): + """Factory for functions needed by etree, dependent on whether + cElementTree or ElementTree is used.""" + + def parse(source, parser=None, forbid_dtd=False, forbid_entities=True, forbid_external=True): + if parser is None: + parser = DefusedXMLParser( + target=_TreeBuilder(), + forbid_dtd=forbid_dtd, + forbid_entities=forbid_entities, + forbid_external=forbid_external, + ) + return _parse(source, parser) + + def iterparse( + source, + events=None, + parser=None, + forbid_dtd=False, + forbid_entities=True, + forbid_external=True, + ): + if parser is None: + parser = DefusedXMLParser( + target=_TreeBuilder(), + forbid_dtd=forbid_dtd, + forbid_entities=forbid_entities, + forbid_external=forbid_external, + ) + return _iterparse(source, events, parser) + + def fromstring(text, forbid_dtd=False, forbid_entities=True, forbid_external=True): + parser = DefusedXMLParser( + target=_TreeBuilder(), + forbid_dtd=forbid_dtd, + forbid_entities=forbid_entities, + forbid_external=forbid_external, + ) + parser.feed(text) + return parser.close() + + return parse, iterparse, fromstring diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/defusedxml/expatbuilder.py 
b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/defusedxml/expatbuilder.py new file mode 100644 index 00000000..7bfc57e4 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/defusedxml/expatbuilder.py @@ -0,0 +1,107 @@ +# defusedxml +# +# Copyright (c) 2013 by Christian Heimes +# Licensed to PSF under a Contributor Agreement. +# See https://www.python.org/psf/license for licensing details. +"""Defused xml.dom.expatbuilder +""" +from __future__ import print_function, absolute_import + +from xml.dom.expatbuilder import ExpatBuilder as _ExpatBuilder +from xml.dom.expatbuilder import Namespaces as _Namespaces + +from .common import DTDForbidden, EntitiesForbidden, ExternalReferenceForbidden + +__origin__ = "xml.dom.expatbuilder" + + +class DefusedExpatBuilder(_ExpatBuilder): + """Defused document builder""" + + def __init__( + self, options=None, forbid_dtd=False, forbid_entities=True, forbid_external=True + ): + _ExpatBuilder.__init__(self, options) + self.forbid_dtd = forbid_dtd + self.forbid_entities = forbid_entities + self.forbid_external = forbid_external + + def defused_start_doctype_decl(self, name, sysid, pubid, has_internal_subset): + raise DTDForbidden(name, sysid, pubid) + + def defused_entity_decl( + self, name, is_parameter_entity, value, base, sysid, pubid, notation_name + ): + raise EntitiesForbidden(name, value, base, sysid, pubid, notation_name) + + def defused_unparsed_entity_decl(self, name, base, sysid, pubid, notation_name): + # expat 1.2 + raise EntitiesForbidden(name, None, base, sysid, pubid, notation_name) # pragma: no cover + + def defused_external_entity_ref_handler(self, context, base, sysid, pubid): + raise ExternalReferenceForbidden(context, base, sysid, pubid) + + def install(self, parser): + _ExpatBuilder.install(self, parser) + + if self.forbid_dtd: + parser.StartDoctypeDeclHandler = self.defused_start_doctype_decl + if self.forbid_entities: + # if self._options.entities: + parser.EntityDeclHandler = self.defused_entity_decl + parser.UnparsedEntityDeclHandler = self.defused_unparsed_entity_decl + if self.forbid_external: + parser.ExternalEntityRefHandler = self.defused_external_entity_ref_handler + + +class DefusedExpatBuilderNS(_Namespaces, DefusedExpatBuilder): + """Defused document builder that supports namespaces.""" + + def install(self, parser): + DefusedExpatBuilder.install(self, parser) + if self._options.namespace_declarations: + parser.StartNamespaceDeclHandler = self.start_namespace_decl_handler + + def reset(self): + DefusedExpatBuilder.reset(self) + self._initNamespaces() + + +def parse(file, namespaces=True, forbid_dtd=False, forbid_entities=True, forbid_external=True): + """Parse a document, returning the resulting Document node. + + 'file' may be either a file name or an open file object. + """ + if namespaces: + build_builder = DefusedExpatBuilderNS + else: + build_builder = DefusedExpatBuilder + builder = build_builder( + forbid_dtd=forbid_dtd, forbid_entities=forbid_entities, forbid_external=forbid_external + ) + + if isinstance(file, str): + fp = open(file, "rb") + try: + result = builder.parseFile(fp) + finally: + fp.close() + else: + result = builder.parseFile(file) + return result + + +def parseString( + string, namespaces=True, forbid_dtd=False, forbid_entities=True, forbid_external=True +): + """Parse a document from a string, returning the resulting + Document node. 
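+ +    Editor's sketch, not upstream text: the call mirrors +    xml.dom.expatbuilder, e.g. + +        from defusedxml import expatbuilder +        doc = expatbuilder.parseString("<root>safe</root>") 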
+ """ + if namespaces: + build_builder = DefusedExpatBuilderNS + else: + build_builder = DefusedExpatBuilder + builder = build_builder( + forbid_dtd=forbid_dtd, forbid_entities=forbid_entities, forbid_external=forbid_external + ) + return builder.parseString(string) diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/defusedxml/expatreader.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/defusedxml/expatreader.py new file mode 100644 index 00000000..890e1d16 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/defusedxml/expatreader.py @@ -0,0 +1,61 @@ +# defusedxml +# +# Copyright (c) 2013 by Christian Heimes +# Licensed to PSF under a Contributor Agreement. +# See https://www.python.org/psf/license for licensing details. +"""Defused xml.sax.expatreader +""" +from __future__ import print_function, absolute_import + +from xml.sax.expatreader import ExpatParser as _ExpatParser + +from .common import DTDForbidden, EntitiesForbidden, ExternalReferenceForbidden + +__origin__ = "xml.sax.expatreader" + + +class DefusedExpatParser(_ExpatParser): + """Defused SAX driver for the pyexpat C module.""" + + def __init__( + self, + namespaceHandling=0, + bufsize=2 ** 16 - 20, + forbid_dtd=False, + forbid_entities=True, + forbid_external=True, + ): + _ExpatParser.__init__(self, namespaceHandling, bufsize) + self.forbid_dtd = forbid_dtd + self.forbid_entities = forbid_entities + self.forbid_external = forbid_external + + def defused_start_doctype_decl(self, name, sysid, pubid, has_internal_subset): + raise DTDForbidden(name, sysid, pubid) + + def defused_entity_decl( + self, name, is_parameter_entity, value, base, sysid, pubid, notation_name + ): + raise EntitiesForbidden(name, value, base, sysid, pubid, notation_name) + + def defused_unparsed_entity_decl(self, name, base, sysid, pubid, notation_name): + # expat 1.2 + raise EntitiesForbidden(name, None, base, sysid, pubid, notation_name) # pragma: no cover + + def defused_external_entity_ref_handler(self, context, base, sysid, pubid): + raise ExternalReferenceForbidden(context, base, sysid, pubid) + + def reset(self): + _ExpatParser.reset(self) + parser = self._parser + if self.forbid_dtd: + parser.StartDoctypeDeclHandler = self.defused_start_doctype_decl + if self.forbid_entities: + parser.EntityDeclHandler = self.defused_entity_decl + parser.UnparsedEntityDeclHandler = self.defused_unparsed_entity_decl + if self.forbid_external: + parser.ExternalEntityRefHandler = self.defused_external_entity_ref_handler + + +def create_parser(*args, **kwargs): + return DefusedExpatParser(*args, **kwargs) diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/defusedxml/lxml.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/defusedxml/lxml.py new file mode 100644 index 00000000..99d5be93 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/defusedxml/lxml.py @@ -0,0 +1,153 @@ +# defusedxml +# +# Copyright (c) 2013 by Christian Heimes +# Licensed to PSF under a Contributor Agreement. +# See https://www.python.org/psf/license for licensing details. +"""DEPRECATED Example code for lxml.etree protection + +The code has NO protection against decompression bombs. 
+""" +from __future__ import print_function, absolute_import + +import threading +import warnings + +from lxml import etree as _etree + +from .common import DTDForbidden, EntitiesForbidden, NotSupportedError + +LXML3 = _etree.LXML_VERSION[0] >= 3 + +__origin__ = "lxml.etree" + +tostring = _etree.tostring + + +warnings.warn( + "defusedxml.lxml is no longer supported and will be removed in a future release.", + category=DeprecationWarning, + stacklevel=2, +) + + +class RestrictedElement(_etree.ElementBase): + """A restricted Element class that filters out instances of some classes""" + + __slots__ = () + # blacklist = (etree._Entity, etree._ProcessingInstruction, etree._Comment) + blacklist = _etree._Entity + + def _filter(self, iterator): + blacklist = self.blacklist + for child in iterator: + if isinstance(child, blacklist): + continue + yield child + + def __iter__(self): + iterator = super(RestrictedElement, self).__iter__() + return self._filter(iterator) + + def iterchildren(self, tag=None, reversed=False): + iterator = super(RestrictedElement, self).iterchildren(tag=tag, reversed=reversed) + return self._filter(iterator) + + def iter(self, tag=None, *tags): + iterator = super(RestrictedElement, self).iter(tag=tag, *tags) + return self._filter(iterator) + + def iterdescendants(self, tag=None, *tags): + iterator = super(RestrictedElement, self).iterdescendants(tag=tag, *tags) + return self._filter(iterator) + + def itersiblings(self, tag=None, preceding=False): + iterator = super(RestrictedElement, self).itersiblings(tag=tag, preceding=preceding) + return self._filter(iterator) + + def getchildren(self): + iterator = super(RestrictedElement, self).__iter__() + return list(self._filter(iterator)) + + def getiterator(self, tag=None): + iterator = super(RestrictedElement, self).getiterator(tag) + return self._filter(iterator) + + +class GlobalParserTLS(threading.local): + """Thread local context for custom parser instances""" + + parser_config = { + "resolve_entities": False, + # 'remove_comments': True, + # 'remove_pis': True, + } + + element_class = RestrictedElement + + def createDefaultParser(self): + parser = _etree.XMLParser(**self.parser_config) + element_class = self.element_class + if self.element_class is not None: + lookup = _etree.ElementDefaultClassLookup(element=element_class) + parser.set_element_class_lookup(lookup) + return parser + + def setDefaultParser(self, parser): + self._default_parser = parser + + def getDefaultParser(self): + parser = getattr(self, "_default_parser", None) + if parser is None: + parser = self.createDefaultParser() + self.setDefaultParser(parser) + return parser + + +_parser_tls = GlobalParserTLS() +getDefaultParser = _parser_tls.getDefaultParser + + +def check_docinfo(elementtree, forbid_dtd=False, forbid_entities=True): + """Check docinfo of an element tree for DTD and entity declarations + + The check for entity declarations needs lxml 3 or newer. lxml 2.x does + not support dtd.iterentities(). 
+ """ + docinfo = elementtree.docinfo + if docinfo.doctype: + if forbid_dtd: + raise DTDForbidden(docinfo.doctype, docinfo.system_url, docinfo.public_id) + if forbid_entities and not LXML3: + # lxml < 3 has no iterentities() + raise NotSupportedError("Unable to check for entity declarations " "in lxml 2.x") + + if forbid_entities: + for dtd in docinfo.internalDTD, docinfo.externalDTD: + if dtd is None: + continue + for entity in dtd.iterentities(): + raise EntitiesForbidden(entity.name, entity.content, None, None, None, None) + + +def parse(source, parser=None, base_url=None, forbid_dtd=False, forbid_entities=True): + if parser is None: + parser = getDefaultParser() + elementtree = _etree.parse(source, parser, base_url=base_url) + check_docinfo(elementtree, forbid_dtd, forbid_entities) + return elementtree + + +def fromstring(text, parser=None, base_url=None, forbid_dtd=False, forbid_entities=True): + if parser is None: + parser = getDefaultParser() + rootelement = _etree.fromstring(text, parser, base_url=base_url) + elementtree = rootelement.getroottree() + check_docinfo(elementtree, forbid_dtd, forbid_entities) + return rootelement + + +XML = fromstring + + +def iterparse(*args, **kwargs): + raise NotSupportedError("defused lxml.etree.iterparse not available") diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/defusedxml/minidom.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/defusedxml/minidom.py new file mode 100644 index 00000000..78033b6c --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/defusedxml/minidom.py @@ -0,0 +1,63 @@ +# defusedxml +# +# Copyright (c) 2013 by Christian Heimes +# Licensed to PSF under a Contributor Agreement. +# See https://www.python.org/psf/license for licensing details. +"""Defused xml.dom.minidom +""" +from __future__ import print_function, absolute_import + +from xml.dom.minidom import _do_pulldom_parse +from . import expatbuilder as _expatbuilder +from . import pulldom as _pulldom + +__origin__ = "xml.dom.minidom" + + +def parse( + file, parser=None, bufsize=None, forbid_dtd=False, forbid_entities=True, forbid_external=True +): + """Parse a file into a DOM by filename or file object.""" + if parser is None and not bufsize: + return _expatbuilder.parse( + file, + forbid_dtd=forbid_dtd, + forbid_entities=forbid_entities, + forbid_external=forbid_external, + ) + else: + return _do_pulldom_parse( + _pulldom.parse, + (file,), + { + "parser": parser, + "bufsize": bufsize, + "forbid_dtd": forbid_dtd, + "forbid_entities": forbid_entities, + "forbid_external": forbid_external, + }, + ) + + +def parseString( + string, parser=None, forbid_dtd=False, forbid_entities=True, forbid_external=True +): + """Parse a file into a DOM from a string.""" + if parser is None: + return _expatbuilder.parseString( + string, + forbid_dtd=forbid_dtd, + forbid_entities=forbid_entities, + forbid_external=forbid_external, + ) + else: + return _do_pulldom_parse( + _pulldom.parseString, + (string,), + { + "parser": parser, + "forbid_dtd": forbid_dtd, + "forbid_entities": forbid_entities, + "forbid_external": forbid_external, + }, + ) diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/defusedxml/pulldom.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/defusedxml/pulldom.py new file mode 100644 index 00000000..e3b10a46 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/defusedxml/pulldom.py @@ -0,0 +1,41 @@ +# defusedxml +# +# Copyright (c) 2013 by Christian Heimes +# Licensed to PSF under a Contributor Agreement. 
+# See https://www.python.org/psf/license for licensing details. +"""Defused xml.dom.pulldom +""" +from __future__ import print_function, absolute_import + +from xml.dom.pulldom import parse as _parse +from xml.dom.pulldom import parseString as _parseString +from .sax import make_parser + +__origin__ = "xml.dom.pulldom" + + +def parse( + stream_or_string, + parser=None, + bufsize=None, + forbid_dtd=False, + forbid_entities=True, + forbid_external=True, +): + if parser is None: + parser = make_parser() + parser.forbid_dtd = forbid_dtd + parser.forbid_entities = forbid_entities + parser.forbid_external = forbid_external + return _parse(stream_or_string, parser, bufsize) + + +def parseString( + string, parser=None, forbid_dtd=False, forbid_entities=True, forbid_external=True +): + if parser is None: + parser = make_parser() + parser.forbid_dtd = forbid_dtd + parser.forbid_entities = forbid_entities + parser.forbid_external = forbid_external + return _parseString(string, parser) diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/defusedxml/sax.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/defusedxml/sax.py new file mode 100644 index 00000000..b2786f74 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/defusedxml/sax.py @@ -0,0 +1,60 @@ +# defusedxml +# +# Copyright (c) 2013 by Christian Heimes +# Licensed to PSF under a Contributor Agreement. +# See https://www.python.org/psf/license for licensing details. +"""Defused xml.sax +""" +from __future__ import print_function, absolute_import + +from xml.sax import InputSource as _InputSource +from xml.sax import ErrorHandler as _ErrorHandler + +from . import expatreader + +__origin__ = "xml.sax" + + +def parse( + source, + handler, + errorHandler=_ErrorHandler(), + forbid_dtd=False, + forbid_entities=True, + forbid_external=True, +): + parser = make_parser() + parser.setContentHandler(handler) + parser.setErrorHandler(errorHandler) + parser.forbid_dtd = forbid_dtd + parser.forbid_entities = forbid_entities + parser.forbid_external = forbid_external + parser.parse(source) + + +def parseString( + string, + handler, + errorHandler=_ErrorHandler(), + forbid_dtd=False, + forbid_entities=True, + forbid_external=True, +): + from io import BytesIO + + if errorHandler is None: + errorHandler = _ErrorHandler() + parser = make_parser() + parser.setContentHandler(handler) + parser.setErrorHandler(errorHandler) + parser.forbid_dtd = forbid_dtd + parser.forbid_entities = forbid_entities + parser.forbid_external = forbid_external + + inpsrc = _InputSource() + inpsrc.setByteStream(BytesIO(string)) + parser.parse(inpsrc) + + +def make_parser(parser_list=[]): + return expatreader.create_parser() diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/defusedxml/xmlrpc.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/defusedxml/xmlrpc.py new file mode 100644 index 00000000..fbc674da --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/defusedxml/xmlrpc.py @@ -0,0 +1,153 @@ +# defusedxml +# +# Copyright (c) 2013 by Christian Heimes +# Licensed to PSF under a Contributor Agreement. +# See https://www.python.org/psf/license for licensing details. 
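+# +# Editor's sketch (not upstream): the usual entry point is the monkey patch +# defined at the bottom of this module: +# +#     import defusedxml.xmlrpc +#     defusedxml.xmlrpc.monkey_patch()  # installs DefusedExpatParser and the gzip limits 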
+"""Defused xmlrpclib + +Also defuses gzip bomb +""" +from __future__ import print_function, absolute_import + +import io + +from .common import DTDForbidden, EntitiesForbidden, ExternalReferenceForbidden, PY3 + +if PY3: + __origin__ = "xmlrpc.client" + from xmlrpc.client import ExpatParser + from xmlrpc import client as xmlrpc_client + from xmlrpc import server as xmlrpc_server + from xmlrpc.client import gzip_decode as _orig_gzip_decode + from xmlrpc.client import GzipDecodedResponse as _OrigGzipDecodedResponse +else: + __origin__ = "xmlrpclib" + from xmlrpclib import ExpatParser + import xmlrpclib as xmlrpc_client + + xmlrpc_server = None + from xmlrpclib import gzip_decode as _orig_gzip_decode + from xmlrpclib import GzipDecodedResponse as _OrigGzipDecodedResponse + +try: + import gzip +except ImportError: # pragma: no cover + gzip = None + + +# Limit maximum request size to prevent resource exhaustion DoS +# Also used to limit maximum amount of gzip decoded data in order to prevent +# decompression bombs +# A value of -1 or smaller disables the limit +MAX_DATA = 30 * 1024 * 1024 # 30 MB + + +def defused_gzip_decode(data, limit=None): + """gzip encoded data -> unencoded data + + Decode data using the gzip content encoding as described in RFC 1952 + """ + if not gzip: # pragma: no cover + raise NotImplementedError + if limit is None: + limit = MAX_DATA + f = io.BytesIO(data) + gzf = gzip.GzipFile(mode="rb", fileobj=f) + try: + if limit < 0: # no limit + decoded = gzf.read() + else: + decoded = gzf.read(limit + 1) + except IOError: # pragma: no cover + raise ValueError("invalid data") + f.close() + gzf.close() + if limit >= 0 and len(decoded) > limit: + raise ValueError("max gzipped payload length exceeded") + return decoded + + +class DefusedGzipDecodedResponse(gzip.GzipFile if gzip else object): + """a file-like object to decode a response encoded with the gzip + method, as described in RFC 1952. 
+ """ + + def __init__(self, response, limit=None): + # response doesn't support tell() and read(), required by + # GzipFile + if not gzip: # pragma: no cover + raise NotImplementedError + self.limit = limit = limit if limit is not None else MAX_DATA + if limit < 0: # no limit + data = response.read() + self.readlength = None + else: + data = response.read(limit + 1) + self.readlength = 0 + if limit >= 0 and len(data) > limit: + raise ValueError("max payload length exceeded") + self.stringio = io.BytesIO(data) + gzip.GzipFile.__init__(self, mode="rb", fileobj=self.stringio) + + def read(self, n): + if self.limit >= 0: + left = self.limit - self.readlength + n = min(n, left + 1) + data = gzip.GzipFile.read(self, n) + self.readlength += len(data) + if self.readlength > self.limit: + raise ValueError("max payload length exceeded") + return data + else: + return gzip.GzipFile.read(self, n) + + def close(self): + gzip.GzipFile.close(self) + self.stringio.close() + + +class DefusedExpatParser(ExpatParser): + def __init__(self, target, forbid_dtd=False, forbid_entities=True, forbid_external=True): + ExpatParser.__init__(self, target) + self.forbid_dtd = forbid_dtd + self.forbid_entities = forbid_entities + self.forbid_external = forbid_external + parser = self._parser + if self.forbid_dtd: + parser.StartDoctypeDeclHandler = self.defused_start_doctype_decl + if self.forbid_entities: + parser.EntityDeclHandler = self.defused_entity_decl + parser.UnparsedEntityDeclHandler = self.defused_unparsed_entity_decl + if self.forbid_external: + parser.ExternalEntityRefHandler = self.defused_external_entity_ref_handler + + def defused_start_doctype_decl(self, name, sysid, pubid, has_internal_subset): + raise DTDForbidden(name, sysid, pubid) + + def defused_entity_decl( + self, name, is_parameter_entity, value, base, sysid, pubid, notation_name + ): + raise EntitiesForbidden(name, value, base, sysid, pubid, notation_name) + + def defused_unparsed_entity_decl(self, name, base, sysid, pubid, notation_name): + # expat 1.2 + raise EntitiesForbidden(name, None, base, sysid, pubid, notation_name) # pragma: no cover + + def defused_external_entity_ref_handler(self, context, base, sysid, pubid): + raise ExternalReferenceForbidden(context, base, sysid, pubid) + + +def monkey_patch(): + xmlrpc_client.FastParser = DefusedExpatParser + xmlrpc_client.GzipDecodedResponse = DefusedGzipDecodedResponse + xmlrpc_client.gzip_decode = defused_gzip_decode + if xmlrpc_server: + xmlrpc_server.gzip_decode = defused_gzip_decode + + +def unmonkey_patch(): + xmlrpc_client.FastParser = None + xmlrpc_client.GzipDecodedResponse = _OrigGzipDecodedResponse + xmlrpc_client.gzip_decode = _orig_gzip_decode + if xmlrpc_server: + xmlrpc_server.gzip_decode = _orig_gzip_decode diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/distutils-precedence.pth b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/distutils-precedence.pth new file mode 100644 index 00000000..7f009fe9 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/distutils-precedence.pth @@ -0,0 +1 @@ +import os; var = 'SETUPTOOLS_USE_DISTUTILS'; enabled = os.environ.get(var, 'local') == 'local'; enabled and __import__('_distutils_hack').add_shim(); diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future-0.18.2.dist-info/INSTALLER b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future-0.18.2.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ 
b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future-0.18.2.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future-0.18.2.dist-info/LICENSE.txt b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future-0.18.2.dist-info/LICENSE.txt new file mode 100644 index 00000000..4c904dba --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future-0.18.2.dist-info/LICENSE.txt @@ -0,0 +1,19 @@ +Copyright (c) 2013-2019 Python Charmers Pty Ltd, Australia + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future-0.18.2.dist-info/METADATA b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future-0.18.2.dist-info/METADATA new file mode 100644 index 00000000..b6f83573 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future-0.18.2.dist-info/METADATA @@ -0,0 +1,110 @@ +Metadata-Version: 2.1 +Name: future +Version: 0.18.2 +Summary: Clean single-source support for Python 3 and 2 +Home-page: https://python-future.org +Author: Ed Schofield +Author-email: ed@pythoncharmers.com +License: MIT +Keywords: future past python3 migration futurize backport six 2to3 modernize pasteurize 3to2 +Platform: UNKNOWN +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: License :: OSI Approved +Classifier: License :: OSI Approved :: MIT License +Classifier: Development Status :: 4 - Beta +Classifier: Intended Audience :: Developers +Requires-Python: >=2.6, !=3.0.*, !=3.1.*, !=3.2.* +License-File: LICENSE.txt + + +future: Easy, safe support for Python 2/3 compatibility +======================================================= + +``future`` is the missing compatibility layer between Python 2 and Python +3. It allows you to use a single, clean Python 3.x-compatible codebase to +support both Python 2 and Python 3 with minimal overhead. 
+ +It is designed to be used as follows:: + + from __future__ import (absolute_import, division, + print_function, unicode_literals) + from builtins import ( + bytes, dict, int, list, object, range, str, + ascii, chr, hex, input, next, oct, open, + pow, round, super, + filter, map, zip) + +followed by predominantly standard, idiomatic Python 3 code that then runs +similarly on Python 2.6/2.7 and Python 3.3+. + +The imports have no effect on Python 3. On Python 2, they shadow the +corresponding builtins, which normally have different semantics on Python 3 +versus 2, to provide their Python 3 semantics. + + +Standard library reorganization +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +``future`` supports the standard library reorganization (PEP 3108) through the +following Py3 interfaces: + + >>> # Top-level packages with Py3 names provided on Py2: + >>> import html.parser + >>> import queue + >>> import tkinter.dialog + >>> import xmlrpc.client + >>> # etc. + + >>> # Aliases provided for extensions to existing Py2 module names: + >>> from future.standard_library import install_aliases + >>> install_aliases() + + >>> from collections import Counter, OrderedDict # backported to Py2.6 + >>> from collections import UserDict, UserList, UserString + >>> import urllib.request + >>> from itertools import filterfalse, zip_longest + >>> from subprocess import getoutput, getstatusoutput + + +Automatic conversion +-------------------- + +An included script called `futurize +`_ aids in converting +code (from either Python 2 or Python 3) to code compatible with both +platforms. It is similar to ``python-modernize`` but goes further in +providing Python 3 compatibility through the use of the backported types +and builtin functions in ``future``. + + +Documentation +------------- + +See: http://python-future.org + + +Credits +------- + +:Author: Ed Schofield, Jordan M. Adler, et al +:Sponsor: Python Charmers Pty Ltd, Australia, and Python Charmers Pte + Ltd, Singapore. http://pythoncharmers.com +:Others: See docs/credits.rst or http://python-future.org/credits.html + + +Licensing +--------- +Copyright 2013-2019 Python Charmers Pty Ltd, Australia. +The software is distributed under an MIT licence. See LICENSE.txt. 
+ + + diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future-0.18.2.dist-info/RECORD b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future-0.18.2.dist-info/RECORD new file mode 100644 index 00000000..09cd9f3b --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future-0.18.2.dist-info/RECORD @@ -0,0 +1,219 @@ +../../bin/futurize,sha256=sXCNiOHh_TxI0O2jpXervN8SaTGHsk3s7DWZVaVd7ps,215 +../../bin/pasteurize,sha256=Z6X36i6UI3tjKEgkWJ-dYlE1hFprHZZaMzOq0Pr6yu8,217 +future-0.18.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +future-0.18.2.dist-info/LICENSE.txt,sha256=kW5WE5LUhHG5wjQ39W4mUvMgyzsRnOqhYu30EBb3Rrk,1083 +future-0.18.2.dist-info/METADATA,sha256=Xjjk3ziBhbMk6Wv0UPOWwVUsKGWBitr7WJrud7vWKss,3729 +future-0.18.2.dist-info/RECORD,, +future-0.18.2.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92 +future-0.18.2.dist-info/entry_points.txt,sha256=-ATQtLUC2gkzrCYqc1Twac093xrI164NuMwsRALJWnM,89 +future-0.18.2.dist-info/top_level.txt,sha256=DT0C3az2gb-uJaj-fs0h4WwHYlJVDp0EvLdud1y5Zyw,38 +future/__init__.py,sha256=TsDq1XoGk6Jfach_rEhwAi07zR5OKYZ6hhUlG5Bj6Ag,2991 +future/backports/__init__.py,sha256=5QXvQ_jc5Xx6p4dSaHnZXPZazBEunKDKhbUjxZ0XD1I,530 +future/backports/_markupbase.py,sha256=MDPTCykLq4J7Aea3PvYotATEE0CG4R_SjlxfJaLXTJM,16215 +future/backports/datetime.py,sha256=I214Vu0cRY8mi8J5aIcsAyQJnWmOKXeLV-QTWSn7VQU,75552 +future/backports/email/__init__.py,sha256=eH3AJr3FkuBy_D6yS1V2K76Q2CQ93y2zmAMWmn8FbHI,2269 +future/backports/email/_encoded_words.py,sha256=m1vTRfxAQdg4VyWO7PF-1ih1mmq97V-BPyHHkuEwSME,8443 +future/backports/email/_header_value_parser.py,sha256=cj_1ce1voLn8H98r9cKqiSLgfFSxCv3_UL3sSvjqgjk,104692 +future/backports/email/_parseaddr.py,sha256=KewEnos0YDM-SYX503z7E1MmVbG5VRaKjxjcl0Ipjbs,17389 +future/backports/email/_policybase.py,sha256=2lJD9xouiz4uHvWGQ6j1nwlwWVQGwwzpy5JZoeQqhUc,14647 +future/backports/email/base64mime.py,sha256=sey6iJA9pHIOdFgoV1p7QAwYVjt8CEkDhITt304-nyI,3729 +future/backports/email/charset.py,sha256=CfE4iV2zAq6MQC0CHXHLnwTNW71zmhNITbzOcfxE4vY,17439 +future/backports/email/encoders.py,sha256=Nn4Pcx1rOdRgoSIzB6T5RWHl5zxClbf32wgE6D0tUt8,2800 +future/backports/email/errors.py,sha256=tRX8PP5g7mk2bAxL1jTCYrbfhD2gPZFNrh4_GJRM8OQ,3680 +future/backports/email/feedparser.py,sha256=bvmhb4cdY-ipextPK2K2sDgMsNvTspmuQfYyCxc4zSc,22736 +future/backports/email/generator.py,sha256=lpaLhZHneguvZ2QgRu7Figkjb7zmY28AGhj9iZTdI7s,19520 +future/backports/email/header.py,sha256=uBHbNKO-yx5I9KBflernJpyy3fX4gImCB1QE7ICApLs,24448 +future/backports/email/headerregistry.py,sha256=ZPbvLKXD0NMLSU4jXlVHfGyGcLMrFm-GQVURu_XHj88,20637 +future/backports/email/iterators.py,sha256=kMRYFGy3SVVpo7HG7JJr2ZAlOoaX6CVPzKYwDSvLfV0,2348 +future/backports/email/message.py,sha256=I6WW5cZDza7uwLOGJSvsDhGZC9K_Q570Lk2gt_vDUXM,35237 +future/backports/email/mime/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +future/backports/email/mime/application.py,sha256=m-5a4mSxu2E32XAImnp9x9eMVX5Vme2iNgn2dMMNyss,1401 +future/backports/email/mime/audio.py,sha256=2ognalFRadcsUYQYMUZbjv5i1xJbFhQN643doMuI7M4,2815 +future/backports/email/mime/base.py,sha256=wV3ClQyMsOqmkXSXbk_wd_zPoPTvBx8kAIzq3rdM4lE,875 +future/backports/email/mime/image.py,sha256=DpQk1sB-IMmO43AF4uadsXyf_y5TdEzJLfyhqR48bIw,1907 +future/backports/email/mime/message.py,sha256=pFsMhXW07aRjsLq1peO847PApWFAl28-Z2Z7BP1Dn74,1429 +future/backports/email/mime/multipart.py,sha256=j4Lf_sJmuwTbfgdQ6R35_t1_ha2DynJBJDvpjwbNObE,1699 
+future/backports/email/mime/nonmultipart.py,sha256=Ciba1Z8d2yLDDpxgDJuk3Bb-TqcpE9HCd8KfbW5vgl4,832 +future/backports/email/mime/text.py,sha256=zV98BjoR4S_nX8c47x43LnsnifeGhIfNGwSAh575bs0,1552 +future/backports/email/parser.py,sha256=-115SC3DHZ6lLijWFTxuOnE-GiM2BOYaUSz-QpmvYSo,5312 +future/backports/email/policy.py,sha256=gpcbhVRXuCohkK6MUqopTs1lv4E4-ZVUO6OVncoGEJE,8823 +future/backports/email/quoprimime.py,sha256=w93W5XgdFpyGaDqDBJrnXF_v_npH5r20WuAxmrAzyQg,10923 +future/backports/email/utils.py,sha256=vpfN0E8UjNbNw-2NFBQGCo4TNgrghMsqzpEYW5C_fBs,14270 +future/backports/html/__init__.py,sha256=FKwqFtWMCoGNkhU97OPnR1fZSh6etAKfN1FU1KvXcV8,924 +future/backports/html/entities.py,sha256=kzoRnQyGk_3DgoucHLhL5QL1pglK9nvmxhPIGZFDTnc,75428 +future/backports/html/parser.py,sha256=G2tUObvbHSotNt06JLY-BP1swaZNfDYFd_ENWDjPmRg,19770 +future/backports/http/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +future/backports/http/client.py,sha256=76EbhEZOtvdHFcU-jrjivoff13oQ9IMbdkZEdf5kQzQ,47602 +future/backports/http/cookiejar.py,sha256=_Vy4BPT-h0ZT0R_utGQAFXzuOAdmU9KedGFffyX9wN4,76559 +future/backports/http/cookies.py,sha256=DsyDUGDEbCXAA9Jq6suswSc76uSZqUu39adDDNj8XGw,21581 +future/backports/http/server.py,sha256=1CaMxgzHf9lYhmTJyE7topgjRIlIn9cnjgw8YEvwJV4,45523 +future/backports/misc.py,sha256=AkbED6BdHKnYCmIAontT4zHKTqdPPfJfn35HIs6LDrg,32682 +future/backports/socket.py,sha256=DH1V6IjKPpJ0tln8bYvxvQ7qnvZG-UoQtMA5yVleHiU,15663 +future/backports/socketserver.py,sha256=Twvyk5FqVnOeiNcbVsyMDPTF1mNlkKfyofG7tKxTdD8,24286 +future/backports/test/__init__.py,sha256=9dXxIZnkI095YfHC-XIaVF6d31GjeY1Ag8TEzcFgepM,264 +future/backports/test/badcert.pem,sha256=JioQeRZkHH8hGsWJjAF3U1zQvcWqhyzG6IOEJpTY9SE,1928 +future/backports/test/badkey.pem,sha256=gaBK9px_gG7DmrLKxfD6f6i-toAmARBTVfs-YGFRQF0,2162 +future/backports/test/dh512.pem,sha256=dUTsjtLbK-femrorUrTGF8qvLjhTiT_n4Uo5V6u__Gs,402 +future/backports/test/https_svn_python_org_root.pem,sha256=wOB3Onnc62Iu9kEFd8GcHhd_suucYjpJNA3jyfHeJWA,2569 +future/backports/test/keycert.passwd.pem,sha256=ZBfnVLpbBtAOf_2gCdiQ-yrBHmRsNzSf8VC3UpQZIjg,1830 +future/backports/test/keycert.pem,sha256=xPXi5idPcQVbrhgxBqF2TNGm6sSZ2aLVVEt6DWzplL8,1783 +future/backports/test/keycert2.pem,sha256=DB46FEAYv8BWwQJ-5RzC696FxPN7CON-Qsi-R4poJgc,1795 +future/backports/test/nokia.pem,sha256=s00x0uPDSaa5DHJ_CwzlVhg3OVdJ47f4zgqQdd0SAfQ,1923 +future/backports/test/nullbytecert.pem,sha256=NFRYWhmP_qT3jGfVjR6-iaC-EQdhIFjiXtTLN5ZPKnE,5435 +future/backports/test/nullcert.pem,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +future/backports/test/pystone.py,sha256=fvyoJ_tVovTNaxbJmdJMwr9F6SngY-U4ibULnd_wUqA,7427 +future/backports/test/sha256.pem,sha256=3wB-GQqEc7jq-PYwYAQaPbtTvvr7stk_DVmZxFgehfA,8344 +future/backports/test/ssl_cert.pem,sha256=M607jJNeIeHG9BlTf_jaQkPJI4nOxSJPn-zmEAaW43M,867 +future/backports/test/ssl_key.passwd.pem,sha256=I_WH4sBw9Vs9Z-BvmuXY0aw8tx8avv6rm5UL4S_pP00,963 +future/backports/test/ssl_key.pem,sha256=VKGU-R3UYaZpVTXl7chWl4vEYEDeob69SfvRTQ8aq_4,916 +future/backports/test/ssl_servers.py,sha256=-pd7HMZljuZfFRAbCAiAP_2G04orITJFj-S9ddr6o84,7209 +future/backports/test/support.py,sha256=zJrb-pz-Wu2dZwnNodg1v3w96zVq7ORuN-hOGOHbdA8,70881 +future/backports/total_ordering.py,sha256=O3M57_IisQ-zW5hW20uxkfk4fTGsr0EF2tAKx3BksQo,1929 +future/backports/urllib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +future/backports/urllib/error.py,sha256=ktikuK9ag4lS4f8Z0k5p1F11qF40N2AiOtjbXiF97ew,2715 
+future/backports/urllib/parse.py,sha256=67avrYqV1UK7i_22goRUrvJ8SffzjRdTja9wzq_ynXY,35792
+future/backports/urllib/request.py,sha256=aR9ZMzfhV1C2Qk3wFsGvkwxqtdPTdsJVGRt5DUCwgJ8,96276
+future/backports/urllib/response.py,sha256=ooQyswwbb-9N6IVi1Kwjss1aR-Kvm8ZNezoyVEonp8c,3180
+future/backports/urllib/robotparser.py,sha256=pnAGTbKhdbCq_9yMZp7m8hj5q_NJpyQX6oQIZuYcnkw,6865
+future/backports/xmlrpc/__init__.py,sha256=h61ciVTdVvu8oEUXv4dHf_Tc5XUXDH3RKB1-8fQhSsg,38
+future/backports/xmlrpc/client.py,sha256=6a6Pvx_RVC9gIHDkFOVdREeGaZckOOiWd7T6GyzU3qU,48133
+future/backports/xmlrpc/server.py,sha256=W_RW5hgYbNV2LGbnvngzm7akacRdK-XFY-Cy2HL-qsY,37285
+future/builtins/__init__.py,sha256=jSdOucWfCsfkfTR8Jd4-Ls-YQpJ0AnzUomBxgwuoxNs,1687
+future/builtins/disabled.py,sha256=Ysq74bsmwntpq7dzkwTAD7IHKrkXy66vJlPshVwgVBI,2109
+future/builtins/iterators.py,sha256=l1Zawm2x82oqOuGGtCZRE76Ej98sMlHQwu9fZLK5RrA,1396
+future/builtins/misc.py,sha256=hctlKKWUyN0Eoodxg4ySQHEqARTukOLR4L5K5c6PW9k,4550
+future/builtins/new_min_max.py,sha256=7qQ4iiG4GDgRzjPzzzmg9pdby35Mtt6xNOOsyqHnIGY,1757
+future/builtins/newnext.py,sha256=oxXB8baXqJv29YG40aCS9UXk9zObyoOjya8BJ7NdBJM,2009
+future/builtins/newround.py,sha256=l2EXPAFU3fAsZigJxUH6x66B7jhNaB076-L5FR617R8,3181
+future/builtins/newsuper.py,sha256=LmiUQ_f6NXDIz6v6sDPkoTWl-2Zccy7PpZfQKYtscac,4146
+future/moves/__init__.py,sha256=MsAW69Xp_fqUo4xODufcKM6AZf-ozHaz44WPZdsDFJA,220
+future/moves/_dummy_thread.py,sha256=c8ZRUd8ffvyvGKGGgve5NKc8VdtAWquu8-4FnO2EdvA,175
+future/moves/_markupbase.py,sha256=W9wh_Gu3jDAMIhVBV1ZnCkJwYLHRk_v_su_HLALBkZQ,171
+future/moves/_thread.py,sha256=rwY7L4BZMFPlrp_i6T2Un4_iKYwnrXJ-yV6FJZN8YDo,163
+future/moves/builtins.py,sha256=4sjjKiylecJeL9da_RaBZjdymX2jtMs84oA9lCqb4Ug,281
+future/moves/collections.py,sha256=OKQ-TfUgms_2bnZRn4hrclLDoiN2i-HSWcjs3BC2iY8,417
+future/moves/configparser.py,sha256=TNy226uCbljjU-DjAVo7j7Effbj5zxXvDh0SdXehbzk,146
+future/moves/copyreg.py,sha256=Y3UjLXIMSOxZggXtvZucE9yv4tkKZtVan45z8eix4sU,438
+future/moves/dbm/__init__.py,sha256=_VkvQHC2UcIgZFPRroiX_P0Fs7HNqS_69flR0-oq2B8,488
+future/moves/dbm/dumb.py,sha256=HKdjjtO3EyP9EKi1Hgxh_eUU6yCQ0fBX9NN3n-zb8JE,166
+future/moves/dbm/gnu.py,sha256=XoCSEpZ2QaOgo2h1m80GW7NUgj_b93BKtbcuwgtnaKo,162
+future/moves/dbm/ndbm.py,sha256=OFnreyo_1YHDBl5YUm9gCzKlN1MHgWbfSQAZVls2jaM,162
+future/moves/html/__init__.py,sha256=BSUFSHxXf2kGvHozlnrB1nn6bPE6p4PpN3DwA_Z5geo,1016
+future/moves/html/entities.py,sha256=lVvchdjK_RzRj759eg4RMvGWHfgBbj0tKGOoZ8dbRyY,177
+future/moves/html/parser.py,sha256=V2XpHLKLCxQum3N9xlO3IUccAD7BIykZMqdEcWET3vY,167
+future/moves/http/__init__.py,sha256=Mx1v_Tcks4udHCtDM8q2xnYUiQ01gD7EpPyeQwsP3-Q,71
+future/moves/http/client.py,sha256=hqEBq7GDXZidd1AscKnSyjSoMcuj8rERqGTmD7VheDQ,165
+future/moves/http/cookiejar.py,sha256=Frr9ZZCg-145ymy0VGpiPJhvBEpJtVqRBYPaKhgT1Z4,173
+future/moves/http/cookies.py,sha256=PPrHa1_oDbu3D_BhJGc6PvMgY1KoxyYq1jqeJwEcMvE,233
+future/moves/http/server.py,sha256=8YQlSCShjAsB5rr5foVvZgp3IzwYFvTmGZCHhBSDtaI,606
+future/moves/itertools.py,sha256=PVxFHRlBQl9ElS0cuGFPcUtj53eHX7Z1DmggzGfgQ6c,158
+future/moves/pickle.py,sha256=r8j9skzfE8ZCeHyh_OB-WucOkRTIHN7zpRM7l7V3qS4,229
+future/moves/queue.py,sha256=uxvLCChF-zxWWgrY1a_wxt8rp2jILdwO4PrnkBW6VTE,160
+future/moves/reprlib.py,sha256=Nt5sUgMQ3jeVIukqSHOvB0UIsl6Y5t-mmT_13mpZmiY,161
+future/moves/socketserver.py,sha256=v8ZLurDxHOgsubYm1iefjlpnnJQcx2VuRUGt9FCJB9k,174
+future/moves/subprocess.py,sha256=oqRSMfFZkxM4MXkt3oD5N6eBwmmJ6rQ9KPhvSQKT_hM,251
+future/moves/sys.py,sha256=HOMRX4Loim75FMbWawd3oEwuGNJR-ClMREEFkVpBsRs,132
+future/moves/test/__init__.py,sha256=yB9F-fDQpzu1v8cBoKgIrL2ScUNqjlkqEztYrGVCQ-0,110
+future/moves/test/support.py,sha256=6zGgTTXcERyBJIQ04-X-sAe781tVgLVHp3HzmQPy52g,259
+future/moves/tkinter/__init__.py,sha256=jV9vDx3wRl0bsoclU8oSe-5SqHQ3YpCbStmqtXnq1p4,620
+future/moves/tkinter/colorchooser.py,sha256=kprlmpRtvDbW5Gq43H1mi2KmNJ2kuzLQOba0a5EwDkU,333
+future/moves/tkinter/commondialog.py,sha256=mdUbq1IZqOGaSA7_8R367IukDCsMfzXiVHrTQQpp7Z0,333
+future/moves/tkinter/constants.py,sha256=0qRUrZLRPdVxueABL9KTzzEWEsk6xM1rOjxK6OHxXtA,324
+future/moves/tkinter/dialog.py,sha256=ksp-zvs-_A90P9RNHS8S27f1k8f48zG2Bel2jwZV5y0,311
+future/moves/tkinter/dnd.py,sha256=C_Ah0Urnyf2XKE5u-oP6mWi16RzMSXgMA1uhBSAwKY8,306
+future/moves/tkinter/filedialog.py,sha256=RSJFDGOP2AJ4T0ZscJ2hyF9ssOWp9t_S_DtnOmT-WZ8,323
+future/moves/tkinter/font.py,sha256=TXarflhJRxqepaRNSDw6JFUVGz5P1T1C4_uF9VRqj3w,309
+future/moves/tkinter/messagebox.py,sha256=WJt4t83kLmr_UnpCWFuLoyazZr3wAUOEl6ADn3osoEA,327
+future/moves/tkinter/scrolledtext.py,sha256=DRzN8aBAlDBUo1B2KDHzdpRSzXBfH4rOOz0iuHXbQcg,329
+future/moves/tkinter/simpledialog.py,sha256=6MhuVhZCJV4XfPpPSUWKlDLLGEi0Y2ZlGQ9TbsmJFL0,329
+future/moves/tkinter/tix.py,sha256=aNeOfbWSGmcN69UmEGf4tJ-QIxLT6SU5ynzm1iWgepA,302
+future/moves/tkinter/ttk.py,sha256=rRrJpDjcP2gjQNukECu4F026P-CkW-3Ca2tN6Oia-Fw,302
+future/moves/urllib/__init__.py,sha256=yB9F-fDQpzu1v8cBoKgIrL2ScUNqjlkqEztYrGVCQ-0,110
+future/moves/urllib/error.py,sha256=gfrKzv-6W5OjzNIfjvJaQkxABRLym2KwjfKFXSdDB60,479
+future/moves/urllib/parse.py,sha256=xLLUMIIB5MreCdYzRZ5zIRWrhTRCoMO8RZEH4WPFQDY,1045
+future/moves/urllib/request.py,sha256=ttIzq60PwjRyrLQUGdAtfYvs4fziVwvcLe2Kw-hvE0g,3496
+future/moves/urllib/response.py,sha256=ZEZML0FpbB--GIeBFPvSzbtlVJ6EsR4tCI4qB7D8sFQ,342
+future/moves/urllib/robotparser.py,sha256=j24p6dMNzUpGZtT3BQxwRoE-F88iWmBpKgu0tRV61FQ,179
+future/moves/winreg.py,sha256=2zNAG59QI7vFlCj7kqDh0JrAYTpexOnI55PEAIjYhqo,163
+future/moves/xmlrpc/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+future/moves/xmlrpc/client.py,sha256=2PfnL5IbKVwdKP7C8B1OUviEtuBObwoH4pAPfvHIvQc,143
+future/moves/xmlrpc/server.py,sha256=ESDXdpUgTKyeFmCDSnJmBp8zONjJklsRJOvy4OtaALc,143
+future/standard_library/__init__.py,sha256=7paz9IsD5qv_tvk5Rre3YrlA2_2aS1FJfI7UlrzAtWY,27743
+future/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+future/tests/base.py,sha256=7LTAKHJgUxOwmffD1kgcErVt2VouKcldPnq4iruqk_k,19956
+future/types/__init__.py,sha256=5fBxWqf_OTQ8jZ7k2TS34rFH14togeR488F4zBHIQ-s,6831
+future/types/newbytes.py,sha256=D_kNDD9sbNJir2cUxxePiAuw2OW5irxVnu55uHmuK9E,16303
+future/types/newdict.py,sha256=2N7P44cWmWtiDHvlK5ir15mW492gg6uP2n65d5bsDy4,3100
+future/types/newint.py,sha256=hJiv9qUDrjl1xkfzNFNLzafsRMPoFcRFceoivUzVIek,13286
+future/types/newlist.py,sha256=-H5-fXodd-UQgTFnZBJdwE68CrgIL_jViYdv4w7q2rU,2284
+future/types/newmemoryview.py,sha256=LnARgiKqQ2zLwwDZ3owu1atoonPQkOneWMfxJCwB4_o,712
+future/types/newobject.py,sha256=AX_n8GwlDR2IY-xIwZCvu0Olj_Ca2aS57nlTihnFr-I,3358
+future/types/newopen.py,sha256=lcRNHWZ1UjEn_0_XKis1ZA5U6l-4c-CHlC0WX1sY4NI,810
+future/types/newrange.py,sha256=7sgJaRaC4WIUtZ40K-c1d5QWruyaCWGgTVFadKo8qYA,5294
+future/types/newstr.py,sha256=e0brkurI0IK--4ToQEO4Cz1FECZav4CyUGMKxlrcmK4,15758
+future/utils/__init__.py,sha256=wsvXsKx-DXZichQ10Rdml-CWMqS79RNNynmdvfISpCU,21828
+future/utils/surrogateescape.py,sha256=7u4V4XlW83P5YSAJS2f92YUF8vsWthsiTnmAshOJL_M,6097
+libfuturize/__init__.py,sha256=CZA_KgvTQOPAY1_MrlJeQ6eMh2Eei4_KIv4JuyAkpfw,31
+libfuturize/fixer_util.py,sha256=Zhms5G97l40pyG1krQM2lCp-TxnocBdJkB2AbkAFnKY,17494
+libfuturize/fixes/__init__.py,sha256=5KEpUnjVsFCCsr_-zrikvJbLf9zslEJnFTH_5pBc33I,5236
+libfuturize/fixes/fix_UserDict.py,sha256=jL4jXnGaUQTkG8RKfGXbU_HVTkB3MWZMQwUkqMAjB6I,3840
+libfuturize/fixes/fix_absolute_import.py,sha256=vkrF2FyQR5lSz2WmdqywzkEJVTC0eq4gh_REWBKHh7w,3140
+libfuturize/fixes/fix_add__future__imports_except_unicode_literals.py,sha256=Fr219VAzR8KWXc2_bfiqLl10EgxAWjL6cI3Mowt--VU,662
+libfuturize/fixes/fix_basestring.py,sha256=bHkKuMzhr5FMXwjXlMOjsod4S3rQkVdbzhoWV4-tl3Y,394
+libfuturize/fixes/fix_bytes.py,sha256=AhzOJes6EnPwgzboDjvURANbWKqciG6ZGaYW07PYQK8,685
+libfuturize/fixes/fix_cmp.py,sha256=Blq_Z0IGkYiKS83QzZ5wUgpJyZfQiZoEsWJ1VPyXgFY,701
+libfuturize/fixes/fix_division.py,sha256=gnrAi7stquiVUyi_De1H8q--43iQaSUX0CjnOmQ6O2w,228
+libfuturize/fixes/fix_division_safe.py,sha256=Y_HUfQJAxRClXkcfqWP5SFCsRYZOsLUsNjLXlGOA3cQ,3292
+libfuturize/fixes/fix_execfile.py,sha256=I5AcJ6vPZ7i70TChaq9inxqnZ4C04-yJyfAItGa8E3c,921
+libfuturize/fixes/fix_future_builtins.py,sha256=QBCRpD9XA7tbtfP4wmOF2DXquB4lq-eupkQj-QAxp0s,2027
+libfuturize/fixes/fix_future_standard_library.py,sha256=FVtflFt38efHe_SEX6k3m6IYAtKWjA4rAPZrlCv6yA0,733
+libfuturize/fixes/fix_future_standard_library_urllib.py,sha256=Rf81XcAXA-vwNvrhskf5sLExbR--Wkr5fiUcMYGAKzs,1001
+libfuturize/fixes/fix_input.py,sha256=bhaPNtMrZNbjWIDQCR7Iue5BxBj4rf0RJQ9_jiwvb-s,687
+libfuturize/fixes/fix_metaclass.py,sha256=GLB76wbuyUVciDgW9bgNNOBEnLeS_AR-fKABcPBZk6M,9568
+libfuturize/fixes/fix_next_call.py,sha256=01STG86Av9o5QcpQDJ6UbPhvxt9kKrkatiPeddXRgvA,3158
+libfuturize/fixes/fix_object.py,sha256=qalFIjn0VTWXG5sGOOoCvO65omjX5_9d40SUpwUjBdw,407
+libfuturize/fixes/fix_oldstr_wrap.py,sha256=UCR6Q2l-pVqJSrRTnQAWMlaqBoX7oX1VpG_w6Q0XcyY,1214
+libfuturize/fixes/fix_order___future__imports.py,sha256=ACUCw5NEGWvj6XA9rNj8BYha3ktxLvkM5Ssh5cyV644,829
+libfuturize/fixes/fix_print.py,sha256=92s1w2t9SynA3Y1_85-lexSBbgEWJM6lBrhCxVacfDc,3384
+libfuturize/fixes/fix_print_with_import.py,sha256=hVWn70Q1DPMUiHMyEqgUx-6sM1AylLj78v9pMc4LFw8,735
+libfuturize/fixes/fix_raise.py,sha256=mEXpM9sS6tenMmxayfqM-Kp9gUvaztTY61vFaqyMUuo,3884
+libfuturize/fixes/fix_remove_old__future__imports.py,sha256=j4EC1KEVgXhuQAqhYHnAruUjW6uczPjV_fTCSOLMuAw,851
+libfuturize/fixes/fix_unicode_keep_u.py,sha256=M8fcFxHeFnWVOKoQRpkMsnpd9qmUFubI2oFhO4ZPk7A,779
+libfuturize/fixes/fix_unicode_literals_import.py,sha256=wq-hb-9Yx3Az4ol-ylXZJPEDZ81EaPZeIy5VvpA0CEY,367
+libfuturize/fixes/fix_xrange_with_import.py,sha256=f074qStjMz3OtLjt1bKKZSxQnRbbb7HzEbqHt9wgqdw,479
+libfuturize/main.py,sha256=feICmcv0dzWhutvwz0unnIVxusbSlQZFDaxObkHebs8,13733
+libpasteurize/__init__.py,sha256=CZA_KgvTQOPAY1_MrlJeQ6eMh2Eei4_KIv4JuyAkpfw,31
+libpasteurize/fixes/__init__.py,sha256=ccdv-2MGjQMbq8XuEZBndHmbzGRrZnabksjXZLUv044,3719
+libpasteurize/fixes/feature_base.py,sha256=v7yLjBDBUPeNUc-YHGGlIsJDOQzFAM4Vo0RN5F1JHVU,1723
+libpasteurize/fixes/fix_add_all__future__imports.py,sha256=mHet1LgbHn9GfgCYGNZXKo-rseDWreAvUcAjZwdgeTE,676
+libpasteurize/fixes/fix_add_all_future_builtins.py,sha256=scfkY-Sz5j0yDtLYls2ENOcqEMPVxeDm9gFYYPINPB8,1269
+libpasteurize/fixes/fix_add_future_standard_library_import.py,sha256=thTRbkBzy_SJjZ0bJteTp0sBTx8Wr69xFakH4styf7Y,663
+libpasteurize/fixes/fix_annotations.py,sha256=VT_AorKY9AYWYZUZ17_CeUrJlEA7VGkwVLDQlwD1Bxo,1581
+libpasteurize/fixes/fix_division.py,sha256=_TD_c5KniAYqEm11O7NJF0v2WEhYSNkRGcKG_94ZOas,904
+libpasteurize/fixes/fix_features.py,sha256=NZn0n34_MYZpLNwyP1Tf51hOiN58Rg7A8tA9pK1S8-c,2675
+libpasteurize/fixes/fix_fullargspec.py,sha256=VlZuIU6QNrClmRuvC4mtLICL3yMCi-RcGCnS9fD4b-Q,438
+libpasteurize/fixes/fix_future_builtins.py,sha256=SlCK9I9u05m19Lr1wxlJxF8toZ5yu0yXBeDLxUN9_fw,1450
+libpasteurize/fixes/fix_getcwd.py,sha256=uebvTvFboLqsROFCwdnzoP6ThziM0skz9TDXHoJcFsQ,873
+libpasteurize/fixes/fix_imports.py,sha256=U4lIs_5Xp1qqM8mN72ieDkkIdiyALZFyCZsRC8ZmXlM,4944
+libpasteurize/fixes/fix_imports2.py,sha256=bs2V5Yv0v_8xLx-lNj9kNEAK2dLYXUXkZ2hxECg01CU,8580
+libpasteurize/fixes/fix_kwargs.py,sha256=NB_Ap8YJk-9ncoJRbOiPY_VMIigFgVB8m8AuY29DDhE,5991
+libpasteurize/fixes/fix_memoryview.py,sha256=Fwayx_ezpr22tbJ0-QrKdJ-FZTpU-m7y78l1h_N4xxc,551
+libpasteurize/fixes/fix_metaclass.py,sha256=IcE2KjaDG8jUR3FYXECzOC_cr2pr5r95W1NTbMrK8Wc,3260
+libpasteurize/fixes/fix_newstyle.py,sha256=78sazKOHm9DUoMyW4VdvQpMXZhicbXzorVPRhBpSUrM,888
+libpasteurize/fixes/fix_next.py,sha256=VHqcyORRNVqKJ51jJ1OkhwxHuXRgp8qaldyqcMvA4J0,1233
+libpasteurize/fixes/fix_printfunction.py,sha256=NDIfqVmUJBG3H9E6nrnN0cWZK8ch9pL4F-nMexdsa38,401
+libpasteurize/fixes/fix_raise.py,sha256=zQ_AcMsGmCbtKMgrxZGcHLYNscw6tqXFvHQxgqtNbU8,1099
+libpasteurize/fixes/fix_raise_.py,sha256=9STp633frUfYASjYzqhwxx_MXePNmMhfJClowRj8FLY,1225
+libpasteurize/fixes/fix_throw.py,sha256=_ZREVre-WttUvk4sWjrqUNqm9Q1uFaATECN0_-PXKbk,835
+libpasteurize/fixes/fix_unpacking.py,sha256=eMqRe44Nfq8lo0YFL9oKW75dGARmBSmklj4BCS_q1Lo,5946
+libpasteurize/main.py,sha256=dVHYTQQeJonuOFDNrenJZl-rKHgOQKRMPP1OqnJogWQ,8186
+past/__init__.py,sha256=wIiXaAvXl3svDi-fzuy6HDD0VsuCVr4cnqnCr8XINGI,2918
+past/builtins/__init__.py,sha256=7j_4OsUlN6q2eKr14do7mRQ1GwXRoXAMUR0A1fJpAls,1805
+past/builtins/misc.py,sha256=nw62HVSxuAgT-Q2lD3lmgRB9zmFXopS14dZHEv5xpDQ,2627
+past/builtins/noniterators.py,sha256=LtdELnd7KyYdXg7GkW25cgkEPUC0ggZ5AYMtDe9N95I,9370
+past/translation/__init__.py,sha256=j2e6mLeK74KEICqH6P_-tpKqSNZoMwip2toThhSmKpU,17646
+past/types/__init__.py,sha256=RyJlgqg9uJ8oF-kJT9QlfhfdmhiMh3fShmtvd2CQycY,879
+past/types/basestring.py,sha256=qrImcr24wvdDCMvF9x0Tyx8S1lCt6GIwRvzuAmvg_Tg,728
+past/types/olddict.py,sha256=0YtffZ55VY6AyQ_rwu4DZ4vcRsp6dz-dQzczeyN8hLk,2721
+past/types/oldstr.py,sha256=J2sJPC5jWEdpqXPcFwJFNDKn51TKhi86PsLFmJtQr-M,4332
+past/utils/__init__.py,sha256=e8l1sOfdiDJ3dkckBWLNWvC1ahC5BX5haHC2TGdNgA8,2633
diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future-0.18.2.dist-info/WHEEL b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future-0.18.2.dist-info/WHEEL
new file mode 100644
index 00000000..becc9a66
--- /dev/null
+++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future-0.18.2.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.37.1)
+Root-Is-Purelib: true
+Tag: py3-none-any
+
diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future-0.18.2.dist-info/entry_points.txt b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future-0.18.2.dist-info/entry_points.txt
new file mode 100644
index 00000000..45d1a880
--- /dev/null
+++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future-0.18.2.dist-info/entry_points.txt
@@ -0,0 +1,4 @@
+[console_scripts]
+futurize = libfuturize.main:main
+pasteurize = libpasteurize.main:main
+
diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future-0.18.2.dist-info/top_level.txt b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future-0.18.2.dist-info/top_level.txt
new file mode 100644
index 00000000..58f5843c
--- /dev/null
+++ 
b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future-0.18.2.dist-info/top_level.txt @@ -0,0 +1,4 @@ +future +libfuturize +libpasteurize +past diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/__init__.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/__init__.py old mode 100755 new mode 100644 index f7a6fbeb..ad419d67 --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/__init__.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/__init__.py @@ -68,7 +68,7 @@ Credits ------- -:Author: Ed Schofield +:Author: Ed Schofield, Jordan M. Adler, et al :Sponsor: Python Charmers Pty Ltd, Australia, and Python Charmers Pte Ltd, Singapore. http://pythoncharmers.com :Others: See docs/credits.rst or http://python-future.org/credits.html @@ -76,7 +76,7 @@ Licensing --------- -Copyright 2013-2018 Python Charmers Pty Ltd, Australia. +Copyright 2013-2019 Python Charmers Pty Ltd, Australia. The software is distributed under an MIT licence. See LICENSE.txt. """ @@ -84,10 +84,10 @@ __title__ = 'future' __author__ = 'Ed Schofield' __license__ = 'MIT' -__copyright__ = 'Copyright 2013-2018 Python Charmers Pty Ltd' +__copyright__ = 'Copyright 2013-2019 Python Charmers Pty Ltd' __ver_major__ = 0 -__ver_minor__ = 17 -__ver_patch__ = 1 +__ver_minor__ = 18 +__ver_patch__ = 2 __ver_sub__ = '' __version__ = "%d.%d.%d%s" % (__ver_major__, __ver_minor__, __ver_patch__, __ver_sub__) diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/__init__.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/__init__.py old mode 100755 new mode 100644 index 68291141..c71e0653 --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/__init__.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/__init__.py @@ -10,7 +10,7 @@ from future.standard_library import import_top_level_modules -if sys.version_info[0] == 3: +if sys.version_info[0] >= 3: import_top_level_modules() diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/_markupbase.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/_markupbase.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/datetime.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/datetime.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/email/__init__.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/email/__init__.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/email/_encoded_words.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/email/_encoded_words.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/email/_header_value_parser.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/email/_header_value_parser.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/email/_parseaddr.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/email/_parseaddr.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/email/_policybase.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/email/_policybase.py old mode 100755 new mode 100644 diff 
--git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/email/base64mime.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/email/base64mime.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/email/charset.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/email/charset.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/email/encoders.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/email/encoders.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/email/errors.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/email/errors.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/email/feedparser.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/email/feedparser.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/email/generator.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/email/generator.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/email/header.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/email/header.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/email/headerregistry.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/email/headerregistry.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/email/iterators.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/email/iterators.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/email/message.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/email/message.py old mode 100755 new mode 100644 index 99715fcc..d8d9615d --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/email/message.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/email/message.py @@ -800,7 +800,7 @@ def set_boundary(self, boundary): # There was no Content-Type header, and we don't know what type # to set it to, so raise an exception. raise errors.HeaderParseError('No Content-Type header found') - newparams = [] + newparams = list() foundp = False for pk, pv in params: if pk.lower() == 'boundary': @@ -814,10 +814,10 @@ def set_boundary(self, boundary): # instead??? 
newparams.append(('boundary', '"%s"' % boundary)) # Replace the existing Content-Type header with the new value - newheaders = [] + newheaders = list() for h, v in self._headers: if h.lower() == 'content-type': - parts = [] + parts = list() for k, v in newparams: if v == '': parts.append(k) diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/email/mime/__init__.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/email/mime/__init__.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/email/mime/application.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/email/mime/application.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/email/mime/audio.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/email/mime/audio.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/email/mime/base.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/email/mime/base.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/email/mime/image.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/email/mime/image.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/email/mime/message.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/email/mime/message.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/email/mime/multipart.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/email/mime/multipart.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/email/mime/nonmultipart.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/email/mime/nonmultipart.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/email/mime/text.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/email/mime/text.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/email/parser.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/email/parser.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/email/policy.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/email/policy.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/email/quoprimime.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/email/quoprimime.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/email/utils.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/email/utils.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/html/__init__.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/html/__init__.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/html/entities.py 
b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/html/entities.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/html/parser.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/html/parser.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/http/__init__.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/http/__init__.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/http/client.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/http/client.py old mode 100755 new mode 100644 index 5dd983d8..e663d125 --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/http/client.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/http/client.py @@ -79,11 +79,15 @@ import io import os import socket -import collections from future.backports.urllib.parse import urlsplit import warnings from array import array +if PY2: + from collections import Iterable +else: + from collections.abc import Iterable + __all__ = ["HTTPResponse", "HTTPConnection", "HTTPException", "NotConnected", "UnknownProtocol", "UnknownTransferEncoding", "UnimplementedFileMode", @@ -696,9 +700,19 @@ def _safe_readinto(self, b): while total_bytes < len(b): if MAXAMOUNT < len(mvb): temp_mvb = mvb[0:MAXAMOUNT] - n = self.fp.readinto(temp_mvb) + if PY2: + data = self.fp.read(len(temp_mvb)) + n = len(data) + temp_mvb[:n] = data + else: + n = self.fp.readinto(temp_mvb) else: - n = self.fp.readinto(mvb) + if PY2: + data = self.fp.read(len(mvb)) + n = len(data) + mvb[:n] = data + else: + n = self.fp.readinto(mvb) if not n: raise IncompleteRead(bytes(mvb[0:total_bytes]), len(b)) mvb = mvb[n:] @@ -892,7 +906,7 @@ def send(self, data): try: self.sock.sendall(data) except TypeError: - if isinstance(data, collections.Iterable): + if isinstance(data, Iterable): for d in data: self.sock.sendall(d) else: diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/http/cookiejar.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/http/cookiejar.py old mode 100755 new mode 100644 index cad72f9b..af3ef415 --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/http/cookiejar.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/http/cookiejar.py @@ -33,7 +33,7 @@ from __future__ import division from __future__ import absolute_import from future.builtins import filter, int, map, open, str -from future.utils import as_native_str +from future.utils import as_native_str, PY2 __all__ = ['Cookie', 'CookieJar', 'CookiePolicy', 'DefaultCookiePolicy', 'FileCookieJar', 'LWPCookieJar', 'LoadError', 'MozillaCookieJar'] @@ -41,7 +41,8 @@ import copy import datetime import re -re.ASCII = 0 +if PY2: + re.ASCII = 0 import time from future.backports.urllib.parse import urlparse, urlsplit, quote from future.backports.http.client import HTTP_PORT diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/http/cookies.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/http/cookies.py old mode 100755 new mode 100644 index ae32ed7e..8bb61e22 --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/http/cookies.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/http/cookies.py @@ -138,7 +138,8 @@ # Import our 
required modules # import re -re.ASCII = 0 # for py2 compatibility +if PY2: + re.ASCII = 0 # for py2 compatibility import string __all__ = ["CookieError", "BaseCookie", "SimpleCookie"] diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/http/server.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/http/server.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/misc.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/misc.py old mode 100755 new mode 100644 index ef752078..098a0667 --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/misc.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/misc.py @@ -16,7 +16,6 @@ import subprocess from math import ceil as oldceil -from collections import Mapping, MutableMapping from operator import itemgetter as _itemgetter, eq as _eq import sys @@ -25,7 +24,12 @@ from itertools import repeat as _repeat, chain as _chain, starmap as _starmap from socket import getaddrinfo, SOCK_STREAM, error, socket -from future.utils import iteritems, itervalues, PY26, PY3 +from future.utils import iteritems, itervalues, PY2, PY26, PY3 + +if PY2: + from collections import Mapping, MutableMapping +else: + from collections.abc import Mapping, MutableMapping def ceil(x): diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/socket.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/socket.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/socketserver.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/socketserver.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/total_ordering.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/total_ordering.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/urllib/__init__.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/urllib/__init__.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/urllib/error.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/urllib/error.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/urllib/parse.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/urllib/parse.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/urllib/request.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/urllib/request.py old mode 100755 new mode 100644 index b1545ca0..baee5401 --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/urllib/request.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/urllib/request.py @@ -109,11 +109,17 @@ import socket import sys import time -import collections import tempfile import contextlib import warnings +from future.utils import PY2 + +if PY2: + from collections import Iterable +else: + from collections.abc import Iterable + # check for SSL try: import ssl @@ -1221,7 +1227,7 @@ def do_request_(self, request): mv = memoryview(data) size = len(mv) * mv.itemsize except TypeError: - if isinstance(data, collections.Iterable): + if 
isinstance(data, Iterable): raise ValueError("Content-Length should be specified " "for iterable data of type %r %r" % (type(data), data)) diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/urllib/response.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/urllib/response.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/urllib/robotparser.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/urllib/robotparser.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/xmlrpc/__init__.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/xmlrpc/__init__.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/xmlrpc/client.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/xmlrpc/client.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/xmlrpc/server.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/backports/xmlrpc/server.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/builtins/__init__.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/builtins/__init__.py old mode 100755 new mode 100644 index 216465a1..8bc1649d --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/builtins/__init__.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/builtins/__init__.py @@ -11,7 +11,7 @@ # The isinstance import is no longer needed. We provide it only for # backward-compatibility with future v0.8.2. It will be removed in future v1.0. from future.builtins.misc import (ascii, chr, hex, input, isinstance, next, - oct, open, pow, round, super) + oct, open, pow, round, super, max, min) from future.utils import PY3 if PY3: @@ -43,7 +43,7 @@ __all__ = ['filter', 'map', 'zip', 'ascii', 'chr', 'hex', 'input', 'next', 'oct', 'open', 'pow', 'round', 'super', - 'bytes', 'dict', 'int', 'list', 'object', 'range', 'str', + 'bytes', 'dict', 'int', 'list', 'object', 'range', 'str', 'max', 'min' ] else: diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/builtins/disabled.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/builtins/disabled.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/builtins/iterators.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/builtins/iterators.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/builtins/misc.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/builtins/misc.py old mode 100755 new mode 100644 index 90dc384a..f86ce5f3 --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/builtins/misc.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/builtins/misc.py @@ -13,6 +13,8 @@ - ``open`` (equivalent to io.open on Py2) - ``super`` (backport of Py3's magic zero-argument super() function - ``round`` (new "Banker's Rounding" behaviour from Py3) +- ``max`` (new default option from Py3.4) +- ``min`` (new default option from Py3.4) ``isinstance`` is also currently exported for backwards compatibility with v0.8.2, although this has been deprecated since v0.9. 
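The docstring hunk above, together with the import and `new_min_max.py` hunks that follow, backports the `default=` keyword that Python 3.4 added to `min()` and `max()`. A minimal usage sketch (assuming only that the upgraded vendored `future` package is importable):

    # Sketch: with this change, future.builtins min/max accept default=
    # even on interpreters that predate Python 3.4.
    from future.builtins import min, max

    print(max([], default=0))      # -> 0 instead of raising ValueError
    print(min([], default=None))   # -> None
    print(max([3, -7], key=abs))   # -> -7; key= continues to work as before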
@@ -59,6 +61,8 @@
     from future.builtins.newnext import newnext as next
     from future.builtins.newround import newround as round
     from future.builtins.newsuper import newsuper as super
+    from future.builtins.new_min_max import newmax as max
+    from future.builtins.new_min_max import newmin as min
     from future.types.newint import newint
 
     _SENTINEL = object()
@@ -89,11 +93,12 @@ def pow(x, y, z=_SENTINEL):
         else:
             return _builtin_pow(x+0j, y, z)
 
+
     # ``future`` doesn't support Py3.0/3.1. If we ever did, we'd add this:
     #     callable = __builtin__.callable
 
     __all__ = ['ascii', 'chr', 'hex', 'input', 'isinstance', 'next', 'oct',
-               'open', 'pow', 'round', 'super']
+               'open', 'pow', 'round', 'super', 'max', 'min']
 
 else:
     import builtins
@@ -109,8 +114,14 @@ def pow(x, y, z=_SENTINEL):
     pow = builtins.pow
     round = builtins.round
     super = builtins.super
-
-    __all__ = []
+    if utils.PY34_PLUS:
+        max = builtins.max
+        min = builtins.min
+        __all__ = []
+    else:
+        from future.builtins.new_min_max import newmax as max
+        from future.builtins.new_min_max import newmin as min
+        __all__ = ['min', 'max']
 
     # The callable() function was removed from Py3.0 and 3.1 and
     # reintroduced into Py3.2+. ``future`` doesn't support Py3.0/3.1. If we ever
diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/builtins/new_min_max.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/builtins/new_min_max.py
new file mode 100644
index 00000000..6f0c2a86
--- /dev/null
+++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/builtins/new_min_max.py
@@ -0,0 +1,59 @@
+import itertools
+
+from future import utils
+if utils.PY2:
+    from __builtin__ import max as _builtin_max, min as _builtin_min
+else:
+    from builtins import max as _builtin_max, min as _builtin_min
+
+_SENTINEL = object()
+
+
+def newmin(*args, **kwargs):
+    return new_min_max(_builtin_min, *args, **kwargs)
+
+
+def newmax(*args, **kwargs):
+    return new_min_max(_builtin_max, *args, **kwargs)
+
+
+def new_min_max(_builtin_func, *args, **kwargs):
+    """
+    To support the argument "default" introduced in python 3.4 for min and max
+    :param _builtin_func: builtin min or builtin max
+    :param args:
+    :param kwargs:
+    :return: returns the min or max based on the arguments passed
+    """
+
+    for key, _ in kwargs.items():
+        if key not in set(['key', 'default']):
+            raise TypeError('Illegal argument %s', key)
+
+    if len(args) == 0:
+        raise TypeError
+
+    if len(args) != 1 and kwargs.get('default', _SENTINEL) is not _SENTINEL:
+        raise TypeError
+
+    if len(args) == 1:
+        iterator = iter(args[0])
+        try:
+            first = next(iterator)
+        except StopIteration:
+            if kwargs.get('default', _SENTINEL) is not _SENTINEL:
+                return kwargs.get('default')
+            else:
+                raise ValueError('{}() arg is an empty sequence'.format(_builtin_func.__name__))
+        else:
+            iterator = itertools.chain([first], iterator)
+        if kwargs.get('key') is not None:
+            return _builtin_func(iterator, key=kwargs.get('key'))
+        else:
+            return _builtin_func(iterator)
+
+    if len(args) > 1:
+        if kwargs.get('key') is not None:
+            return _builtin_func(args, key=kwargs.get('key'))
+        else:
+            return _builtin_func(args)
diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/builtins/newnext.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/builtins/newnext.py
old mode 100755
new mode 100644
diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/builtins/newround.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/builtins/newround.py
old mode 100755
new mode 100644
index 3943ebb6..394a2c63
--- 
a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/builtins/newround.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/builtins/newround.py @@ -38,11 +38,14 @@ def newround(number, ndigits=None): if 'numpy' in repr(type(number)): number = float(number) - if not PY26: - d = Decimal.from_float(number).quantize(exponent, - rounding=ROUND_HALF_EVEN) + if isinstance(number, Decimal): + d = number else: - d = from_float_26(number).quantize(exponent, rounding=ROUND_HALF_EVEN) + if not PY26: + d = Decimal.from_float(number).quantize(exponent, + rounding=ROUND_HALF_EVEN) + else: + d = from_float_26(number).quantize(exponent, rounding=ROUND_HALF_EVEN) if return_int: return int(d) diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/builtins/newsuper.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/builtins/newsuper.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/__init__.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/__init__.py old mode 100755 new mode 100644 index 040fdcf0..0cd60d3d --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/__init__.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/__init__.py @@ -4,5 +4,5 @@ __future_module__ = True from future.standard_library import import_top_level_modules -if sys.version_info[0] == 3: +if sys.version_info[0] >= 3: import_top_level_modules() diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/_dummy_thread.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/_dummy_thread.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/_markupbase.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/_markupbase.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/_thread.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/_thread.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/builtins.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/builtins.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/collections.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/collections.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/configparser.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/configparser.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/copyreg.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/copyreg.py old mode 100755 new mode 100644 index 21c7a42f..9d08cdc5 --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/copyreg.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/copyreg.py @@ -2,7 +2,11 @@ from future.utils import PY3 if PY3: - from copyreg import * + import copyreg, sys + # A "*" import uses Python 3's copyreg.__all__ which does not include + # all public names in the API surface for copyreg, this avoids that + # problem by just making our module _be_ a reference to the actual module. 
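# An illustrative, standalone sketch of the same self-replacement trick for
# any thin wrapper module (hypothetical `mywrapper.py`; not vendored code):
#
#     import copyreg
#     import sys
#
#     # Importers of `mywrapper` now receive the real stdlib module,
#     # including public names that `from copyreg import *` would miss.
#     sys.modules[__name__] = copyreg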
+ sys.modules['future.moves.copyreg'] = copyreg else: __future_module__ = True from copy_reg import * diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/dbm/__init__.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/dbm/__init__.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/dbm/dumb.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/dbm/dumb.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/dbm/gnu.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/dbm/gnu.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/dbm/ndbm.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/dbm/ndbm.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/html/__init__.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/html/__init__.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/html/entities.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/html/entities.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/html/parser.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/html/parser.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/http/__init__.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/http/__init__.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/http/client.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/http/client.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/http/cookiejar.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/http/cookiejar.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/http/cookies.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/http/cookies.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/http/server.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/http/server.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/itertools.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/itertools.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/pickle.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/pickle.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/queue.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/queue.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/reprlib.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/reprlib.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/socketserver.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/socketserver.py old mode 100755 new mode 100644 diff --git 
a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/subprocess.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/subprocess.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/sys.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/sys.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/tkinter/__init__.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/tkinter/__init__.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/tkinter/colorchooser.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/tkinter/colorchooser.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/tkinter/commondialog.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/tkinter/commondialog.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/tkinter/constants.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/tkinter/constants.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/tkinter/dialog.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/tkinter/dialog.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/tkinter/dnd.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/tkinter/dnd.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/tkinter/filedialog.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/tkinter/filedialog.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/tkinter/font.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/tkinter/font.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/tkinter/messagebox.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/tkinter/messagebox.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/tkinter/scrolledtext.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/tkinter/scrolledtext.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/tkinter/simpledialog.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/tkinter/simpledialog.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/tkinter/tix.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/tkinter/tix.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/tkinter/ttk.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/tkinter/ttk.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/urllib/__init__.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/urllib/__init__.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/urllib/error.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/urllib/error.py old mode 100755 new mode 
100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/urllib/parse.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/urllib/parse.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/urllib/request.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/urllib/request.py old mode 100755 new mode 100644 index 60e440a7..972aa4ab --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/urllib/request.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/urllib/request.py @@ -11,19 +11,8 @@ proxy_bypass, quote, request_host, - splitattr, - splithost, - splitpasswd, - splitport, - splitquery, - splittag, - splittype, - splituser, - splitvalue, thishost, - to_bytes, unquote, - unwrap, url2pathname, urlcleanup, urljoin, @@ -32,6 +21,18 @@ urlretrieve, urlsplit, urlunparse) + + from urllib.parse import (splitattr, + splithost, + splitpasswd, + splitport, + splitquery, + splittag, + splittype, + splituser, + splitvalue, + to_bytes, + unwrap) else: __future_module__ = True with suspend_hooks(): diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/urllib/response.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/urllib/response.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/urllib/robotparser.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/urllib/robotparser.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/winreg.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/winreg.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/xmlrpc/__init__.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/xmlrpc/__init__.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/xmlrpc/client.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/xmlrpc/client.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/xmlrpc/server.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/moves/xmlrpc/server.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/standard_library/__init__.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/standard_library/__init__.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/tests/__init__.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/tests/__init__.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/tests/base.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/tests/base.py old mode 100755 new mode 100644 index 9f4607b6..4ef437ba --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/tests/base.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/tests/base.py @@ -272,7 +272,11 @@ def convert_check(self, before, expected, stages=(1, 2), all_imports=False, else: headers = '' - self.compare(output, headers + reformat_code(expected), + reformatted = reformat_code(expected) + if headers in reformatted: + headers = '' + + self.compare(output, headers + reformatted, ignore_imports=ignore_imports) def unchanged(self, code, **kwargs): @@ 
-338,6 +342,10 @@ def _futurize_test_script(self, filename='mytestscript.py', stages=(1, 2), '----\n%s\n----' % f.read(), ) ErrorClass = (FuturizeError if 'futurize' in script else PasteurizeError) + + if not hasattr(e, 'output'): + # The attribute CalledProcessError.output doesn't exist on Py2.6 + e.output = None raise ErrorClass(msg, e.returncode, e.cmd, output=e.output) return output diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/types/__init__.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/types/__init__.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/types/newbytes.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/types/newbytes.py old mode 100755 new mode 100644 index 2a337c86..c9d584a7 --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/types/newbytes.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/types/newbytes.py @@ -5,15 +5,19 @@ different beast to the Python 3 bytes object. """ -from collections import Iterable from numbers import Integral import string import copy -from future.utils import istext, isbytes, PY3, with_metaclass +from future.utils import istext, isbytes, PY2, PY3, with_metaclass from future.types import no, issubset from future.types.newobject import newobject +if PY2: + from collections import Iterable +else: + from collections.abc import Iterable + _builtin_bytes = bytes diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/types/newdict.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/types/newdict.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/types/newint.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/types/newint.py old mode 100755 new mode 100644 index 705b8fa9..748dba9d --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/types/newint.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/types/newint.py @@ -8,7 +8,6 @@ from __future__ import division import struct -import collections from future.types.newbytes import newbytes from future.types.newobject import newobject @@ -17,6 +16,9 @@ if PY3: long = int + from collections.abc import Iterable +else: + from collections import Iterable class BaseNewInt(type): @@ -356,7 +358,7 @@ def from_bytes(cls, mybytes, byteorder='big', signed=False): raise TypeError("cannot convert unicode objects to bytes") # mybytes can also be passed as a sequence of integers on Py3. # Test for this: - elif isinstance(mybytes, collections.Iterable): + elif isinstance(mybytes, Iterable): mybytes = newbytes(mybytes) b = mybytes if byteorder == 'big' else mybytes[::-1] if len(b) == 0: diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/types/newlist.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/types/newlist.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/types/newmemoryview.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/types/newmemoryview.py old mode 100755 new mode 100644 index 72c6990a..09f804dc --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/types/newmemoryview.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/types/newmemoryview.py @@ -1,14 +1,16 @@ """ A pretty lame implementation of a memoryview object for Python 2.6. 
""" - -from collections import Iterable from numbers import Integral import string -from future.utils import istext, isbytes, PY3, with_metaclass +from future.utils import istext, isbytes, PY2, with_metaclass from future.types import no, issubset +if PY2: + from collections import Iterable +else: + from collections.abc import Iterable # class BaseNewBytes(type): # def __instancecheck__(cls, instance): diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/types/newobject.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/types/newobject.py old mode 100755 new mode 100644 index 776d4766..31b84fc1 --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/types/newobject.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/types/newobject.py @@ -112,5 +112,6 @@ def __native__(self): """ return object(self) + __slots__ = [] __all__ = ['newobject'] diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/types/newopen.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/types/newopen.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/types/newrange.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/types/newrange.py old mode 100755 new mode 100644 index 9173b050..eda01a5a --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/types/newrange.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/types/newrange.py @@ -19,7 +19,12 @@ """ from __future__ import absolute_import -from collections import Sequence, Iterator +from future.utils import PY2 + +if PY2: + from collections import Sequence, Iterator +else: + from collections.abc import Sequence, Iterator from itertools import islice from future.backports.misc import count # with step parameter on Py2.6 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/types/newstr.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/types/newstr.py old mode 100755 new mode 100644 index e6272fb9..8ca191f9 --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/types/newstr.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/types/newstr.py @@ -40,7 +40,6 @@ """ -from collections import Iterable from numbers import Number from future.utils import PY3, istext, with_metaclass, isnewbytes @@ -51,6 +50,9 @@ if PY3: # We'll probably never use newstr on Py3 anyway... 
unicode = str + from collections.abc import Iterable +else: + from collections import Iterable class BaseNewStr(type): @@ -105,6 +107,7 @@ def __repr__(self): """ Without the u prefix """ + value = super(newstr, self).__repr__() # assert value[0] == u'u' return value[1:] @@ -290,7 +293,14 @@ def __eq__(self, other): isinstance(other, bytes) and not isnewbytes(other)): return super(newstr, self).__eq__(other) else: - return False + return NotImplemented + + def __hash__(self): + if (isinstance(self, unicode) or + isinstance(self, bytes) and not isnewbytes(self)): + return super(newstr, self).__hash__() + else: + raise NotImplementedError() def __ne__(self, other): if (isinstance(other, unicode) or diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/utils/__init__.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/utils/__init__.py old mode 100755 new mode 100644 index 906f1e46..46bd96de --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/utils/__init__.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/utils/__init__.py @@ -18,8 +18,10 @@ * types: * text_type: unicode in Python 2, str in Python 3 - * binary_type: str in Python 2, bytes in Python 3 * string_types: basestring in Python 2, str in Python 3 + * binary_type: str in Python 2, bytes in Python 3 + * integer_types: (int, long) in Python 2, int in Python 3 + * class_types: (type, types.ClassType) in Python 2, type in Python 3 * bchr(c): Take an integer and make a 1-character byte string @@ -55,7 +57,8 @@ import inspect -PY3 = sys.version_info[0] == 3 +PY3 = sys.version_info[0] >= 3 +PY34_PLUS = sys.version_info[0:2] >= (3, 4) PY35_PLUS = sys.version_info[0:2] >= (3, 5) PY36_PLUS = sys.version_info[0:2] >= (3, 6) PY2 = sys.version_info[0] == 2 @@ -405,12 +408,34 @@ def raise_(tp, value=None, tb=None): allows re-raising exceptions with the cls value and traceback on Python 2 and 3. """ - if value is not None and isinstance(tp, Exception): - raise TypeError("instance exception may not have a separate value") - if value is not None: - exc = tp(value) - else: + if isinstance(tp, BaseException): + # If the first object is an instance, the type of the exception + # is the class of the instance, the instance itself is the value, + # and the second object must be None. + if value is not None: + raise TypeError("instance exception may not have a separate value") exc = tp + elif isinstance(tp, type) and not issubclass(tp, BaseException): + # If the first object is a class, it becomes the type of the + # exception. + raise TypeError("class must derive from BaseException, not %s" % tp.__name__) + else: + # The second object is used to determine the exception value: If it + # is an instance of the class, the instance becomes the exception + # value. If the second object is a tuple, it is used as the argument + # list for the class constructor; if it is None, an empty argument + # list is used, and any other object is treated as a single argument + # to the constructor. The instance so created by calling the + # constructor is used as the exception value. 
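# For reference, under the rules spelled out in the comments above, each of
# the following calls is accepted by future.utils.raise_ (usage sketches
# only, not part of the vendored file):
#
#     raise_(ValueError('boom'))        # instance; value must stay None
#     raise_(ValueError, 'boom')        # class plus a single ctor argument
#     raise_(ValueError, ('a', 'b'))    # class plus an argument tuple
#     raise_(ValueError)                # class alone -> empty argument list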
+            if isinstance(value, tp):
+                exc = value
+            elif isinstance(value, tuple):
+                exc = tp(*value)
+            elif value is None:
+                exc = tp()
+            else:
+                exc = tp(value)
+
         if exc.__traceback__ is not tb:
             raise exc.with_traceback(tb)
         raise exc
@@ -443,12 +468,14 @@ def raise_from(exc, cause):
         e.__suppress_context__ = False
         if isinstance(cause, type) and issubclass(cause, Exception):
             e.__cause__ = cause()
+            e.__cause__.__traceback__ = sys.exc_info()[2]
             e.__suppress_context__ = True
         elif cause is None:
             e.__cause__ = None
             e.__suppress_context__ = True
         elif isinstance(cause, BaseException):
             e.__cause__ = cause
+            object.__setattr__(e.__cause__, '__traceback__', sys.exc_info()[2])
             e.__suppress_context__ = True
         else:
             raise TypeError("exception causes must derive from BaseException")
@@ -552,15 +579,14 @@ def isbytes(obj):
 
 def isnewbytes(obj):
     """
-    Equivalent to the result of ``isinstance(obj, newbytes)`` were
-    ``__instancecheck__`` not overridden on the newbytes subclass. In
-    other words, it is REALLY a newbytes instance, not a Py2 native str
+    Equivalent to the result of ``type(obj) == type(newbytes)``
+    in other words, it is REALLY a newbytes instance, not a Py2 native str
     object?
+
+    Note that this does not cover subclasses of newbytes, and it is not
+    equivalent to ininstance(obj, newbytes)
     """
-    # TODO: generalize this so that it works with subclasses of newbytes
-    # Import is here to avoid circular imports:
-    from future.types.newbytes import newbytes
-    return type(obj) == newbytes
+    return type(obj).__name__ == 'newbytes'
 
 
 def isint(obj):
@@ -726,16 +752,16 @@ def ensure_new_type(obj):
 
 
 __all__ = ['PY2', 'PY26', 'PY3', 'PYPY',
-           'as_native_str', 'bind_method', 'bord', 'bstr',
-           'bytes_to_native_str', 'encode_filename', 'ensure_new_type',
-           'exec_', 'get_next', 'getexception', 'implements_iterator',
-           'is_new_style', 'isbytes', 'isidentifier', 'isint',
-           'isnewbytes', 'istext', 'iteritems', 'iterkeys', 'itervalues',
-           'lfilter', 'listitems', 'listvalues', 'lmap', 'lrange',
-           'lzip', 'native', 'native_bytes', 'native_str',
+           'as_native_str', 'binary_type', 'bind_method', 'bord', 'bstr',
+           'bytes_to_native_str', 'class_types', 'encode_filename',
+           'ensure_new_type', 'exec_', 'get_next', 'getexception',
+           'implements_iterator', 'integer_types', 'is_new_style', 'isbytes',
+           'isidentifier', 'isint', 'isnewbytes', 'istext', 'iteritems',
+           'iterkeys', 'itervalues', 'lfilter', 'listitems', 'listvalues',
+           'lmap', 'lrange', 'lzip', 'native', 'native_bytes', 'native_str',
            'native_str_to_bytes', 'old_div',
            'python_2_unicode_compatible', 'raise_',
-           'raise_with_traceback', 'reraise', 'text_to_native_str',
-           'tobytes', 'viewitems', 'viewkeys', 'viewvalues',
-           'with_metaclass'
-           ]
+           'raise_with_traceback', 'reraise', 'string_types',
+           'text_to_native_str', 'text_type', 'tobytes', 'viewitems',
+           'viewkeys', 'viewvalues', 'with_metaclass'
+          ]
diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/utils/surrogateescape.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/future/utils/surrogateescape.py
old mode 100755
new mode 100644
diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/httplib2-0.19.1.dist-info/INSTALLER b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/httplib2-0.19.1.dist-info/INSTALLER
new file mode 100644
index 00000000..a1b589e3
--- /dev/null
+++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/httplib2-0.19.1.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/httplib2-0.19.1.dist-info/LICENSE
b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/httplib2-0.19.1.dist-info/LICENSE new file mode 100644 index 00000000..ae382866 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/httplib2-0.19.1.dist-info/LICENSE @@ -0,0 +1,23 @@ +Httplib2 Software License + +Copyright (c) 2006 by Joe Gregorio + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without restriction, +including without limitation the rights to use, copy, modify, merge, +publish, distribute, sublicense, and/or sell copies of the Software, +and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/httplib2-0.19.1.dist-info/METADATA b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/httplib2-0.19.1.dist-info/METADATA new file mode 100644 index 00000000..276c4430 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/httplib2-0.19.1.dist-info/METADATA @@ -0,0 +1,71 @@ +Metadata-Version: 2.1 +Name: httplib2 +Version: 0.19.1 +Summary: A comprehensive HTTP client library. +Home-page: https://github.com/httplib2/httplib2 +Author: Joe Gregorio +Author-email: joe@bitworking.org +License: MIT +Platform: UNKNOWN +Classifier: Development Status :: 4 - Beta +Classifier: Environment :: Web Environment +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Topic :: Internet :: WWW/HTTP +Classifier: Topic :: Software Development :: Libraries +Requires-Dist: pyparsing (<3,>=2.4.2) + + + +A comprehensive HTTP client library, ``httplib2`` supports many features left out of other HTTP libraries. + +**HTTP and HTTPS** + HTTPS support is only available if the socket module was compiled with SSL support. + + +**Keep-Alive** + Supports HTTP 1.1 Keep-Alive, keeping the socket open and performing multiple requests over the same connection if possible. + + +**Authentication** + The following three types of HTTP Authentication are supported. These can be used over both HTTP and HTTPS. + + * Digest + * Basic + * WSSE + +**Caching** + The module can optionally operate with a private cache that understands the Cache-Control: + header and uses both the ETag and Last-Modified cache validators. Both file system + and memcached based caches are supported. 
+ + +**All Methods** + The module can handle any HTTP request method, not just GET and POST. + + +**Redirects** + Automatically follows 3XX redirects on GETs. + + +**Compression** + Handles both 'deflate' and 'gzip' types of compression. + + +**Lost update support** + Automatically adds back ETags into PUT requests to resources we have already cached. This implements Section 3.2 of Detecting the Lost Update Problem Using Unreserved Checkout + + +**Unit Tested** + A large and growing set of unit tests. + + diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/httplib2-0.19.1.dist-info/RECORD b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/httplib2-0.19.1.dist-info/RECORD new file mode 100644 index 00000000..dd4f6146 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/httplib2-0.19.1.dist-info/RECORD @@ -0,0 +1,13 @@ +httplib2-0.19.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +httplib2-0.19.1.dist-info/LICENSE,sha256=WJ7sOPct8r4gNxHTuMvs6bkIxef_ALw8q39juunjZrQ,1086 +httplib2-0.19.1.dist-info/METADATA,sha256=Y773x9o8W64zxHwc9LyJIyAzCAQsnyvWDyrUs7l1l50,2235 +httplib2-0.19.1.dist-info/RECORD,, +httplib2-0.19.1.dist-info/WHEEL,sha256=OqRkF0eY5GHssMorFjlbTIq072vpHpF60fIQA6lS9xA,92 +httplib2-0.19.1.dist-info/top_level.txt,sha256=BEY8ChKwagUWmu9x8yN9JObJpZKNeWCr1E-sIECb56I,9 +httplib2/__init__.py,sha256=BIXewNb18bHReQm1J-NWasURlkO0UjjEgup2UadKP88,68412 +httplib2/auth.py,sha256=IdJCKqMC2nx7O5wbYwfO04m1X3miYW5JAZ9Wn5eQZi4,2026 +httplib2/cacerts.txt,sha256=AQyadVjp1sEIG0yIiMJ82l52hplPo3odJIyTSS_sONw,135547 +httplib2/certs.py,sha256=guhfjMNhDdKJEyYBb5ZyLxVO5q1I7Y_P-4BG8MniBk8,971 +httplib2/error.py,sha256=GyqPUvZeKdVLq0f3xg0uX4rjtv7jVGJuPerAdyc-jfk,954 +httplib2/iri2uri.py,sha256=PhIzEzeR6C73l7piwrNAJlVvlWgsqxtJTlFeXgznzQo,4153 +httplib2/socks.py,sha256=oaeEOnT2rkTNm6wnn0CSdhWzVaVshnnkAKiP4kxKzzc,19701 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/httplib2-0.19.1.dist-info/WHEEL b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/httplib2-0.19.1.dist-info/WHEEL new file mode 100644 index 00000000..385faab0 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/httplib2-0.19.1.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.36.2) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/httplib2-0.19.1.dist-info/top_level.txt b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/httplib2-0.19.1.dist-info/top_level.txt new file mode 100644 index 00000000..fb881ece --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/httplib2-0.19.1.dist-info/top_level.txt @@ -0,0 +1 @@ +httplib2 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/httplib2/__init__.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/httplib2/__init__.py old mode 100755 new mode 100644 index 4312f300..8b240dbf --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/httplib2/__init__.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/httplib2/__init__.py @@ -15,7 +15,7 @@ "Alex Yu", ] __license__ = "MIT" -__version__ = '0.14.0' +__version__ = "0.19.1" import base64 import calendar @@ -49,6 +49,8 @@ # TODO: remove this fallback and copypasted socksipy module upon py2/3 merge, # idea is to have soft-dependency on any compatible module called socks from . import socks +from . 
import auth +from .error import * from .iri2uri import iri2uri @@ -79,56 +81,6 @@ def has_timeout(timeout): RETRIES = 2 -# All exceptions raised here derive from HttpLib2Error -class HttpLib2Error(Exception): - pass - - -# Some exceptions can be caught and optionally -# be turned back into responses. -class HttpLib2ErrorWithResponse(HttpLib2Error): - def __init__(self, desc, response, content): - self.response = response - self.content = content - HttpLib2Error.__init__(self, desc) - - -class RedirectMissingLocation(HttpLib2ErrorWithResponse): - pass - - -class RedirectLimit(HttpLib2ErrorWithResponse): - pass - - -class FailedToDecompressContent(HttpLib2ErrorWithResponse): - pass - - -class UnimplementedDigestAuthOptionError(HttpLib2ErrorWithResponse): - pass - - -class UnimplementedHmacDigestAuthOptionError(HttpLib2ErrorWithResponse): - pass - - -class MalformedHeader(HttpLib2Error): - pass - - -class RelativeURIError(HttpLib2Error): - pass - - -class ServerNotFoundError(HttpLib2Error): - pass - - -class ProxiesUnavailableError(HttpLib2Error): - pass - - # Open Items: # ----------- @@ -161,7 +113,15 @@ class ProxiesUnavailableError(HttpLib2Error): "upgrade", ] +# https://tools.ietf.org/html/rfc7231#section-8.1.3 +SAFE_METHODS = ("GET", "HEAD", "OPTIONS", "TRACE") + +# To change, assign to `Http().redirect_codes` +REDIRECT_CODES = frozenset((300, 301, 302, 303, 307, 308)) + + from httplib2 import certs + CA_CERTS = certs.where() # PROTOCOL_TLS is python 3.5.3+. PROTOCOL_SSLv23 is deprecated. @@ -169,21 +129,23 @@ class ProxiesUnavailableError(HttpLib2Error): # > Selects the highest protocol version that both the client and server support. # > Despite the name, this option can select “TLS” protocols as well as “SSL”. # source: https://docs.python.org/3.5/library/ssl.html#ssl.PROTOCOL_TLS -DEFAULT_TLS_VERSION = getattr(ssl, "PROTOCOL_TLS", None) or getattr( - ssl, "PROTOCOL_SSLv23" -) +DEFAULT_TLS_VERSION = getattr(ssl, "PROTOCOL_TLS", None) or getattr(ssl, "PROTOCOL_SSLv23") + def _build_ssl_context( - disable_ssl_certificate_validation, ca_certs, cert_file=None, key_file=None, - maximum_version=None, minimum_version=None, + disable_ssl_certificate_validation, + ca_certs, + cert_file=None, + key_file=None, + maximum_version=None, + minimum_version=None, + key_password=None, ): if not hasattr(ssl, "SSLContext"): raise RuntimeError("httplib2 requires Python 3.2+ for ssl.SSLContext") context = ssl.SSLContext(DEFAULT_TLS_VERSION) - context.verify_mode = ( - ssl.CERT_NONE if disable_ssl_certificate_validation else ssl.CERT_REQUIRED - ) + context.verify_mode = ssl.CERT_NONE if disable_ssl_certificate_validation else ssl.CERT_REQUIRED # SSLContext.maximum_version and SSLContext.minimum_version are python 3.7+. 
# source: https://docs.python.org/3/library/ssl.html#ssl.SSLContext.maximum_version @@ -207,7 +169,7 @@ def _build_ssl_context( context.load_verify_locations(ca_certs) if cert_file: - context.load_cert_chain(cert_file, key_file) + context.load_cert_chain(cert_file, key_file, key_password) return context @@ -281,10 +243,7 @@ def safename(filename): def _normalize_headers(headers): return dict( [ - ( - _convert_byte_str(key).lower(), - NORMALIZE_SPACE.sub(_convert_byte_str(value), " ").strip(), - ) + (_convert_byte_str(key).lower(), NORMALIZE_SPACE.sub(_convert_byte_str(value), " ").strip(),) for (key, value) in headers.items() ] ) @@ -301,13 +260,9 @@ def _parse_cache_control(headers): if "cache-control" in headers: parts = headers["cache-control"].split(",") parts_with_args = [ - tuple([x.strip().lower() for x in part.split("=", 1)]) - for part in parts - if -1 != part.find("=") - ] - parts_wo_args = [ - (name.strip().lower(), 1) for name in parts if -1 == name.find("=") + tuple([x.strip().lower() for x in part.split("=", 1)]) for part in parts if -1 != part.find("=") ] + parts_wo_args = [(name.strip().lower(), 1) for name in parts if -1 == name.find("=")] retval = dict(parts_with_args + parts_wo_args) return retval @@ -315,56 +270,9 @@ def _parse_cache_control(headers): # Whether to use a strict mode to parse WWW-Authenticate headers # Might lead to bad results in case of ill-formed header value, # so disabled by default, falling back to relaxed parsing. -# Set to true to turn on, usefull for testing servers. +# Set to true to turn on, useful for testing servers. USE_WWW_AUTH_STRICT_PARSING = 0 -# In regex below: -# [^\0-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+ matches a "token" as defined by HTTP -# "(?:[^\0-\x08\x0A-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?" matches a "quoted-string" as defined by HTTP, when LWS have already been replaced by a single space -# Actually, as an auth-param value can be either a token or a quoted-string, they are combined in a single pattern which matches both: -# \"?((?<=\")(?:[^\0-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?(?=\")|(?@,;:\\\"/[\]?={} \t]+(?!\"))\"? -WWW_AUTH_STRICT = re.compile( - r"^(?:\s*(?:,\s*)?([^\0-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+)\s*=\s*\"?((?<=\")(?:[^\0-\x08\x0A-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?(?=\")|(?@,;:\\\"/[\]?={} \t]+(?!\"))\"?)(.*)$" -) -WWW_AUTH_RELAXED = re.compile( - r"^(?:\s*(?:,\s*)?([^ \t\r\n=]+)\s*=\s*\"?((?<=\")(?:[^\\\"]|\\.)*?(?=\")|(? 0: # service = "wise" - auth = dict( - Email=credentials[0], - Passwd=credentials[1], - service=service, - source=headers["user-agent"], - ) + auth = dict(Email=credentials[0], Passwd=credentials[1], service=service, source=headers["user-agent"],) resp, content = self.http.request( "https://www.google.com/accounts/ClientLogin", method="POST", @@ -909,9 +747,7 @@ class FileCache(object): be running on the same cache. 
""" - def __init__( - self, cache, safe=safename - ): # use safe=lambda x: md5.new(x).hexdigest() for the old behavior + def __init__(self, cache, safe=safename): # use safe=lambda x: md5.new(x).hexdigest() for the old behavior self.cache = cache self.safe = safe if not os.path.exists(cache): @@ -960,7 +796,13 @@ class KeyCerts(Credentials): """Identical to Credentials except that name/password are mapped to key/cert.""" - pass + def add(self, key, cert, domain, password): + self.credentials.append((domain.lower(), key, cert, password)) + + def iter(self, domain): + for (cdomain, key, cert, password) in self.credentials: + if cdomain == "" or domain == cdomain: + yield (key, cert, password) class AllHosts(object): @@ -973,14 +815,7 @@ class ProxyInfo(object): bypass_hosts = () def __init__( - self, - proxy_type, - proxy_host, - proxy_port, - proxy_rdns=True, - proxy_user=None, - proxy_pass=None, - proxy_headers=None, + self, proxy_type, proxy_host, proxy_port, proxy_rdns=True, proxy_user=None, proxy_pass=None, proxy_headers=None, ): """Args: @@ -999,11 +834,19 @@ def __init__( proxy_headers: Additional or modified headers for the proxy connect request. """ - if isinstance(proxy_user, str): - proxy_user = proxy_user.encode() - if isinstance(proxy_pass, str): - proxy_pass = proxy_pass.encode() - self.proxy_type, self.proxy_host, self.proxy_port, self.proxy_rdns, self.proxy_user, self.proxy_pass, self.proxy_headers = ( + if isinstance(proxy_user, bytes): + proxy_user = proxy_user.decode() + if isinstance(proxy_pass, bytes): + proxy_pass = proxy_pass.decode() + ( + self.proxy_type, + self.proxy_host, + self.proxy_port, + self.proxy_rdns, + self.proxy_user, + self.proxy_pass, + self.proxy_headers, + ) = ( proxy_type, proxy_host, proxy_port, @@ -1137,14 +980,18 @@ def __init__(self, host, port=None, timeout=None, proxy_info=None): def connect(self): """Connect to the host and port specified in __init__.""" if self.proxy_info and socks is None: - raise ProxiesUnavailableError( - "Proxy support missing but proxy use was requested!" 
- ) + raise ProxiesUnavailableError("Proxy support missing but proxy use was requested!") if self.proxy_info and self.proxy_info.isgood() and self.proxy_info.applies_to(self.host): use_proxy = True - proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass, proxy_headers = ( - self.proxy_info.astuple() - ) + ( + proxy_type, + proxy_host, + proxy_port, + proxy_rdns, + proxy_user, + proxy_pass, + proxy_headers, + ) = self.proxy_info.astuple() host = proxy_host port = proxy_port @@ -1163,12 +1010,7 @@ def connect(self): if use_proxy: self.sock = socks.socksocket(af, socktype, proto) self.sock.setproxy( - proxy_type, - proxy_host, - proxy_port, - proxy_rdns, - proxy_user, - proxy_pass, + proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass, ) else: self.sock = socket.socket(af, socktype, proto) @@ -1176,22 +1018,11 @@ def connect(self): if has_timeout(self.timeout): self.sock.settimeout(self.timeout) if self.debuglevel > 0: - print( - "connect: ({0}, {1}) ************".format(self.host, self.port) - ) + print("connect: ({0}, {1}) ************".format(self.host, self.port)) if use_proxy: print( "proxy: {0} ************".format( - str( - ( - proxy_host, - proxy_port, - proxy_rdns, - proxy_user, - proxy_pass, - proxy_headers, - ) - ) + str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass, proxy_headers,)) ) ) @@ -1203,16 +1034,7 @@ def connect(self): if use_proxy: print( "proxy: {0}".format( - str( - ( - proxy_host, - proxy_port, - proxy_rdns, - proxy_user, - proxy_pass, - proxy_headers, - ) - ) + str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass, proxy_headers,)) ) ) if self.sock: @@ -1245,6 +1067,7 @@ def __init__( disable_ssl_certificate_validation=False, tls_maximum_version=None, tls_minimum_version=None, + key_password=None, ): self.disable_ssl_certificate_validation = disable_ssl_certificate_validation @@ -1255,25 +1078,34 @@ def __init__( self.proxy_info = proxy_info("https") context = _build_ssl_context( - self.disable_ssl_certificate_validation, self.ca_certs, cert_file, key_file, - maximum_version=tls_maximum_version, minimum_version=tls_minimum_version, + self.disable_ssl_certificate_validation, + self.ca_certs, + cert_file, + key_file, + maximum_version=tls_maximum_version, + minimum_version=tls_minimum_version, + key_password=key_password, ) super(HTTPSConnectionWithTimeout, self).__init__( - host, - port=port, - key_file=key_file, - cert_file=cert_file, - timeout=timeout, - context=context, + host, port=port, timeout=timeout, context=context, ) + self.key_file = key_file + self.cert_file = cert_file + self.key_password = key_password def connect(self): """Connect to a host on a given (SSL) port.""" - if self.proxy_info and self.proxy_info.isgood(): + if self.proxy_info and self.proxy_info.isgood() and self.proxy_info.applies_to(self.host): use_proxy = True - proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass, proxy_headers = ( - self.proxy_info.astuple() - ) + ( + proxy_type, + proxy_host, + proxy_port, + proxy_rdns, + proxy_user, + proxy_pass, + proxy_headers, + ) = self.proxy_info.astuple() host = proxy_host port = proxy_port @@ -1294,12 +1126,7 @@ def connect(self): sock = socks.socksocket(family, socktype, proto) sock.setproxy( - proxy_type, - proxy_host, - proxy_port, - proxy_rdns, - proxy_user, - proxy_pass, + proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass, ) else: sock = socket.socket(family, socktype, proto) @@ -1311,10 +1138,7 @@ def connect(self): self.sock = 
self._context.wrap_socket(sock, server_hostname=self.host) # Python 3.3 compatibility: emulate the check_hostname behavior - if ( - not hasattr(self._context, "check_hostname") - and not self.disable_ssl_certificate_validation - ): + if not hasattr(self._context, "check_hostname") and not self.disable_ssl_certificate_validation: try: ssl.match_hostname(self.sock.getpeercert(), self.host) except Exception: @@ -1327,16 +1151,7 @@ def connect(self): if use_proxy: print( "proxy: {0}".format( - str( - ( - proxy_host, - proxy_port, - proxy_rdns, - proxy_user, - proxy_pass, - proxy_headers, - ) - ) + str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass, proxy_headers,)) ) ) except (ssl.SSLError, ssl.CertificateError) as e: @@ -1351,20 +1166,11 @@ def connect(self): except socket.error as e: socket_err = e if self.debuglevel > 0: - print("connect fail: ({0}, {1})".format((self.host, self.port))) + print("connect fail: ({0}, {1})".format(self.host, self.port)) if use_proxy: print( "proxy: {0}".format( - str( - ( - proxy_host, - proxy_port, - proxy_rdns, - proxy_user, - proxy_pass, - proxy_headers, - ) - ) + str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass, proxy_headers,)) ) ) if self.sock: @@ -1459,10 +1265,14 @@ def __init__( # If set to False then no redirects are followed, even safe ones. self.follow_redirects = True + self.redirect_codes = REDIRECT_CODES + # Which HTTP methods do we apply optimistic concurrency to, i.e. # which methods get an "if-match:" etag header added to them. self.optimistic_concurrency_methods = ["PUT", "PATCH"] + self.safe_methods = list(SAFE_METHODS) + # If 'follow_redirects' is True, and this is set to True then # all redirecs are followed, including unsafe ones. self.follow_all_redirects = False @@ -1476,6 +1286,16 @@ def __init__( # Keep Authorization: headers on a redirect. self.forward_authorization_headers = False + def close(self): + """Close persistent connections, clear sensitive data. + Not thread-safe, requires external synchronization against concurrent requests. + """ + existing, self.connections = self.connections, {} + for _, c in existing.items(): + c.close() + self.certificates.clear() + self.clear_credentials() + def __getstate__(self): state_dict = copy.copy(self.__dict__) # In case request is augmented by some foreign object such as @@ -1494,23 +1314,21 @@ def _auth_from_challenge(self, host, request_uri, headers, response, content): """A generator that creates Authorization objects that can be applied to requests. 
""" - challenges = _parse_www_authenticate(response, "www-authenticate") + challenges = auth._parse_www_authenticate(response, "www-authenticate") for cred in self.credentials.iter(host): for scheme in AUTH_SCHEME_ORDER: if scheme in challenges: - yield AUTH_SCHEME_CLASSES[scheme]( - cred, host, request_uri, headers, response, content, self - ) + yield AUTH_SCHEME_CLASSES[scheme](cred, host, request_uri, headers, response, content, self) def add_credentials(self, name, password, domain=""): """Add a name and password that will be used any time a request requires authentication.""" self.credentials.add(name, password, domain) - def add_certificate(self, key, cert, domain): + def add_certificate(self, key, cert, domain, password=None): """Add a key and cert that will be used any time a request requires authentication.""" - self.certificates.add(key, cert, domain) + self.certificates.add(key, cert, domain, password) def clear_credentials(self): """Remove all the names and passwords @@ -1534,9 +1352,7 @@ def _conn_request(self, conn, request_uri, method, body, headers): conn.close() raise ServerNotFoundError("Unable to find the server at %s" % conn.host) except socket.error as e: - errno_ = ( - e.args[0].errno if isinstance(e.args[0], socket.error) else e.errno - ) + errno_ = e.args[0].errno if isinstance(e.args[0], socket.error) else e.errno if errno_ in (errno.ENETUNREACH, errno.EADDRNOTAVAIL) and i < RETRIES: continue # retry on potentially transient errors raise @@ -1595,80 +1411,49 @@ def _conn_request(self, conn, request_uri, method, body, headers): return (response, content) def _request( - self, - conn, - host, - absolute_uri, - request_uri, - method, - body, - headers, - redirections, - cachekey, + self, conn, host, absolute_uri, request_uri, method, body, headers, redirections, cachekey, ): """Do the actual request using the connection object and also follow one level of redirects if necessary""" - auths = [ - (auth.depth(request_uri), auth) - for auth in self.authorizations - if auth.inscope(host, request_uri) - ] + auths = [(auth.depth(request_uri), auth) for auth in self.authorizations if auth.inscope(host, request_uri)] auth = auths and sorted(auths)[0][1] or None if auth: auth.request(method, request_uri, headers, body) - (response, content) = self._conn_request( - conn, request_uri, method, body, headers - ) + (response, content) = self._conn_request(conn, request_uri, method, body, headers) if auth: if auth.response(response, body): auth.request(method, request_uri, headers, body) - (response, content) = self._conn_request( - conn, request_uri, method, body, headers - ) + (response, content) = self._conn_request(conn, request_uri, method, body, headers) response._stale_digest = 1 if response.status == 401: - for authorization in self._auth_from_challenge( - host, request_uri, headers, response, content - ): + for authorization in self._auth_from_challenge(host, request_uri, headers, response, content): authorization.request(method, request_uri, headers, body) - (response, content) = self._conn_request( - conn, request_uri, method, body, headers - ) + (response, content) = self._conn_request(conn, request_uri, method, body, headers) if response.status != 401: self.authorizations.append(authorization) authorization.response(response, body) break - if ( - self.follow_all_redirects - or (method in ["GET", "HEAD"]) - or response.status == 303 - ): - if self.follow_redirects and response.status in [300, 301, 302, 303, 307]: + if self.follow_all_redirects or method in 
self.safe_methods or response.status in (303, 308): + if self.follow_redirects and response.status in self.redirect_codes: # Pick out the location header and basically start from the beginning # remembering first to strip the ETag header and decrement our 'depth' if redirections: if "location" not in response and response.status != 300: raise RedirectMissingLocation( - _( - "Redirected but the response is missing a Location: header." - ), - response, - content, + _("Redirected but the response is missing a Location: header."), response, content, ) # Fix-up relative redirects (which violate an RFC 2616 MUST) if "location" in response: location = response["location"] (scheme, authority, path, query, fragment) = parse_uri(location) if authority == None: - response["location"] = urllib.parse.urljoin( - absolute_uri, location - ) - if response.status == 301 and method in ["GET", "HEAD"]: + response["location"] = urllib.parse.urljoin(absolute_uri, location) + if response.status == 308 or (response.status == 301 and (method in self.safe_methods)): response["-x-permanent-redirect-url"] = response["location"] if "content-location" not in response: response["content-location"] = absolute_uri @@ -1677,10 +1462,7 @@ def _request( del headers["if-none-match"] if "if-modified-since" in headers: del headers["if-modified-since"] - if ( - "authorization" in headers - and not self.forward_authorization_headers - ): + if "authorization" in headers and not self.forward_authorization_headers: del headers["authorization"] if "location" in response: location = response["location"] @@ -1692,20 +1474,14 @@ def _request( redirect_method = "GET" body = None (response, content) = self.request( - location, - method=redirect_method, - body=body, - headers=headers, - redirections=redirections - 1, + location, method=redirect_method, body=body, headers=headers, redirections=redirections - 1, ) response.previous = old_response else: raise RedirectLimit( - "Redirected more times than redirection_limit allows.", - response, - content, + "Redirected more times than redirection_limit allows.", response, content, ) - elif response.status in [200, 203] and method in ["GET", "HEAD"]: + elif response.status in [200, 203] and method in self.safe_methods: # Don't cache 206's since we aren't going to handle byte range requests if "content-location" not in response: response["content-location"] = absolute_uri @@ -1721,13 +1497,7 @@ def _normalize_headers(self, headers): # including all socket.* and httplib.* exceptions. def request( - self, - uri, - method="GET", - body=None, - headers=None, - redirections=DEFAULT_MAX_REDIRECTS, - connection_type=None, + self, uri, method="GET", body=None, headers=None, redirections=DEFAULT_MAX_REDIRECTS, connection_type=None, ): """ Performs a single HTTP request. The 'uri' is the URI of the HTTP resource and can begin @@ -1749,7 +1519,7 @@ def request( being and instance of the 'Response' class, the second being a string that contains the response entity body. """ - conn_key = '' + conn_key = "" try: if headers is None: @@ -1761,6 +1531,9 @@ def request( headers["user-agent"] = "Python-httplib2/%s (gzip)" % __version__ uri = iri2uri(uri) + # Prevent CWE-75 space injection to manipulate request via part of uri. + # Prevent CWE-93 CRLF injection to modify headers via part of uri. 
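+        # For example (an illustrative value, not part of the upstream diff):
+        #   "http://h/a b\r\nx: 1"  ->  "http://h/a%20b%0D%0Ax:%201"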
+ uri = uri.replace(" ", "%20").replace("\r", "%0D").replace("\n", "%0A") (scheme, authority, request_uri, defrag_uri) = urlnorm(uri) @@ -1782,6 +1555,7 @@ def request( disable_ssl_certificate_validation=self.disable_ssl_certificate_validation, tls_maximum_version=self.tls_maximum_version, tls_minimum_version=self.tls_minimum_version, + key_password=certs[0][2], ) else: conn = self.connections[conn_key] = connection_type( @@ -1803,6 +1577,7 @@ def request( headers["accept-encoding"] = "gzip, deflate" info = email.message.Message() + cachekey = None cached_value = None if self.cache: cachekey = defrag_uri @@ -1813,15 +1588,11 @@ def request( info = email.message_from_bytes(info) for k, v in info.items(): if v.startswith("=?") and v.endswith("?="): - info.replace_header( - k, str(*email.header.decode_header(v)[0]) - ) + info.replace_header(k, str(*email.header.decode_header(v)[0])) except (IndexError, ValueError): self.cache.delete(cachekey) cachekey = None cached_value = None - else: - cachekey = None if ( method in self.optimistic_concurrency_methods @@ -1833,13 +1604,15 @@ def request( # http://www.w3.org/1999/04/Editing/ headers["if-match"] = info["etag"] - if method not in ["GET", "HEAD"] and self.cache and cachekey: - # RFC 2616 Section 13.10 + # https://tools.ietf.org/html/rfc7234 + # A cache MUST invalidate the effective Request URI as well as [...] Location and Content-Location + # when a non-error status code is received in response to an unsafe request method. + if self.cache and cachekey and method not in self.safe_methods: self.cache.delete(cachekey) # Check the vary header in the cache to see if this request # matches what varies in the cache. - if method in ["GET", "HEAD"] and "vary" in info: + if method in self.safe_methods and "vary" in info: vary = info["vary"] vary_headers = vary.lower().replace(" ", "").split(",") for header in vary_headers: @@ -1850,22 +1623,23 @@ def request( break if ( - cached_value - and method in ["GET", "HEAD"] - and self.cache + self.cache + and cached_value + and (method in self.safe_methods or info["status"] == "308") and "range" not in headers ): + redirect_method = method + if info["status"] not in ("307", "308"): + redirect_method = "GET" if "-x-permanent-redirect-url" in info: # Should cached permanent redirects be counted in our redirection count? For now, yes. 
if redirections <= 0: raise RedirectLimit( - "Redirected more times than redirection_limit allows.", - {}, - "", + "Redirected more times than redirection_limit allows.", {}, "", ) (response, new_content) = self.request( info["-x-permanent-redirect-url"], - method="GET", + method=redirect_method, headers=headers, redirections=redirections - 1, ) @@ -1892,11 +1666,7 @@ def request( return (response, content) if entry_disposition == "STALE": - if ( - "etag" in info - and not self.ignore_etag - and not "if-none-match" in headers - ): + if "etag" in info and not self.ignore_etag and not "if-none-match" in headers: headers["if-none-match"] = info["etag"] if "last-modified" in info and not "last-modified" in headers: headers["if-modified-since"] = info["last-modified"] @@ -1904,15 +1674,7 @@ def request( pass (response, new_content) = self._request( - conn, - authority, - uri, - request_uri, - method, - body, - headers, - redirections, - cachekey, + conn, authority, uri, request_uri, method, body, headers, redirections, cachekey, ) if response.status == 304 and method == "GET": @@ -1926,9 +1688,7 @@ def request( merged_response = Response(info) if hasattr(response, "_stale_digest"): merged_response._stale_digest = response._stale_digest - _updateCache( - headers, merged_response, content, self.cache, cachekey - ) + _updateCache(headers, merged_response, content, self.cache, cachekey) response = merged_response response.status = 200 response.fromcache = True @@ -1946,15 +1706,7 @@ def request( content = b"" else: (response, content) = self._request( - conn, - authority, - uri, - request_uri, - method, - body, - headers, - redirections, - cachekey, + conn, authority, uri, request_uri, method, body, headers, redirections, cachekey, ) except Exception as e: is_timeout = isinstance(e, socket.timeout) @@ -1971,23 +1723,11 @@ def request( response.reason = str(e) elif isinstance(e, socket.timeout): content = b"Request Timeout" - response = Response( - { - "content-type": "text/plain", - "status": "408", - "content-length": len(content), - } - ) + response = Response({"content-type": "text/plain", "status": "408", "content-length": len(content),}) response.reason = "Request Timeout" else: content = str(e).encode("utf-8") - response = Response( - { - "content-type": "text/plain", - "status": "400", - "content-length": len(content), - } - ) + response = Response({"content-type": "text/plain", "status": "400", "content-length": len(content),}) response.reason = "Bad Request" else: raise diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/httplib2/auth.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/httplib2/auth.py new file mode 100644 index 00000000..84b58317 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/httplib2/auth.py @@ -0,0 +1,63 @@ +import base64 +import re + +import pyparsing as pp + +from .error import * + +UNQUOTE_PAIRS = re.compile(r"\\(.)") +unquote = lambda s, l, t: UNQUOTE_PAIRS.sub(r"\1", t[0][1:-1]) + +# https://tools.ietf.org/html/rfc7235#section-1.2 +# https://tools.ietf.org/html/rfc7235#appendix-B +tchar = "!#$%&'*+-.^_`|~" + pp.nums + pp.alphas +token = pp.Word(tchar).setName("token") +token68 = pp.Combine(pp.Word("-._~+/" + pp.nums + pp.alphas) + pp.Optional(pp.Word("=").leaveWhitespace())).setName( + "token68" +) + +quoted_string = pp.dblQuotedString.copy().setName("quoted-string").setParseAction(unquote) +auth_param_name = token.copy().setName("auth-param-name").addParseAction(pp.downcaseTokens) +auth_param = auth_param_name + 
pp.Suppress("=") + (quoted_string | token) +params = pp.Dict(pp.delimitedList(pp.Group(auth_param))) + +scheme = token("scheme") +challenge = scheme + (params("params") | token68("token")) + +authentication_info = params.copy() +www_authenticate = pp.delimitedList(pp.Group(challenge)) + + +def _parse_authentication_info(headers, headername="authentication-info"): + """https://tools.ietf.org/html/rfc7615 + """ + header = headers.get(headername, "").strip() + if not header: + return {} + try: + parsed = authentication_info.parseString(header) + except pp.ParseException as ex: + # print(ex.explain(ex)) + raise MalformedHeader(headername) + + return parsed.asDict() + + +def _parse_www_authenticate(headers, headername="www-authenticate"): + """Returns a dictionary of dictionaries, one dict per auth_scheme.""" + header = headers.get(headername, "").strip() + if not header: + return {} + try: + parsed = www_authenticate.parseString(header) + except pp.ParseException as ex: + # print(ex.explain(ex)) + raise MalformedHeader(headername) + + retval = { + challenge["scheme"].lower(): challenge["params"].asDict() + if "params" in challenge + else {"token": challenge.get("token")} + for challenge in parsed + } + return retval diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/httplib2/certs.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/httplib2/certs.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/httplib2/error.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/httplib2/error.py new file mode 100644 index 00000000..0e68c12a --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/httplib2/error.py @@ -0,0 +1,48 @@ +# All exceptions raised here derive from HttpLib2Error +class HttpLib2Error(Exception): + pass + + +# Some exceptions can be caught and optionally +# be turned back into responses. +class HttpLib2ErrorWithResponse(HttpLib2Error): + def __init__(self, desc, response, content): + self.response = response + self.content = content + HttpLib2Error.__init__(self, desc) + + +class RedirectMissingLocation(HttpLib2ErrorWithResponse): + pass + + +class RedirectLimit(HttpLib2ErrorWithResponse): + pass + + +class FailedToDecompressContent(HttpLib2ErrorWithResponse): + pass + + +class UnimplementedDigestAuthOptionError(HttpLib2ErrorWithResponse): + pass + + +class UnimplementedHmacDigestAuthOptionError(HttpLib2ErrorWithResponse): + pass + + +class MalformedHeader(HttpLib2Error): + pass + + +class RelativeURIError(HttpLib2Error): + pass + + +class ServerNotFoundError(HttpLib2Error): + pass + + +class ProxiesUnavailableError(HttpLib2Error): + pass diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/httplib2/iri2uri.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/httplib2/iri2uri.py old mode 100755 new mode 100644 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/httplib2/socks.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/httplib2/socks.py old mode 100755 new mode 100644 index 2926b4e5..cc68e634 --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/httplib2/socks.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/httplib2/socks.py @@ -238,7 +238,15 @@ def setproxy( headers - Additional or modified headers for the proxy connect request. 
""" - self.__proxy = (proxytype, addr, port, rdns, username, password, headers) + self.__proxy = ( + proxytype, + addr, + port, + rdns, + username.encode() if username else None, + password.encode() if password else None, + headers, + ) def __negotiatesocks5(self, destaddr, destport): """__negotiatesocks5(self,destaddr,destport) diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/idna-3.3.dist-info/INSTALLER b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/idna-3.3.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/idna-3.3.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/idna-3.3.dist-info/LICENSE.md b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/idna-3.3.dist-info/LICENSE.md new file mode 100644 index 00000000..b6f87326 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/idna-3.3.dist-info/LICENSE.md @@ -0,0 +1,29 @@ +BSD 3-Clause License + +Copyright (c) 2013-2021, Kim Davies +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/idna-3.3.dist-info/METADATA b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/idna-3.3.dist-info/METADATA new file mode 100644 index 00000000..6446805d --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/idna-3.3.dist-info/METADATA @@ -0,0 +1,236 @@ +Metadata-Version: 2.1 +Name: idna +Version: 3.3 +Summary: Internationalized Domain Names in Applications (IDNA) +Home-page: https://github.com/kjd/idna +Author: Kim Davies +Author-email: kim@cynosure.com.au +License: BSD-3-Clause +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Intended Audience :: System Administrators +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Internet :: Name Service (DNS) +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Topic :: Utilities +Requires-Python: >=3.5 +License-File: LICENSE.md + +Internationalized Domain Names in Applications (IDNA) +===================================================== + +Support for the Internationalised Domain Names in Applications +(IDNA) protocol as specified in `RFC 5891 `_. +This is the latest version of the protocol and is sometimes referred to as +“IDNA 2008”. + +This library also provides support for Unicode Technical Standard 46, +`Unicode IDNA Compatibility Processing `_. + +This acts as a suitable replacement for the “encodings.idna” module that +comes with the Python standard library, but which only supports the +older superseded IDNA specification (`RFC 3490 `_). + +Basic functions are simply executed: + +.. code-block:: pycon + + >>> import idna + >>> idna.encode('ドメイン.テスト') + b'xn--eckwd4c7c.xn--zckzah' + >>> print(idna.decode('xn--eckwd4c7c.xn--zckzah')) + ドメイン.テスト + + +Installation +------------ + +To install this library, you can use pip: + +.. code-block:: bash + + $ pip install idna + +Alternatively, you can install the package using the bundled setup script: + +.. code-block:: bash + + $ python setup.py install + + +Usage +----- + +For typical usage, the ``encode`` and ``decode`` functions will take a domain +name argument and perform a conversion to A-labels or U-labels respectively. + +.. code-block:: pycon + + >>> import idna + >>> idna.encode('ドメイン.テスト') + b'xn--eckwd4c7c.xn--zckzah' + >>> print(idna.decode('xn--eckwd4c7c.xn--zckzah')) + ドメイン.テスト + +You may use the codec encoding and decoding methods using the +``idna.codec`` module: + +.. code-block:: pycon + + >>> import idna.codec + >>> print('домен.испытание'.encode('idna')) + b'xn--d1acufc.xn--80akhbyknj4f' + >>> print(b'xn--d1acufc.xn--80akhbyknj4f'.decode('idna')) + домен.испытание + +Conversions can be applied at a per-label basis using the ``ulabel`` or ``alabel`` +functions if necessary: + +.. 
code-block:: pycon + + >>> idna.alabel('测试') + b'xn--0zwm56d' + +Compatibility Mapping (UTS #46) ++++++++++++++++++++++++++++++++ + +As described in `RFC 5895 `_, the IDNA +specification does not normalize input from different potential ways a user +may input a domain name. This functionality, known as a “mapping”, is +considered by the specification to be a local user-interface issue distinct +from IDNA conversion functionality. + +This library provides one such mapping, that was developed by the Unicode +Consortium. Known as `Unicode IDNA Compatibility Processing `_, +it provides for both a regular mapping for typical applications, as well as +a transitional mapping to help migrate from older IDNA 2003 applications. + +For example, “Königsgäßchen” is not a permissible label as *LATIN CAPITAL +LETTER K* is not allowed (nor are capital letters in general). UTS 46 will +convert this into lower case prior to applying the IDNA conversion. + +.. code-block:: pycon + + >>> import idna + >>> idna.encode('Königsgäßchen') + ... + idna.core.InvalidCodepoint: Codepoint U+004B at position 1 of 'Königsgäßchen' not allowed + >>> idna.encode('Königsgäßchen', uts46=True) + b'xn--knigsgchen-b4a3dun' + >>> print(idna.decode('xn--knigsgchen-b4a3dun')) + königsgäßchen + +Transitional processing provides conversions to help transition from the older +2003 standard to the current standard. For example, in the original IDNA +specification, the *LATIN SMALL LETTER SHARP S* (ß) was converted into two +*LATIN SMALL LETTER S* (ss), whereas in the current IDNA specification this +conversion is not performed. + +.. code-block:: pycon + + >>> idna.encode('Königsgäßchen', uts46=True, transitional=True) + 'xn--knigsgsschen-lcb0w' + +Implementors should use transitional processing with caution, only in rare +cases where conversion from legacy labels to current labels must be performed +(i.e. IDNA implementations that pre-date 2008). For typical applications +that just need to convert labels, transitional processing is unlikely to be +beneficial and could produce unexpected incompatible results. + +``encodings.idna`` Compatibility +++++++++++++++++++++++++++++++++ + +Function calls from the Python built-in ``encodings.idna`` module are +mapped to their IDNA 2008 equivalents using the ``idna.compat`` module. +Simply substitute the ``import`` clause in your code to refer to the +new module name. + +Exceptions +---------- + +All errors raised during the conversion following the specification should +raise an exception derived from the ``idna.IDNAError`` base class. + +More specific exceptions that may be generated are ``idna.IDNABidiError`` +when the error reflects an illegal combination of left-to-right and +right-to-left characters in a label; ``idna.InvalidCodepoint`` when +a specific codepoint is an illegal character in an IDN label (i.e. +INVALID); and ``idna.InvalidCodepointContext`` when the codepoint is +illegal based on its positional context (i.e. it is CONTEXTO or CONTEXTJ +but the contextual requirements are not satisfied.) + +Building and Diagnostics +------------------------ + +The IDNA and UTS 46 functionality relies upon pre-calculated lookup +tables for performance. These tables are derived from computing against +eligibility criteria in the respective standards. These tables are +computed using the command-line script ``tools/idna-data``. + +This tool will fetch relevant codepoint data from the Unicode repository +and perform the required calculations to identify eligibility.
There are + three main modes: + +* ``idna-data make-libdata``. Generates ``idnadata.py`` and ``uts46data.py``, + the pre-calculated lookup tables used for IDNA and UTS 46 conversions. Implementors + who wish to track this library against a different Unicode version may use this tool + to manually generate a different version of the ``idnadata.py`` and ``uts46data.py`` + files. + +* ``idna-data make-table``. Generate a table of the IDNA disposition + (e.g. PVALID, CONTEXTJ, CONTEXTO) in the format found in Appendix B.1 of RFC + 5892 and the pre-computed tables published by `IANA `_. + +* ``idna-data U+0061``. Prints debugging output on the various properties + associated with an individual Unicode codepoint (in this case, U+0061), that are + used to assess the IDNA and UTS 46 status of a codepoint. This is helpful in debugging + or analysis. + +The tool accepts a number of arguments, described using ``idna-data -h``. Most notably, +the ``--version`` argument allows the specification of the version of Unicode to use +in computing the table data. For example, ``idna-data --version 9.0.0 make-libdata`` +will generate library data against Unicode 9.0.0. + + +Additional Notes +---------------- + +* **Packages**. The latest tagged release version is published in the + `Python Package Index `_. + +* **Version support**. This library supports Python 3.5 and higher. As this library + serves as a low-level toolkit for a variety of applications, many of which strive + for broad compatibility with older Python versions, there is no rush to remove + older interpreter support. Removing support for older versions should be well + justified in that the maintenance burden has become too high. + +* **Python 2**. Python 2 is supported by version 2.x of this library. While active + development of the version 2.x series has ended, notable issues being corrected + may be backported to 2.x. Use "idna<3" in your requirements file if you need this + library for a Python 2 application. + +* **Testing**. The library has a test suite based on each rule of the IDNA specification, as + well as tests that are provided as part of the Unicode Technical Standard 46, + `Unicode IDNA Compatibility Processing `_. + +* **Emoji**. It is an occasional request to support emoji domains in this library. Encoding + of symbols like emoji is expressly prohibited by the technical standard IDNA 2008 and + emoji domains are broadly phased out across the domain industry due to associated security + risks. For now, applications that need to support these non-compliant labels may + wish to consider trying the encode/decode operation in this library first, and then falling + back to using `encodings.idna`. See `the Github project `_ + for more discussion.
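+
+A minimal sketch of that fallback pattern (illustrative only; ``label_to_ascii``
+is a hypothetical helper, and the input is assumed to be a single label):
+
+.. code-block:: python
+
+    import encodings.idna
+    import idna
+
+    def label_to_ascii(label):
+        # Try strict IDNA 2008 first; fall back to the legacy stdlib codec.
+        try:
+            return idna.encode(label)
+        except idna.IDNAError:
+            return encodings.idna.ToASCII(label)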
+ diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/idna-3.3.dist-info/RECORD b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/idna-3.3.dist-info/RECORD new file mode 100644 index 00000000..79e2d8e4 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/idna-3.3.dist-info/RECORD @@ -0,0 +1,15 @@ +idna-3.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +idna-3.3.dist-info/LICENSE.md,sha256=otbk2UC9JNvnuWRc3hmpeSzFHbeuDVrNMBrIYMqj6DY,1523 +idna-3.3.dist-info/METADATA,sha256=BdqiAf8ou4x1nzIHp2_sDfXWjl7BrSUGpOeVzbYHQuQ,9765 +idna-3.3.dist-info/RECORD,, +idna-3.3.dist-info/WHEEL,sha256=ewwEueio1C2XeHTvT17n8dZUJgOvyCWCt0WVNLClP9o,92 +idna-3.3.dist-info/top_level.txt,sha256=jSag9sEDqvSPftxOQy-ABfGV_RSy7oFh4zZJpODV8k0,5 +idna/__init__.py,sha256=KJQN1eQBr8iIK5SKrJ47lXvxG0BJ7Lm38W4zT0v_8lk,849 +idna/codec.py,sha256=6ly5odKfqrytKT9_7UrlGklHnf1DSK2r9C6cSM4sa28,3374 +idna/compat.py,sha256=0_sOEUMT4CVw9doD3vyRhX80X19PwqFoUBs7gWsFME4,321 +idna/core.py,sha256=RFIkY-HhFZaDoBEFjGwyGd_vWI04uOAQjnzueMWqwOU,12795 +idna/idnadata.py,sha256=fzMzkCea2xieVxcrjngJ-2pLsKQNejPCZFlBajIuQdw,44025 +idna/intranges.py,sha256=YBr4fRYuWH7kTKS2tXlFjM24ZF1Pdvcir-aywniInqg,1881 +idna/package_data.py,sha256=szxQhV0ZD0nKJ84Kuobw3l8q4_KeCyXjFRdpwIpKZmw,21 +idna/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +idna/uts46data.py,sha256=o-D7V-a0fOLZNd7tvxof6MYfUd0TBZzE2bLR5XO67xU,204400 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/idna-3.3.dist-info/WHEEL b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/idna-3.3.dist-info/WHEEL new file mode 100644 index 00000000..5bad85fd --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/idna-3.3.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/idna-3.3.dist-info/top_level.txt b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/idna-3.3.dist-info/top_level.txt new file mode 100644 index 00000000..c40472e6 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/idna-3.3.dist-info/top_level.txt @@ -0,0 +1 @@ +idna diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/idna-3.4.dist-info/INSTALLER b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/idna-3.4.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/idna-3.4.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/idna-3.4.dist-info/LICENSE.md b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/idna-3.4.dist-info/LICENSE.md new file mode 100644 index 00000000..b6f87326 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/idna-3.4.dist-info/LICENSE.md @@ -0,0 +1,29 @@ +BSD 3-Clause License + +Copyright (c) 2013-2021, Kim Davies +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +3. 
Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/idna-3.4.dist-info/METADATA b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/idna-3.4.dist-info/METADATA new file mode 100644 index 00000000..07f6193b --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/idna-3.4.dist-info/METADATA @@ -0,0 +1,242 @@ +Metadata-Version: 2.1 +Name: idna +Version: 3.4 +Summary: Internationalized Domain Names in Applications (IDNA) +Author-email: Kim Davies +Requires-Python: >=3.5 +Description-Content-Type: text/x-rst +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Intended Audience :: System Administrators +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Internet :: Name Service (DNS) +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Topic :: Utilities +Project-URL: Changelog, https://github.com/kjd/idna/blob/master/HISTORY.rst +Project-URL: Issue tracker, https://github.com/kjd/idna/issues +Project-URL: Source, https://github.com/kjd/idna + +Internationalized Domain Names in Applications (IDNA) +===================================================== + +Support for the Internationalized Domain Names in +Applications (IDNA) protocol as specified in `RFC 5891 +`_. This is the latest version of +the protocol and is sometimes referred to as “IDNA 2008”. + +This library also provides support for Unicode Technical +Standard 46, `Unicode IDNA Compatibility Processing +`_. + +This acts as a suitable replacement for the “encodings.idna” +module that comes with the Python standard library, but which +only supports the older superseded IDNA specification (`RFC 3490 +`_). + +Basic functions are simply executed: + +.. 
code-block:: pycon + + >>> import idna + >>> idna.encode('ドメイン.テスト') + b'xn--eckwd4c7c.xn--zckzah' + >>> print(idna.decode('xn--eckwd4c7c.xn--zckzah')) + ドメイン.テスト + + +Installation +------------ + +This package is available for installation from PyPI: + +.. code-block:: bash + + $ python3 -m pip install idna + + +Usage +----- + +For typical usage, the ``encode`` and ``decode`` functions will take a +domain name argument and perform a conversion to A-labels or U-labels +respectively. + +.. code-block:: pycon + + >>> import idna + >>> idna.encode('ドメイン.テスト') + b'xn--eckwd4c7c.xn--zckzah' + >>> print(idna.decode('xn--eckwd4c7c.xn--zckzah')) + ドメイン.テスト + +You may use the codec encoding and decoding methods using the +``idna.codec`` module: + +.. code-block:: pycon + + >>> import idna.codec + >>> print('домен.испытание'.encode('idna')) + b'xn--d1acufc.xn--80akhbyknj4f' + >>> print(b'xn--d1acufc.xn--80akhbyknj4f'.decode('idna')) + домен.испытание + +Conversions can be applied at a per-label basis using the ``ulabel`` or +``alabel`` functions if necessary: + +.. code-block:: pycon + + >>> idna.alabel('测试') + b'xn--0zwm56d' + +Compatibility Mapping (UTS #46) ++++++++++++++++++++++++++++++++ + +As described in `RFC 5895 `_, the +IDNA specification does not normalize input from different potential +ways a user may input a domain name. This functionality, known as +a “mapping”, is considered by the specification to be a local +user-interface issue distinct from IDNA conversion functionality. + +This library provides one such mapping, that was developed by the +Unicode Consortium. Known as `Unicode IDNA Compatibility Processing +`_, it provides for both a regular +mapping for typical applications, as well as a transitional mapping to +help migrate from older IDNA 2003 applications. + +For example, “Königsgäßchen” is not a permissible label as *LATIN +CAPITAL LETTER K* is not allowed (nor are capital letters in general). +UTS 46 will convert this into lower case prior to applying the IDNA +conversion. + +.. code-block:: pycon + + >>> import idna + >>> idna.encode('Königsgäßchen') + ... + idna.core.InvalidCodepoint: Codepoint U+004B at position 1 of 'Königsgäßchen' not allowed + >>> idna.encode('Königsgäßchen', uts46=True) + b'xn--knigsgchen-b4a3dun' + >>> print(idna.decode('xn--knigsgchen-b4a3dun')) + königsgäßchen + +Transitional processing provides conversions to help transition from +the older 2003 standard to the current standard. For example, in the +original IDNA specification, the *LATIN SMALL LETTER SHARP S* (ß) was +converted into two *LATIN SMALL LETTER S* (ss), whereas in the current +IDNA specification this conversion is not performed. + +.. code-block:: pycon + + >>> idna.encode('Königsgäßchen', uts46=True, transitional=True) + 'xn--knigsgsschen-lcb0w' + +Implementors should use transitional processing with caution, only in +rare cases where conversion from legacy labels to current labels must be +performed (i.e. IDNA implementations that pre-date 2008). For typical +applications that just need to convert labels, transitional processing +is unlikely to be beneficial and could produce unexpected incompatible +results. + +``encodings.idna`` Compatibility +++++++++++++++++++++++++++++++++ + +Function calls from the Python built-in ``encodings.idna`` module are +mapped to their IDNA 2008 equivalents using the ``idna.compat`` module. +Simply substitute the ``import`` clause in your code to refer to the new +module name. 
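+
+For illustration, a hypothetical session (assuming ``idna.compat`` exposes the
+``ToASCII`` and ``ToUnicode`` wrappers around ``encode``/``decode``):
+
+.. code-block:: pycon
+
+    >>> from idna.compat import ToASCII, ToUnicode
+    >>> ToASCII('ドメイン.テスト')
+    b'xn--eckwd4c7c.xn--zckzah'
+    >>> ToUnicode(b'xn--eckwd4c7c.xn--zckzah')
+    'ドメイン.テスト'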
+
+Exceptions
+----------
+
+All errors raised during conversion according to the specification are
+exceptions derived from the ``idna.IDNAError`` base class.
+
+More specific exceptions may be raised: ``idna.IDNABidiError``
+when the error reflects an illegal combination of left-to-right and
+right-to-left characters in a label; ``idna.InvalidCodepoint`` when
+a specific codepoint is an illegal character in an IDN label (i.e.
+INVALID); and ``idna.InvalidCodepointContext`` when the codepoint is
+illegal based on its positional context (i.e. it is CONTEXTO or CONTEXTJ
+but the contextual requirements are not satisfied).
+
+Building and Diagnostics
+------------------------
+
+The IDNA and UTS 46 functionality relies upon pre-calculated lookup
+tables for performance. These tables are derived from computing against
+eligibility criteria in the respective standards. These tables are
+computed using the command-line script ``tools/idna-data``.
+
+This tool will fetch relevant codepoint data from the Unicode repository
+and perform the required calculations to identify eligibility. There are
+three main modes:
+
+* ``idna-data make-libdata``. Generates ``idnadata.py`` and
+  ``uts46data.py``, the pre-calculated lookup tables used for IDNA and
+  UTS 46 conversions. Implementors who wish to track this library against
+  a different Unicode version may use this tool to manually generate a
+  different version of the ``idnadata.py`` and ``uts46data.py`` files.
+
+* ``idna-data make-table``. Generates a table of the IDNA disposition
+  (e.g. PVALID, CONTEXTJ, CONTEXTO) in the format found in Appendix
+  B.1 of RFC 5892 and the pre-computed tables published by IANA.
+
+* ``idna-data U+0061``. Prints debugging output on the various
+  properties associated with an individual Unicode codepoint (in this
+  case, U+0061) that are used to assess the IDNA and UTS 46 status of a
+  codepoint. This is helpful in debugging or analysis.
+
+The tool accepts a number of arguments, described using ``idna-data
+-h``. Most notably, the ``--version`` argument allows the specification
+of the version of Unicode to use in computing the table data. For
+example, ``idna-data --version 9.0.0 make-libdata`` will generate
+library data against Unicode 9.0.0.
+
+
+Additional Notes
+----------------
+
+* **Packages**. The latest tagged release version is published in the
+  `Python Package Index <https://pypi.org/project/idna/>`_.
+
+* **Version support**. This library supports Python 3.5 and higher.
+  As this library serves as a low-level toolkit for a variety of
+  applications, many of which strive for broad compatibility with older
+  Python versions, there is no rush to remove older interpreter support.
+  Support for an older version will only be removed when it is well
+  justified, i.e. when the maintenance burden has become too high.
+
+* **Python 2**. Python 2 is supported by version 2.x of this library.
+  While active development of the version 2.x series has ended, fixes
+  for notable issues may be backported to 2.x. Use "idna<3" in your
+  requirements file if you need this library for a Python 2 application.
+
+* **Testing**. The library has a test suite based on each rule of the
+  IDNA specification, as well as tests that are provided as part of the
+  Unicode Technical Standard 46, `Unicode IDNA Compatibility Processing
+  <https://unicode.org/reports/tr46/>`_.
+
+* **Emoji**. It is an occasional request to support emoji domains in
+  this library. Encoding of symbols like emoji is expressly prohibited by
+  the technical standard IDNA 2008, and emoji domains are broadly phased
+  out across the domain industry due to associated security risks. For
+  now, applications that need to support these non-compliant labels
+  may wish to consider trying the encode/decode operation in this library
+  first, and then falling back to using `encodings.idna`. See `the Github
+  project <https://github.com/kjd/idna>`_ for more discussion.
+
diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/idna-3.4.dist-info/RECORD b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/idna-3.4.dist-info/RECORD
new file mode 100644
index 00000000..ba63a2fe
--- /dev/null
+++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/idna-3.4.dist-info/RECORD
@@ -0,0 +1,14 @@
+idna-3.4.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+idna-3.4.dist-info/LICENSE.md,sha256=otbk2UC9JNvnuWRc3hmpeSzFHbeuDVrNMBrIYMqj6DY,1523
+idna-3.4.dist-info/METADATA,sha256=8aLSf9MFS7oB26pZh2hprg7eJp0UJSc-3rpf_evp4DA,9830
+idna-3.4.dist-info/RECORD,,
+idna-3.4.dist-info/WHEEL,sha256=4TfKIB_xu-04bc2iKz6_zFt-gEFEEDU_31HGhqzOCE8,81
+idna/__init__.py,sha256=KJQN1eQBr8iIK5SKrJ47lXvxG0BJ7Lm38W4zT0v_8lk,849
+idna/codec.py,sha256=6ly5odKfqrytKT9_7UrlGklHnf1DSK2r9C6cSM4sa28,3374
+idna/compat.py,sha256=0_sOEUMT4CVw9doD3vyRhX80X19PwqFoUBs7gWsFME4,321
+idna/core.py,sha256=1JxchwKzkxBSn7R_oCE12oBu3eVux0VzdxolmIad24M,12950
+idna/idnadata.py,sha256=xUjqKqiJV8Ho_XzBpAtv5JFoVPSupK-SUXvtjygUHqw,44375
+idna/intranges.py,sha256=YBr4fRYuWH7kTKS2tXlFjM24ZF1Pdvcir-aywniInqg,1881
+idna/package_data.py,sha256=C_jHJzmX8PI4xq0jpzmcTMxpb5lDsq4o5VyxQzlVrZE,21
+idna/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+idna/uts46data.py,sha256=zvjZU24s58_uAS850Mcd0NnD0X7_gCMAMjzWNIeUJdc,206539
diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/idna-3.4.dist-info/WHEEL b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/idna-3.4.dist-info/WHEEL
new file mode 100644
index 00000000..668ba4d0
--- /dev/null
+++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/idna-3.4.dist-info/WHEEL
@@ -0,0 +1,4 @@
+Wheel-Version: 1.0
+Generator: flit 3.7.1
+Root-Is-Purelib: true
+Tag: py3-none-any
diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/idna/__init__.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/idna/__init__.py
old mode 100755
new mode 100644
index 847bf935..a40eeafc
--- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/idna/__init__.py
+++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/idna/__init__.py
@@ -1,2 +1,44 @@
 from .package_data import __version__
-from .core import *
+from .core import (
+    IDNABidiError,
+    IDNAError,
+    InvalidCodepoint,
+    InvalidCodepointContext,
+    alabel,
+    check_bidi,
+    check_hyphen_ok,
+    check_initial_combiner,
+    check_label,
+    check_nfc,
+    decode,
+    encode,
+    ulabel,
+    uts46_remap,
+    valid_contextj,
+    valid_contexto,
+    valid_label_length,
+    valid_string_length,
+)
+from .intranges import intranges_contain
+
+__all__ = [
+    "IDNABidiError",
+    "IDNAError",
+    "InvalidCodepoint",
+    "InvalidCodepointContext",
+    "alabel",
+    "check_bidi",
+    "check_hyphen_ok",
+    "check_initial_combiner",
+    "check_label",
+    "check_nfc",
+    "decode",
+    "encode",
+    "intranges_contain",
+    "ulabel",
+    "uts46_remap",
+    "valid_contextj",
+    "valid_contexto",
+    "valid_label_length",
+    "valid_string_length",
+]
diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/idna/codec.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/idna/codec.py
old mode 100755
new mode
100644 index 98c65ead..1ca9ba62 --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/idna/codec.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/idna/codec.py @@ -1,41 +1,40 @@ from .core import encode, decode, alabel, ulabel, IDNAError import codecs import re +from typing import Tuple, Optional -_unicode_dots_re = re.compile(u'[\u002e\u3002\uff0e\uff61]') +_unicode_dots_re = re.compile('[\u002e\u3002\uff0e\uff61]') class Codec(codecs.Codec): - def encode(self, data, errors='strict'): - + def encode(self, data: str, errors: str = 'strict') -> Tuple[bytes, int]: if errors != 'strict': - raise IDNAError("Unsupported error handling \"{0}\"".format(errors)) + raise IDNAError('Unsupported error handling \"{}\"'.format(errors)) if not data: - return "", 0 + return b"", 0 return encode(data), len(data) - def decode(self, data, errors='strict'): - + def decode(self, data: bytes, errors: str = 'strict') -> Tuple[str, int]: if errors != 'strict': - raise IDNAError("Unsupported error handling \"{0}\"".format(errors)) + raise IDNAError('Unsupported error handling \"{}\"'.format(errors)) if not data: - return u"", 0 + return '', 0 return decode(data), len(data) class IncrementalEncoder(codecs.BufferedIncrementalEncoder): - def _buffer_encode(self, data, errors, final): + def _buffer_encode(self, data: str, errors: str, final: bool) -> Tuple[str, int]: # type: ignore if errors != 'strict': - raise IDNAError("Unsupported error handling \"{0}\"".format(errors)) + raise IDNAError('Unsupported error handling \"{}\"'.format(errors)) if not data: - return ("", 0) + return "", 0 labels = _unicode_dots_re.split(data) - trailing_dot = u'' + trailing_dot = '' if labels: if not labels[-1]: trailing_dot = '.' @@ -55,37 +54,29 @@ def _buffer_encode(self, data, errors, final): size += len(label) # Join with U+002E - result = ".".join(result) + trailing_dot + result_str = '.'.join(result) + trailing_dot # type: ignore size += len(trailing_dot) - return (result, size) + return result_str, size class IncrementalDecoder(codecs.BufferedIncrementalDecoder): - def _buffer_decode(self, data, errors, final): + def _buffer_decode(self, data: str, errors: str, final: bool) -> Tuple[str, int]: # type: ignore if errors != 'strict': - raise IDNAError("Unsupported error handling \"{0}\"".format(errors)) + raise IDNAError('Unsupported error handling \"{}\"'.format(errors)) if not data: - return (u"", 0) - - # IDNA allows decoding to operate on Unicode strings, too. - if isinstance(data, unicode): - labels = _unicode_dots_re.split(data) - else: - # Must be ASCII string - data = str(data) - unicode(data, "ascii") - labels = data.split(".") - - trailing_dot = u'' + return ('', 0) + + labels = _unicode_dots_re.split(data) + trailing_dot = '' if labels: if not labels[-1]: - trailing_dot = u'.' + trailing_dot = '.' del labels[-1] elif not final: # Keep potentially unfinished label until the next call del labels[-1] if labels: - trailing_dot = u'.' + trailing_dot = '.' 
result = [] size = 0 @@ -95,22 +86,25 @@ def _buffer_decode(self, data, errors, final): size += 1 size += len(label) - result = u".".join(result) + trailing_dot + result_str = '.'.join(result) + trailing_dot size += len(trailing_dot) - return (result, size) + return (result_str, size) class StreamWriter(Codec, codecs.StreamWriter): pass + class StreamReader(Codec, codecs.StreamReader): pass -def getregentry(): + +def getregentry() -> codecs.CodecInfo: + # Compatibility as a search_function for codecs.register() return codecs.CodecInfo( name='idna', - encode=Codec().encode, - decode=Codec().decode, + encode=Codec().encode, # type: ignore + decode=Codec().decode, # type: ignore incrementalencoder=IncrementalEncoder, incrementaldecoder=IncrementalDecoder, streamwriter=StreamWriter, diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/idna/compat.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/idna/compat.py old mode 100755 new mode 100644 index 4d47f336..786e6bda --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/idna/compat.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/idna/compat.py @@ -1,12 +1,13 @@ from .core import * from .codec import * +from typing import Any, Union -def ToASCII(label): +def ToASCII(label: str) -> bytes: return encode(label) -def ToUnicode(label): +def ToUnicode(label: Union[bytes, bytearray]) -> str: return decode(label) -def nameprep(s): - raise NotImplementedError("IDNA 2008 does not utilise nameprep protocol") +def nameprep(s: Any) -> None: + raise NotImplementedError('IDNA 2008 does not utilise nameprep protocol') diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/idna/core.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/idna/core.py old mode 100755 new mode 100644 index 104624ad..4f300371 --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/idna/core.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/idna/core.py @@ -2,16 +2,12 @@ import bisect import unicodedata import re -import sys +from typing import Union, Optional from .intranges import intranges_contain _virama_combining_class = 9 _alabel_prefix = b'xn--' -_unicode_dots_re = re.compile(u'[\u002e\u3002\uff0e\uff61]') - -if sys.version_info[0] == 3: - unicode = str - unichr = chr +_unicode_dots_re = re.compile('[\u002e\u3002\uff0e\uff61]') class IDNAError(UnicodeError): """ Base exception for all IDNA-encoding related problems """ @@ -33,46 +29,43 @@ class InvalidCodepointContext(IDNAError): pass -def _combining_class(cp): - v = unicodedata.combining(unichr(cp)) +def _combining_class(cp: int) -> int: + v = unicodedata.combining(chr(cp)) if v == 0: - if not unicodedata.name(unichr(cp)): - raise ValueError("Unknown character in unicodedata") + if not unicodedata.name(chr(cp)): + raise ValueError('Unknown character in unicodedata') return v -def _is_script(cp, script): +def _is_script(cp: str, script: str) -> bool: return intranges_contain(ord(cp), idnadata.scripts[script]) -def _punycode(s): +def _punycode(s: str) -> bytes: return s.encode('punycode') -def _unot(s): - return 'U+{0:04X}'.format(s) - +def _unot(s: int) -> str: + return 'U+{:04X}'.format(s) -def valid_label_length(label): +def valid_label_length(label: Union[bytes, str]) -> bool: if len(label) > 63: return False return True -def valid_string_length(label, trailing_dot): - +def valid_string_length(label: Union[bytes, str], trailing_dot: bool) -> bool: if len(label) > (254 if trailing_dot else 253): return False return True -def check_bidi(label, check_ltr=False): - +def 
check_bidi(label: str, check_ltr: bool = False) -> bool: # Bidi rules should only be applied if string contains RTL characters bidi_label = False for (idx, cp) in enumerate(label, 1): direction = unicodedata.bidirectional(cp) if direction == '': # String likely comes from a newer version of Unicode - raise IDNABidiError('Unknown directionality in label {0} at position {1}'.format(repr(label), idx)) + raise IDNABidiError('Unknown directionality in label {} at position {}'.format(repr(label), idx)) if direction in ['R', 'AL', 'AN']: bidi_label = True if not bidi_label and not check_ltr: @@ -85,17 +78,17 @@ def check_bidi(label, check_ltr=False): elif direction == 'L': rtl = False else: - raise IDNABidiError('First codepoint in label {0} must be directionality L, R or AL'.format(repr(label))) + raise IDNABidiError('First codepoint in label {} must be directionality L, R or AL'.format(repr(label))) valid_ending = False - number_type = False + number_type = None # type: Optional[str] for (idx, cp) in enumerate(label, 1): direction = unicodedata.bidirectional(cp) if rtl: # Bidi rule 2 if not direction in ['R', 'AL', 'AN', 'EN', 'ES', 'CS', 'ET', 'ON', 'BN', 'NSM']: - raise IDNABidiError('Invalid direction for codepoint at position {0} in a right-to-left label'.format(idx)) + raise IDNABidiError('Invalid direction for codepoint at position {} in a right-to-left label'.format(idx)) # Bidi rule 3 if direction in ['R', 'AL', 'EN', 'AN']: valid_ending = True @@ -111,7 +104,7 @@ def check_bidi(label, check_ltr=False): else: # Bidi rule 5 if not direction in ['L', 'EN', 'ES', 'CS', 'ET', 'ON', 'BN', 'NSM']: - raise IDNABidiError('Invalid direction for codepoint at position {0} in a left-to-right label'.format(idx)) + raise IDNABidiError('Invalid direction for codepoint at position {} in a left-to-right label'.format(idx)) # Bidi rule 6 if direction in ['L', 'EN']: valid_ending = True @@ -124,15 +117,13 @@ def check_bidi(label, check_ltr=False): return True -def check_initial_combiner(label): - +def check_initial_combiner(label: str) -> bool: if unicodedata.category(label[0])[0] == 'M': raise IDNAError('Label begins with an illegal combining character') return True -def check_hyphen_ok(label): - +def check_hyphen_ok(label: str) -> bool: if label[2:4] == '--': raise IDNAError('Label has disallowed hyphens in 3rd and 4th position') if label[0] == '-' or label[-1] == '-': @@ -140,14 +131,12 @@ def check_hyphen_ok(label): return True -def check_nfc(label): - +def check_nfc(label: str) -> None: if unicodedata.normalize('NFC', label) != label: raise IDNAError('Label must be in Normalization Form C') -def valid_contextj(label, pos): - +def valid_contextj(label: str, pos: int) -> bool: cp_value = ord(label[pos]) if cp_value == 0x200c: @@ -190,8 +179,7 @@ def valid_contextj(label, pos): return False -def valid_contexto(label, pos, exception=False): - +def valid_contexto(label: str, pos: int, exception: bool = False) -> bool: cp_value = ord(label[pos]) if cp_value == 0x00b7: @@ -212,7 +200,7 @@ def valid_contexto(label, pos, exception=False): elif cp_value == 0x30fb: for cp in label: - if cp == u'\u30fb': + if cp == '\u30fb': continue if _is_script(cp, 'Hiragana') or _is_script(cp, 'Katakana') or _is_script(cp, 'Han'): return True @@ -230,9 +218,10 @@ def valid_contexto(label, pos, exception=False): return False return True + return False -def check_label(label): +def check_label(label: Union[str, bytes, bytearray]) -> None: if isinstance(label, (bytes, bytearray)): label = label.decode('utf-8') if len(label) == 
0: @@ -249,98 +238,111 @@ def check_label(label): elif intranges_contain(cp_value, idnadata.codepoint_classes['CONTEXTJ']): try: if not valid_contextj(label, pos): - raise InvalidCodepointContext('Joiner {0} not allowed at position {1} in {2}'.format( + raise InvalidCodepointContext('Joiner {} not allowed at position {} in {}'.format( _unot(cp_value), pos+1, repr(label))) except ValueError: - raise IDNAError('Unknown codepoint adjacent to joiner {0} at position {1} in {2}'.format( + raise IDNAError('Unknown codepoint adjacent to joiner {} at position {} in {}'.format( _unot(cp_value), pos+1, repr(label))) elif intranges_contain(cp_value, idnadata.codepoint_classes['CONTEXTO']): if not valid_contexto(label, pos): - raise InvalidCodepointContext('Codepoint {0} not allowed at position {1} in {2}'.format(_unot(cp_value), pos+1, repr(label))) + raise InvalidCodepointContext('Codepoint {} not allowed at position {} in {}'.format(_unot(cp_value), pos+1, repr(label))) else: - raise InvalidCodepoint('Codepoint {0} at position {1} of {2} not allowed'.format(_unot(cp_value), pos+1, repr(label))) + raise InvalidCodepoint('Codepoint {} at position {} of {} not allowed'.format(_unot(cp_value), pos+1, repr(label))) check_bidi(label) -def alabel(label): - +def alabel(label: str) -> bytes: try: - label = label.encode('ascii') - ulabel(label) - if not valid_label_length(label): + label_bytes = label.encode('ascii') + ulabel(label_bytes) + if not valid_label_length(label_bytes): raise IDNAError('Label too long') - return label + return label_bytes except UnicodeEncodeError: pass if not label: raise IDNAError('No Input') - label = unicode(label) + label = str(label) check_label(label) - label = _punycode(label) - label = _alabel_prefix + label + label_bytes = _punycode(label) + label_bytes = _alabel_prefix + label_bytes - if not valid_label_length(label): + if not valid_label_length(label_bytes): raise IDNAError('Label too long') - return label - + return label_bytes -def ulabel(label): +def ulabel(label: Union[str, bytes, bytearray]) -> str: if not isinstance(label, (bytes, bytearray)): try: - label = label.encode('ascii') + label_bytes = label.encode('ascii') except UnicodeEncodeError: check_label(label) return label - - label = label.lower() - if label.startswith(_alabel_prefix): - label = label[len(_alabel_prefix):] else: - check_label(label) - return label.decode('ascii') + label_bytes = label + + label_bytes = label_bytes.lower() + if label_bytes.startswith(_alabel_prefix): + label_bytes = label_bytes[len(_alabel_prefix):] + if not label_bytes: + raise IDNAError('Malformed A-label, no Punycode eligible content found') + if label_bytes.decode('ascii')[-1] == '-': + raise IDNAError('A-label must not end with a hyphen') + else: + check_label(label_bytes) + return label_bytes.decode('ascii') - label = label.decode('punycode') + try: + label = label_bytes.decode('punycode') + except UnicodeError: + raise IDNAError('Invalid A-label') check_label(label) return label -def uts46_remap(domain, std3_rules=True, transitional=False): +def uts46_remap(domain: str, std3_rules: bool = True, transitional: bool = False) -> str: """Re-map the characters in the string according to UTS46 processing.""" from .uts46data import uts46data - output = u"" - try: - for pos, char in enumerate(domain): - code_point = ord(char) + output = '' + + for pos, char in enumerate(domain): + code_point = ord(char) + try: uts46row = uts46data[code_point if code_point < 256 else - bisect.bisect_left(uts46data, (code_point, "Z")) - 1] + 
bisect.bisect_left(uts46data, (code_point, 'Z')) - 1] status = uts46row[1] - replacement = uts46row[2] if len(uts46row) == 3 else None - if (status == "V" or - (status == "D" and not transitional) or - (status == "3" and not std3_rules and replacement is None)): + replacement = None # type: Optional[str] + if len(uts46row) == 3: + replacement = uts46row[2] # type: ignore + if (status == 'V' or + (status == 'D' and not transitional) or + (status == '3' and not std3_rules and replacement is None)): output += char - elif replacement is not None and (status == "M" or - (status == "3" and not std3_rules) or - (status == "D" and transitional)): + elif replacement is not None and (status == 'M' or + (status == '3' and not std3_rules) or + (status == 'D' and transitional)): output += replacement - elif status != "I": + elif status != 'I': raise IndexError() - return unicodedata.normalize("NFC", output) - except IndexError: - raise InvalidCodepoint( - "Codepoint {0} not allowed at position {1} in {2}".format( - _unot(code_point), pos + 1, repr(domain))) + except IndexError: + raise InvalidCodepoint( + 'Codepoint {} not allowed at position {} in {}'.format( + _unot(code_point), pos + 1, repr(domain))) + return unicodedata.normalize('NFC', output) -def encode(s, strict=False, uts46=False, std3_rules=False, transitional=False): +def encode(s: Union[str, bytes, bytearray], strict: bool = False, uts46: bool = False, std3_rules: bool = False, transitional: bool = False) -> bytes: if isinstance(s, (bytes, bytearray)): - s = s.decode("ascii") + try: + s = s.decode('ascii') + except UnicodeDecodeError: + raise IDNAError('should pass a unicode string to the function rather than a byte string.') if uts46: s = uts46_remap(s, std3_rules, transitional) trailing_dot = False @@ -368,10 +370,12 @@ def encode(s, strict=False, uts46=False, std3_rules=False, transitional=False): return s -def decode(s, strict=False, uts46=False, std3_rules=False): - - if isinstance(s, (bytes, bytearray)): - s = s.decode("ascii") +def decode(s: Union[str, bytes, bytearray], strict: bool = False, uts46: bool = False, std3_rules: bool = False) -> str: + try: + if isinstance(s, (bytes, bytearray)): + s = s.decode('ascii') + except UnicodeDecodeError: + raise IDNAError('Invalid ASCII in A-label') if uts46: s = uts46_remap(s, std3_rules, False) trailing_dot = False @@ -379,7 +383,7 @@ def decode(s, strict=False, uts46=False, std3_rules=False): if not strict: labels = _unicode_dots_re.split(s) else: - labels = s.split(u'.') + labels = s.split('.') if not labels or labels == ['']: raise IDNAError('Empty domain') if not labels[-1]: @@ -392,5 +396,5 @@ def decode(s, strict=False, uts46=False, std3_rules=False): else: raise IDNAError('Empty label') if trailing_dot: - result.append(u'') - return u'.'.join(result) + result.append('') + return '.'.join(result) diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/idna/idnadata.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/idna/idnadata.py old mode 100755 new mode 100644 index a80c959d..67db4625 --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/idna/idnadata.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/idna/idnadata.py @@ -1,6 +1,6 @@ # This file is automatically generated by tools/idna-data -__version__ = "11.0.0" +__version__ = '15.0.0' scripts = { 'Greek': ( 0x37000000374, @@ -48,16 +48,20 @@ 0x300700003008, 0x30210000302a, 0x30380000303c, - 0x340000004db6, - 0x4e0000009ff0, + 0x340000004dc0, + 0x4e000000a000, 0xf9000000fa6e, 0xfa700000fada, - 
0x200000002a6d7, - 0x2a7000002b735, + 0x16fe200016fe4, + 0x16ff000016ff2, + 0x200000002a6e0, + 0x2a7000002b73a, 0x2b7400002b81e, 0x2b8200002cea2, 0x2ceb00002ebe1, 0x2f8000002fa1e, + 0x300000003134b, + 0x31350000323b0, ), 'Hebrew': ( 0x591000005c8, @@ -73,7 +77,9 @@ 'Hiragana': ( 0x304100003097, 0x309d000030a0, - 0x1b0010001b11f, + 0x1b0010001b120, + 0x1b1320001b133, + 0x1b1500001b153, 0x1f2000001f201, ), 'Katakana': ( @@ -84,7 +90,13 @@ 0x330000003358, 0xff660000ff70, 0xff710000ff9e, + 0x1aff00001aff4, + 0x1aff50001affc, + 0x1affd0001afff, 0x1b0000001b001, + 0x1b1200001b123, + 0x1b1550001b156, + 0x1b1640001b168, ), } joining_types = { @@ -387,9 +399,9 @@ 0x853: 68, 0x854: 82, 0x855: 68, - 0x856: 85, - 0x857: 85, - 0x858: 85, + 0x856: 82, + 0x857: 82, + 0x858: 82, 0x860: 68, 0x861: 85, 0x862: 68, @@ -401,6 +413,39 @@ 0x868: 68, 0x869: 82, 0x86a: 82, + 0x870: 82, + 0x871: 82, + 0x872: 82, + 0x873: 82, + 0x874: 82, + 0x875: 82, + 0x876: 82, + 0x877: 82, + 0x878: 82, + 0x879: 82, + 0x87a: 82, + 0x87b: 82, + 0x87c: 82, + 0x87d: 82, + 0x87e: 82, + 0x87f: 82, + 0x880: 82, + 0x881: 82, + 0x882: 82, + 0x883: 67, + 0x884: 67, + 0x885: 67, + 0x886: 68, + 0x887: 85, + 0x888: 85, + 0x889: 68, + 0x88a: 68, + 0x88b: 68, + 0x88c: 68, + 0x88d: 68, + 0x88e: 82, + 0x890: 85, + 0x891: 85, 0x8a0: 68, 0x8a1: 68, 0x8a2: 68, @@ -422,6 +467,7 @@ 0x8b2: 82, 0x8b3: 68, 0x8b4: 68, + 0x8b5: 68, 0x8b6: 68, 0x8b7: 68, 0x8b8: 68, @@ -430,6 +476,17 @@ 0x8bb: 68, 0x8bc: 68, 0x8bd: 68, + 0x8be: 68, + 0x8bf: 68, + 0x8c0: 68, + 0x8c1: 68, + 0x8c2: 68, + 0x8c3: 68, + 0x8c4: 68, + 0x8c5: 68, + 0x8c6: 68, + 0x8c7: 68, + 0x8c8: 68, 0x8e2: 85, 0x1806: 85, 0x1807: 68, @@ -754,6 +811,52 @@ 0x10f52: 68, 0x10f53: 68, 0x10f54: 82, + 0x10f70: 68, + 0x10f71: 68, + 0x10f72: 68, + 0x10f73: 68, + 0x10f74: 82, + 0x10f75: 82, + 0x10f76: 68, + 0x10f77: 68, + 0x10f78: 68, + 0x10f79: 68, + 0x10f7a: 68, + 0x10f7b: 68, + 0x10f7c: 68, + 0x10f7d: 68, + 0x10f7e: 68, + 0x10f7f: 68, + 0x10f80: 68, + 0x10f81: 68, + 0x10fb0: 68, + 0x10fb1: 85, + 0x10fb2: 68, + 0x10fb3: 68, + 0x10fb4: 82, + 0x10fb5: 82, + 0x10fb6: 82, + 0x10fb7: 85, + 0x10fb8: 68, + 0x10fb9: 82, + 0x10fba: 82, + 0x10fbb: 68, + 0x10fbc: 68, + 0x10fbd: 82, + 0x10fbe: 68, + 0x10fbf: 68, + 0x10fc0: 85, + 0x10fc1: 68, + 0x10fc2: 82, + 0x10fc3: 82, + 0x10fc4: 68, + 0x10fc5: 85, + 0x10fc6: 85, + 0x10fc7: 85, + 0x10fc8: 85, + 0x10fc9: 82, + 0x10fca: 68, + 0x10fcb: 76, 0x110bd: 85, 0x110cd: 85, 0x1e900: 68, @@ -824,6 +927,7 @@ 0x1e941: 68, 0x1e942: 68, 0x1e943: 68, + 0x1e94b: 84, } codepoint_classes = { 'PVALID': ( @@ -1125,9 +1229,9 @@ 0x8000000082e, 0x8400000085c, 0x8600000086b, - 0x8a0000008b5, - 0x8b6000008be, - 0x8d3000008e2, + 0x87000000888, + 0x8890000088f, + 0x898000008e2, 0x8e300000958, 0x96000000964, 0x96600000970, @@ -1185,7 +1289,7 @@ 0xb3c00000b45, 0xb4700000b49, 0xb4b00000b4e, - 0xb5600000b58, + 0xb5500000b58, 0xb5f00000b64, 0xb6600000b70, 0xb7100000b72, @@ -1209,11 +1313,12 @@ 0xc0e00000c11, 0xc1200000c29, 0xc2a00000c3a, - 0xc3d00000c45, + 0xc3c00000c45, 0xc4600000c49, 0xc4a00000c4e, 0xc5500000c57, 0xc5800000c5b, + 0xc5d00000c5e, 0xc6000000c64, 0xc6600000c70, 0xc8000000c84, @@ -1226,12 +1331,11 @@ 0xcc600000cc9, 0xcca00000cce, 0xcd500000cd7, - 0xcde00000cdf, + 0xcdd00000cdf, 0xce000000ce4, 0xce600000cf0, - 0xcf100000cf3, - 0xd0000000d04, - 0xd0500000d0d, + 0xcf100000cf4, + 0xd0000000d0d, 0xd0e00000d11, 0xd1200000d45, 0xd4600000d49, @@ -1240,7 +1344,7 @@ 0xd5f00000d64, 0xd6600000d70, 0xd7a00000d80, - 0xd8200000d84, + 0xd8100000d84, 0xd8500000d97, 0xd9a00000db2, 0xdb300000dbc, @@ 
-1258,21 +1362,14 @@ 0xe5000000e5a, 0xe8100000e83, 0xe8400000e85, - 0xe8700000e89, - 0xe8a00000e8b, - 0xe8d00000e8e, - 0xe9400000e98, - 0xe9900000ea0, - 0xea100000ea4, + 0xe8600000e8b, + 0xe8c00000ea4, 0xea500000ea6, - 0xea700000ea8, - 0xeaa00000eac, - 0xead00000eb3, - 0xeb400000eba, - 0xebb00000ebe, + 0xea700000eb3, + 0xeb400000ebe, 0xec000000ec5, 0xec600000ec7, - 0xec800000ece, + 0xec800000ecf, 0xed000000eda, 0xede00000ee0, 0xf0000000f01, @@ -1331,9 +1428,8 @@ 0x16810000169b, 0x16a0000016eb, 0x16f1000016f9, - 0x17000000170d, - 0x170e00001715, - 0x172000001735, + 0x170000001716, + 0x171f00001735, 0x174000001754, 0x17600000176d, 0x176e00001771, @@ -1362,7 +1458,8 @@ 0x1a9000001a9a, 0x1aa700001aa8, 0x1ab000001abe, - 0x1b0000001b4c, + 0x1abf00001acf, + 0x1b0000001b4d, 0x1b5000001b5a, 0x1b6b00001b74, 0x1b8000001bf4, @@ -1370,15 +1467,14 @@ 0x1c4000001c4a, 0x1c4d00001c7e, 0x1cd000001cd3, - 0x1cd400001cfa, + 0x1cd400001cfb, 0x1d0000001d2c, 0x1d2f00001d30, 0x1d3b00001d3c, 0x1d4e00001d4f, 0x1d6b00001d78, 0x1d7900001d9b, - 0x1dc000001dfa, - 0x1dfb00001e00, + 0x1dc000001e00, 0x1e0100001e02, 0x1e0300001e04, 0x1e0500001e06, @@ -1527,7 +1623,7 @@ 0x1ff600001ff7, 0x214e0000214f, 0x218400002185, - 0x2c3000002c5f, + 0x2c3000002c60, 0x2c6100002c62, 0x2c6500002c67, 0x2c6800002c69, @@ -1613,11 +1709,10 @@ 0x30a1000030fb, 0x30fc000030ff, 0x310500003130, - 0x31a0000031bb, + 0x31a0000031c0, 0x31f000003200, - 0x340000004db6, - 0x4e0000009ff0, - 0xa0000000a48d, + 0x340000004dc0, + 0x4e000000a48d, 0xa4d00000a4fe, 0xa5000000a60d, 0xa6100000a62c, @@ -1727,8 +1822,22 @@ 0xa7b50000a7b6, 0xa7b70000a7b8, 0xa7b90000a7ba, - 0xa7f70000a7f8, + 0xa7bb0000a7bc, + 0xa7bd0000a7be, + 0xa7bf0000a7c0, + 0xa7c10000a7c2, + 0xa7c30000a7c4, + 0xa7c80000a7c9, + 0xa7ca0000a7cb, + 0xa7d10000a7d2, + 0xa7d30000a7d4, + 0xa7d50000a7d6, + 0xa7d70000a7d8, + 0xa7d90000a7da, + 0xa7f20000a7f5, + 0xa7f60000a7f8, 0xa7fa0000a828, + 0xa82c0000a82d, 0xa8400000a874, 0xa8800000a8c6, 0xa8d00000a8da, @@ -1753,7 +1862,7 @@ 0xab200000ab27, 0xab280000ab2f, 0xab300000ab5b, - 0xab600000ab66, + 0xab600000ab69, 0xabc00000abeb, 0xabec0000abee, 0xabf00000abfa, @@ -1791,9 +1900,16 @@ 0x104d8000104fc, 0x1050000010528, 0x1053000010564, + 0x10597000105a2, + 0x105a3000105b2, + 0x105b3000105ba, + 0x105bb000105bd, 0x1060000010737, 0x1074000010756, 0x1076000010768, + 0x1078000010786, + 0x10787000107b1, + 0x107b2000107bb, 0x1080000010806, 0x1080800010809, 0x1080a00010836, @@ -1827,26 +1943,33 @@ 0x10cc000010cf3, 0x10d0000010d28, 0x10d3000010d3a, - 0x10f0000010f1d, + 0x10e8000010eaa, + 0x10eab00010ead, + 0x10eb000010eb2, + 0x10efd00010f1d, 0x10f2700010f28, 0x10f3000010f51, + 0x10f7000010f86, + 0x10fb000010fc5, + 0x10fe000010ff7, 0x1100000011047, - 0x1106600011070, + 0x1106600011076, 0x1107f000110bb, + 0x110c2000110c3, 0x110d0000110e9, 0x110f0000110fa, 0x1110000011135, 0x1113600011140, - 0x1114400011147, + 0x1114400011148, 0x1115000011174, 0x1117600011177, 0x11180000111c5, 0x111c9000111cd, - 0x111d0000111db, + 0x111ce000111db, 0x111dc000111dd, 0x1120000011212, 0x1121300011238, - 0x1123e0001123f, + 0x1123e00011242, 0x1128000011287, 0x1128800011289, 0x1128a0001128e, @@ -1871,7 +1994,7 @@ 0x1137000011375, 0x114000001144b, 0x114500001145a, - 0x1145e0001145f, + 0x1145e00011462, 0x11480000114c6, 0x114c7000114c8, 0x114d0000114da, @@ -1881,20 +2004,31 @@ 0x1160000011641, 0x1164400011645, 0x116500001165a, - 0x11680000116b8, + 0x11680000116b9, 0x116c0000116ca, 0x117000001171b, 0x1171d0001172c, 0x117300001173a, + 0x1174000011747, 0x118000001183b, 0x118c0000118ea, - 0x118ff00011900, + 
0x118ff00011907, + 0x119090001190a, + 0x1190c00011914, + 0x1191500011917, + 0x1191800011936, + 0x1193700011939, + 0x1193b00011944, + 0x119500001195a, + 0x119a0000119a8, + 0x119aa000119d8, + 0x119da000119e2, + 0x119e3000119e5, 0x11a0000011a3f, 0x11a4700011a48, - 0x11a5000011a84, - 0x11a8600011a9a, + 0x11a5000011a9a, 0x11a9d00011a9e, - 0x11ac000011af9, + 0x11ab000011af9, 0x11c0000011c09, 0x11c0a00011c37, 0x11c3800011c41, @@ -1916,13 +2050,22 @@ 0x11d9300011d99, 0x11da000011daa, 0x11ee000011ef7, + 0x11f0000011f11, + 0x11f1200011f3b, + 0x11f3e00011f43, + 0x11f5000011f5a, + 0x11fb000011fb1, 0x120000001239a, 0x1248000012544, - 0x130000001342f, + 0x12f9000012ff1, + 0x1300000013430, + 0x1344000013456, 0x1440000014647, 0x1680000016a39, 0x16a4000016a5f, 0x16a6000016a6a, + 0x16a7000016abf, + 0x16ac000016aca, 0x16ad000016aee, 0x16af000016af5, 0x16b0000016b37, @@ -1931,39 +2074,68 @@ 0x16b6300016b78, 0x16b7d00016b90, 0x16e6000016e80, - 0x16f0000016f45, - 0x16f5000016f7f, + 0x16f0000016f4b, + 0x16f4f00016f88, 0x16f8f00016fa0, 0x16fe000016fe2, - 0x17000000187f2, - 0x1880000018af3, - 0x1b0000001b11f, + 0x16fe300016fe5, + 0x16ff000016ff2, + 0x17000000187f8, + 0x1880000018cd6, + 0x18d0000018d09, + 0x1aff00001aff4, + 0x1aff50001affc, + 0x1affd0001afff, + 0x1b0000001b123, + 0x1b1320001b133, + 0x1b1500001b153, + 0x1b1550001b156, + 0x1b1640001b168, 0x1b1700001b2fc, 0x1bc000001bc6b, 0x1bc700001bc7d, 0x1bc800001bc89, 0x1bc900001bc9a, 0x1bc9d0001bc9f, + 0x1cf000001cf2e, + 0x1cf300001cf47, 0x1da000001da37, 0x1da3b0001da6d, 0x1da750001da76, 0x1da840001da85, 0x1da9b0001daa0, 0x1daa10001dab0, + 0x1df000001df1f, + 0x1df250001df2b, 0x1e0000001e007, 0x1e0080001e019, 0x1e01b0001e022, 0x1e0230001e025, 0x1e0260001e02b, + 0x1e0300001e06e, + 0x1e08f0001e090, + 0x1e1000001e12d, + 0x1e1300001e13e, + 0x1e1400001e14a, + 0x1e14e0001e14f, + 0x1e2900001e2af, + 0x1e2c00001e2fa, + 0x1e4d00001e4fa, + 0x1e7e00001e7e7, + 0x1e7e80001e7ec, + 0x1e7ed0001e7ef, + 0x1e7f00001e7ff, 0x1e8000001e8c5, 0x1e8d00001e8d7, - 0x1e9220001e94b, + 0x1e9220001e94c, 0x1e9500001e95a, - 0x200000002a6d7, - 0x2a7000002b735, + 0x200000002a6e0, + 0x2a7000002b73a, 0x2b7400002b81e, 0x2b8200002cea2, 0x2ceb00002ebe1, + 0x300000003134b, + 0x31350000323b0, ), 'CONTEXTJ': ( 0x200c0000200e, diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/idna/intranges.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/idna/intranges.py old mode 100755 new mode 100644 index fa8a7356..6a43b047 --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/idna/intranges.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/idna/intranges.py @@ -6,8 +6,9 @@ """ import bisect +from typing import List, Tuple -def intranges_from_list(list_): +def intranges_from_list(list_: List[int]) -> Tuple[int, ...]: """Represent a list of integers as a sequence of ranges: ((start_0, end_0), (start_1, end_1), ...), such that the original integers are exactly those x such that start_i <= x < end_i for some i. 
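# Illustrative sketch (not part of the patch): how the packed ranges behave.
# The list [1, 2, 3, 7] collapses to the conceptual ranges (1, 4) and (7, 8);
# each range is stored as a single int, (start << 32) | end, so that
# bisect can search the tuple efficiently:
#
#   >>> from idna.intranges import intranges_from_list, intranges_contain
#   >>> ranges = intranges_from_list([1, 2, 3, 7])
#   >>> intranges_contain(2, ranges)   # 1 <= 2 < 4
#   True
#   >>> intranges_contain(5, ranges)   # falls in the gap between the ranges
#   False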
@@ -28,14 +29,14 @@ def intranges_from_list(list_): return tuple(ranges) -def _encode_range(start, end): +def _encode_range(start: int, end: int) -> int: return (start << 32) | end -def _decode_range(r): +def _decode_range(r: int) -> Tuple[int, int]: return (r >> 32), (r & ((1 << 32) - 1)) -def intranges_contain(int_, ranges): +def intranges_contain(int_: int, ranges: Tuple[int, ...]) -> bool: """Determine if `int_` falls into one of the ranges in `ranges`.""" tuple_ = _encode_range(int_, 0) pos = bisect.bisect_left(ranges, tuple_) diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/idna/package_data.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/idna/package_data.py old mode 100755 new mode 100644 index 257e8989..8501893b --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/idna/package_data.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/idna/package_data.py @@ -1,2 +1,2 @@ -__version__ = '2.8' +__version__ = '3.4' diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/idna/py.typed b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/idna/py.typed new file mode 100644 index 00000000..e69de29b diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/idna/uts46data.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/idna/uts46data.py old mode 100755 new mode 100644 index a68ed4c0..186796c1 --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/idna/uts46data.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/idna/uts46data.py @@ -1,11 +1,14 @@ # This file is automatically generated by tools/idna-data # vim: set fileencoding=utf-8 : +from typing import List, Tuple, Union + + """IDNA Mapping Table from UTS46.""" -__version__ = "11.0.0" -def _seg_0(): +__version__ = '15.0.0' +def _seg_0() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: return [ (0x0, '3'), (0x1, '3'), @@ -72,32 +75,32 @@ def _seg_0(): (0x3E, '3'), (0x3F, '3'), (0x40, '3'), - (0x41, 'M', u'a'), - (0x42, 'M', u'b'), - (0x43, 'M', u'c'), - (0x44, 'M', u'd'), - (0x45, 'M', u'e'), - (0x46, 'M', u'f'), - (0x47, 'M', u'g'), - (0x48, 'M', u'h'), - (0x49, 'M', u'i'), - (0x4A, 'M', u'j'), - (0x4B, 'M', u'k'), - (0x4C, 'M', u'l'), - (0x4D, 'M', u'm'), - (0x4E, 'M', u'n'), - (0x4F, 'M', u'o'), - (0x50, 'M', u'p'), - (0x51, 'M', u'q'), - (0x52, 'M', u'r'), - (0x53, 'M', u's'), - (0x54, 'M', u't'), - (0x55, 'M', u'u'), - (0x56, 'M', u'v'), - (0x57, 'M', u'w'), - (0x58, 'M', u'x'), - (0x59, 'M', u'y'), - (0x5A, 'M', u'z'), + (0x41, 'M', 'a'), + (0x42, 'M', 'b'), + (0x43, 'M', 'c'), + (0x44, 'M', 'd'), + (0x45, 'M', 'e'), + (0x46, 'M', 'f'), + (0x47, 'M', 'g'), + (0x48, 'M', 'h'), + (0x49, 'M', 'i'), + (0x4A, 'M', 'j'), + (0x4B, 'M', 'k'), + (0x4C, 'M', 'l'), + (0x4D, 'M', 'm'), + (0x4E, 'M', 'n'), + (0x4F, 'M', 'o'), + (0x50, 'M', 'p'), + (0x51, 'M', 'q'), + (0x52, 'M', 'r'), + (0x53, 'M', 's'), + (0x54, 'M', 't'), + (0x55, 'M', 'u'), + (0x56, 'M', 'v'), + (0x57, 'M', 'w'), + (0x58, 'M', 'x'), + (0x59, 'M', 'y'), + (0x5A, 'M', 'z'), (0x5B, '3'), (0x5C, '3'), (0x5D, '3'), @@ -109,7 +112,7 @@ def _seg_0(): (0x63, 'V'), ] -def _seg_1(): +def _seg_1() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: return [ (0x64, 'V'), (0x65, 'V'), @@ -171,7 +174,7 @@ def _seg_1(): (0x9D, 'X'), (0x9E, 'X'), (0x9F, 'X'), - (0xA0, '3', u' '), + (0xA0, '3', ' '), (0xA1, 'V'), (0xA2, 'V'), (0xA3, 'V'), @@ -179,66 +182,66 @@ def _seg_1(): (0xA5, 'V'), (0xA6, 'V'), (0xA7, 'V'), - (0xA8, '3', u' ̈'), + (0xA8, '3', ' ̈'), (0xA9, 'V'), - (0xAA, 'M', u'a'), + (0xAA, 'M', 'a'), (0xAB, 'V'), (0xAC, 
'V'), (0xAD, 'I'), (0xAE, 'V'), - (0xAF, '3', u' ̄'), + (0xAF, '3', ' ̄'), (0xB0, 'V'), (0xB1, 'V'), - (0xB2, 'M', u'2'), - (0xB3, 'M', u'3'), - (0xB4, '3', u' ́'), - (0xB5, 'M', u'μ'), + (0xB2, 'M', '2'), + (0xB3, 'M', '3'), + (0xB4, '3', ' ́'), + (0xB5, 'M', 'μ'), (0xB6, 'V'), (0xB7, 'V'), - (0xB8, '3', u' ̧'), - (0xB9, 'M', u'1'), - (0xBA, 'M', u'o'), + (0xB8, '3', ' ̧'), + (0xB9, 'M', '1'), + (0xBA, 'M', 'o'), (0xBB, 'V'), - (0xBC, 'M', u'1⁄4'), - (0xBD, 'M', u'1⁄2'), - (0xBE, 'M', u'3⁄4'), + (0xBC, 'M', '1⁄4'), + (0xBD, 'M', '1⁄2'), + (0xBE, 'M', '3⁄4'), (0xBF, 'V'), - (0xC0, 'M', u'à'), - (0xC1, 'M', u'á'), - (0xC2, 'M', u'â'), - (0xC3, 'M', u'ã'), - (0xC4, 'M', u'ä'), - (0xC5, 'M', u'å'), - (0xC6, 'M', u'æ'), - (0xC7, 'M', u'ç'), + (0xC0, 'M', 'à'), + (0xC1, 'M', 'á'), + (0xC2, 'M', 'â'), + (0xC3, 'M', 'ã'), + (0xC4, 'M', 'ä'), + (0xC5, 'M', 'å'), + (0xC6, 'M', 'æ'), + (0xC7, 'M', 'ç'), ] -def _seg_2(): +def _seg_2() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: return [ - (0xC8, 'M', u'è'), - (0xC9, 'M', u'é'), - (0xCA, 'M', u'ê'), - (0xCB, 'M', u'ë'), - (0xCC, 'M', u'ì'), - (0xCD, 'M', u'í'), - (0xCE, 'M', u'î'), - (0xCF, 'M', u'ï'), - (0xD0, 'M', u'ð'), - (0xD1, 'M', u'ñ'), - (0xD2, 'M', u'ò'), - (0xD3, 'M', u'ó'), - (0xD4, 'M', u'ô'), - (0xD5, 'M', u'õ'), - (0xD6, 'M', u'ö'), + (0xC8, 'M', 'è'), + (0xC9, 'M', 'é'), + (0xCA, 'M', 'ê'), + (0xCB, 'M', 'ë'), + (0xCC, 'M', 'ì'), + (0xCD, 'M', 'í'), + (0xCE, 'M', 'î'), + (0xCF, 'M', 'ï'), + (0xD0, 'M', 'ð'), + (0xD1, 'M', 'ñ'), + (0xD2, 'M', 'ò'), + (0xD3, 'M', 'ó'), + (0xD4, 'M', 'ô'), + (0xD5, 'M', 'õ'), + (0xD6, 'M', 'ö'), (0xD7, 'V'), - (0xD8, 'M', u'ø'), - (0xD9, 'M', u'ù'), - (0xDA, 'M', u'ú'), - (0xDB, 'M', u'û'), - (0xDC, 'M', u'ü'), - (0xDD, 'M', u'ý'), - (0xDE, 'M', u'þ'), - (0xDF, 'D', u'ss'), + (0xD8, 'M', 'ø'), + (0xD9, 'M', 'ù'), + (0xDA, 'M', 'ú'), + (0xDB, 'M', 'û'), + (0xDC, 'M', 'ü'), + (0xDD, 'M', 'ý'), + (0xDE, 'M', 'þ'), + (0xDF, 'D', 'ss'), (0xE0, 'V'), (0xE1, 'V'), (0xE2, 'V'), @@ -271,765 +274,765 @@ def _seg_2(): (0xFD, 'V'), (0xFE, 'V'), (0xFF, 'V'), - (0x100, 'M', u'ā'), + (0x100, 'M', 'ā'), (0x101, 'V'), - (0x102, 'M', u'ă'), + (0x102, 'M', 'ă'), (0x103, 'V'), - (0x104, 'M', u'ą'), + (0x104, 'M', 'ą'), (0x105, 'V'), - (0x106, 'M', u'ć'), + (0x106, 'M', 'ć'), (0x107, 'V'), - (0x108, 'M', u'ĉ'), + (0x108, 'M', 'ĉ'), (0x109, 'V'), - (0x10A, 'M', u'ċ'), + (0x10A, 'M', 'ċ'), (0x10B, 'V'), - (0x10C, 'M', u'č'), + (0x10C, 'M', 'č'), (0x10D, 'V'), - (0x10E, 'M', u'ď'), + (0x10E, 'M', 'ď'), (0x10F, 'V'), - (0x110, 'M', u'đ'), + (0x110, 'M', 'đ'), (0x111, 'V'), - (0x112, 'M', u'ē'), + (0x112, 'M', 'ē'), (0x113, 'V'), - (0x114, 'M', u'ĕ'), + (0x114, 'M', 'ĕ'), (0x115, 'V'), - (0x116, 'M', u'ė'), + (0x116, 'M', 'ė'), (0x117, 'V'), - (0x118, 'M', u'ę'), + (0x118, 'M', 'ę'), (0x119, 'V'), - (0x11A, 'M', u'ě'), + (0x11A, 'M', 'ě'), (0x11B, 'V'), - (0x11C, 'M', u'ĝ'), + (0x11C, 'M', 'ĝ'), (0x11D, 'V'), - (0x11E, 'M', u'ğ'), + (0x11E, 'M', 'ğ'), (0x11F, 'V'), - (0x120, 'M', u'ġ'), + (0x120, 'M', 'ġ'), (0x121, 'V'), - (0x122, 'M', u'ģ'), + (0x122, 'M', 'ģ'), (0x123, 'V'), - (0x124, 'M', u'ĥ'), + (0x124, 'M', 'ĥ'), (0x125, 'V'), - (0x126, 'M', u'ħ'), + (0x126, 'M', 'ħ'), (0x127, 'V'), - (0x128, 'M', u'ĩ'), + (0x128, 'M', 'ĩ'), (0x129, 'V'), - (0x12A, 'M', u'ī'), + (0x12A, 'M', 'ī'), (0x12B, 'V'), ] -def _seg_3(): +def _seg_3() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: return [ - (0x12C, 'M', u'ĭ'), + (0x12C, 'M', 'ĭ'), (0x12D, 'V'), - (0x12E, 'M', u'į'), + (0x12E, 'M', 'į'), (0x12F, 'V'), - (0x130, 'M', 
u'i̇'), + (0x130, 'M', 'i̇'), (0x131, 'V'), - (0x132, 'M', u'ij'), - (0x134, 'M', u'ĵ'), + (0x132, 'M', 'ij'), + (0x134, 'M', 'ĵ'), (0x135, 'V'), - (0x136, 'M', u'ķ'), + (0x136, 'M', 'ķ'), (0x137, 'V'), - (0x139, 'M', u'ĺ'), + (0x139, 'M', 'ĺ'), (0x13A, 'V'), - (0x13B, 'M', u'ļ'), + (0x13B, 'M', 'ļ'), (0x13C, 'V'), - (0x13D, 'M', u'ľ'), + (0x13D, 'M', 'ľ'), (0x13E, 'V'), - (0x13F, 'M', u'l·'), - (0x141, 'M', u'ł'), + (0x13F, 'M', 'l·'), + (0x141, 'M', 'ł'), (0x142, 'V'), - (0x143, 'M', u'ń'), + (0x143, 'M', 'ń'), (0x144, 'V'), - (0x145, 'M', u'ņ'), + (0x145, 'M', 'ņ'), (0x146, 'V'), - (0x147, 'M', u'ň'), + (0x147, 'M', 'ň'), (0x148, 'V'), - (0x149, 'M', u'ʼn'), - (0x14A, 'M', u'ŋ'), + (0x149, 'M', 'ʼn'), + (0x14A, 'M', 'ŋ'), (0x14B, 'V'), - (0x14C, 'M', u'ō'), + (0x14C, 'M', 'ō'), (0x14D, 'V'), - (0x14E, 'M', u'ŏ'), + (0x14E, 'M', 'ŏ'), (0x14F, 'V'), - (0x150, 'M', u'ő'), + (0x150, 'M', 'ő'), (0x151, 'V'), - (0x152, 'M', u'œ'), + (0x152, 'M', 'œ'), (0x153, 'V'), - (0x154, 'M', u'ŕ'), + (0x154, 'M', 'ŕ'), (0x155, 'V'), - (0x156, 'M', u'ŗ'), + (0x156, 'M', 'ŗ'), (0x157, 'V'), - (0x158, 'M', u'ř'), + (0x158, 'M', 'ř'), (0x159, 'V'), - (0x15A, 'M', u'ś'), + (0x15A, 'M', 'ś'), (0x15B, 'V'), - (0x15C, 'M', u'ŝ'), + (0x15C, 'M', 'ŝ'), (0x15D, 'V'), - (0x15E, 'M', u'ş'), + (0x15E, 'M', 'ş'), (0x15F, 'V'), - (0x160, 'M', u'š'), + (0x160, 'M', 'š'), (0x161, 'V'), - (0x162, 'M', u'ţ'), + (0x162, 'M', 'ţ'), (0x163, 'V'), - (0x164, 'M', u'ť'), + (0x164, 'M', 'ť'), (0x165, 'V'), - (0x166, 'M', u'ŧ'), + (0x166, 'M', 'ŧ'), (0x167, 'V'), - (0x168, 'M', u'ũ'), + (0x168, 'M', 'ũ'), (0x169, 'V'), - (0x16A, 'M', u'ū'), + (0x16A, 'M', 'ū'), (0x16B, 'V'), - (0x16C, 'M', u'ŭ'), + (0x16C, 'M', 'ŭ'), (0x16D, 'V'), - (0x16E, 'M', u'ů'), + (0x16E, 'M', 'ů'), (0x16F, 'V'), - (0x170, 'M', u'ű'), + (0x170, 'M', 'ű'), (0x171, 'V'), - (0x172, 'M', u'ų'), + (0x172, 'M', 'ų'), (0x173, 'V'), - (0x174, 'M', u'ŵ'), + (0x174, 'M', 'ŵ'), (0x175, 'V'), - (0x176, 'M', u'ŷ'), + (0x176, 'M', 'ŷ'), (0x177, 'V'), - (0x178, 'M', u'ÿ'), - (0x179, 'M', u'ź'), + (0x178, 'M', 'ÿ'), + (0x179, 'M', 'ź'), (0x17A, 'V'), - (0x17B, 'M', u'ż'), + (0x17B, 'M', 'ż'), (0x17C, 'V'), - (0x17D, 'M', u'ž'), + (0x17D, 'M', 'ž'), (0x17E, 'V'), - (0x17F, 'M', u's'), + (0x17F, 'M', 's'), (0x180, 'V'), - (0x181, 'M', u'ɓ'), - (0x182, 'M', u'ƃ'), + (0x181, 'M', 'ɓ'), + (0x182, 'M', 'ƃ'), (0x183, 'V'), - (0x184, 'M', u'ƅ'), + (0x184, 'M', 'ƅ'), (0x185, 'V'), - (0x186, 'M', u'ɔ'), - (0x187, 'M', u'ƈ'), + (0x186, 'M', 'ɔ'), + (0x187, 'M', 'ƈ'), (0x188, 'V'), - (0x189, 'M', u'ɖ'), - (0x18A, 'M', u'ɗ'), - (0x18B, 'M', u'ƌ'), + (0x189, 'M', 'ɖ'), + (0x18A, 'M', 'ɗ'), + (0x18B, 'M', 'ƌ'), (0x18C, 'V'), - (0x18E, 'M', u'ǝ'), - (0x18F, 'M', u'ə'), - (0x190, 'M', u'ɛ'), - (0x191, 'M', u'ƒ'), + (0x18E, 'M', 'ǝ'), + (0x18F, 'M', 'ə'), + (0x190, 'M', 'ɛ'), + (0x191, 'M', 'ƒ'), (0x192, 'V'), - (0x193, 'M', u'ɠ'), + (0x193, 'M', 'ɠ'), ] -def _seg_4(): +def _seg_4() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: return [ - (0x194, 'M', u'ɣ'), + (0x194, 'M', 'ɣ'), (0x195, 'V'), - (0x196, 'M', u'ɩ'), - (0x197, 'M', u'ɨ'), - (0x198, 'M', u'ƙ'), + (0x196, 'M', 'ɩ'), + (0x197, 'M', 'ɨ'), + (0x198, 'M', 'ƙ'), (0x199, 'V'), - (0x19C, 'M', u'ɯ'), - (0x19D, 'M', u'ɲ'), + (0x19C, 'M', 'ɯ'), + (0x19D, 'M', 'ɲ'), (0x19E, 'V'), - (0x19F, 'M', u'ɵ'), - (0x1A0, 'M', u'ơ'), + (0x19F, 'M', 'ɵ'), + (0x1A0, 'M', 'ơ'), (0x1A1, 'V'), - (0x1A2, 'M', u'ƣ'), + (0x1A2, 'M', 'ƣ'), (0x1A3, 'V'), - (0x1A4, 'M', u'ƥ'), + (0x1A4, 'M', 'ƥ'), (0x1A5, 'V'), - (0x1A6, 'M', u'ʀ'), - (0x1A7, 'M', 
u'ƨ'), + (0x1A6, 'M', 'ʀ'), + (0x1A7, 'M', 'ƨ'), (0x1A8, 'V'), - (0x1A9, 'M', u'ʃ'), + (0x1A9, 'M', 'ʃ'), (0x1AA, 'V'), - (0x1AC, 'M', u'ƭ'), + (0x1AC, 'M', 'ƭ'), (0x1AD, 'V'), - (0x1AE, 'M', u'ʈ'), - (0x1AF, 'M', u'ư'), + (0x1AE, 'M', 'ʈ'), + (0x1AF, 'M', 'ư'), (0x1B0, 'V'), - (0x1B1, 'M', u'ʊ'), - (0x1B2, 'M', u'ʋ'), - (0x1B3, 'M', u'ƴ'), + (0x1B1, 'M', 'ʊ'), + (0x1B2, 'M', 'ʋ'), + (0x1B3, 'M', 'ƴ'), (0x1B4, 'V'), - (0x1B5, 'M', u'ƶ'), + (0x1B5, 'M', 'ƶ'), (0x1B6, 'V'), - (0x1B7, 'M', u'ʒ'), - (0x1B8, 'M', u'ƹ'), + (0x1B7, 'M', 'ʒ'), + (0x1B8, 'M', 'ƹ'), (0x1B9, 'V'), - (0x1BC, 'M', u'ƽ'), + (0x1BC, 'M', 'ƽ'), (0x1BD, 'V'), - (0x1C4, 'M', u'dž'), - (0x1C7, 'M', u'lj'), - (0x1CA, 'M', u'nj'), - (0x1CD, 'M', u'ǎ'), + (0x1C4, 'M', 'dž'), + (0x1C7, 'M', 'lj'), + (0x1CA, 'M', 'nj'), + (0x1CD, 'M', 'ǎ'), (0x1CE, 'V'), - (0x1CF, 'M', u'ǐ'), + (0x1CF, 'M', 'ǐ'), (0x1D0, 'V'), - (0x1D1, 'M', u'ǒ'), + (0x1D1, 'M', 'ǒ'), (0x1D2, 'V'), - (0x1D3, 'M', u'ǔ'), + (0x1D3, 'M', 'ǔ'), (0x1D4, 'V'), - (0x1D5, 'M', u'ǖ'), + (0x1D5, 'M', 'ǖ'), (0x1D6, 'V'), - (0x1D7, 'M', u'ǘ'), + (0x1D7, 'M', 'ǘ'), (0x1D8, 'V'), - (0x1D9, 'M', u'ǚ'), + (0x1D9, 'M', 'ǚ'), (0x1DA, 'V'), - (0x1DB, 'M', u'ǜ'), + (0x1DB, 'M', 'ǜ'), (0x1DC, 'V'), - (0x1DE, 'M', u'ǟ'), + (0x1DE, 'M', 'ǟ'), (0x1DF, 'V'), - (0x1E0, 'M', u'ǡ'), + (0x1E0, 'M', 'ǡ'), (0x1E1, 'V'), - (0x1E2, 'M', u'ǣ'), + (0x1E2, 'M', 'ǣ'), (0x1E3, 'V'), - (0x1E4, 'M', u'ǥ'), + (0x1E4, 'M', 'ǥ'), (0x1E5, 'V'), - (0x1E6, 'M', u'ǧ'), + (0x1E6, 'M', 'ǧ'), (0x1E7, 'V'), - (0x1E8, 'M', u'ǩ'), + (0x1E8, 'M', 'ǩ'), (0x1E9, 'V'), - (0x1EA, 'M', u'ǫ'), + (0x1EA, 'M', 'ǫ'), (0x1EB, 'V'), - (0x1EC, 'M', u'ǭ'), + (0x1EC, 'M', 'ǭ'), (0x1ED, 'V'), - (0x1EE, 'M', u'ǯ'), + (0x1EE, 'M', 'ǯ'), (0x1EF, 'V'), - (0x1F1, 'M', u'dz'), - (0x1F4, 'M', u'ǵ'), + (0x1F1, 'M', 'dz'), + (0x1F4, 'M', 'ǵ'), (0x1F5, 'V'), - (0x1F6, 'M', u'ƕ'), - (0x1F7, 'M', u'ƿ'), - (0x1F8, 'M', u'ǹ'), + (0x1F6, 'M', 'ƕ'), + (0x1F7, 'M', 'ƿ'), + (0x1F8, 'M', 'ǹ'), (0x1F9, 'V'), - (0x1FA, 'M', u'ǻ'), + (0x1FA, 'M', 'ǻ'), (0x1FB, 'V'), - (0x1FC, 'M', u'ǽ'), + (0x1FC, 'M', 'ǽ'), (0x1FD, 'V'), - (0x1FE, 'M', u'ǿ'), + (0x1FE, 'M', 'ǿ'), (0x1FF, 'V'), - (0x200, 'M', u'ȁ'), + (0x200, 'M', 'ȁ'), (0x201, 'V'), - (0x202, 'M', u'ȃ'), + (0x202, 'M', 'ȃ'), (0x203, 'V'), - (0x204, 'M', u'ȅ'), + (0x204, 'M', 'ȅ'), (0x205, 'V'), - (0x206, 'M', u'ȇ'), + (0x206, 'M', 'ȇ'), (0x207, 'V'), - (0x208, 'M', u'ȉ'), + (0x208, 'M', 'ȉ'), (0x209, 'V'), - (0x20A, 'M', u'ȋ'), + (0x20A, 'M', 'ȋ'), (0x20B, 'V'), - (0x20C, 'M', u'ȍ'), + (0x20C, 'M', 'ȍ'), ] -def _seg_5(): +def _seg_5() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: return [ (0x20D, 'V'), - (0x20E, 'M', u'ȏ'), + (0x20E, 'M', 'ȏ'), (0x20F, 'V'), - (0x210, 'M', u'ȑ'), + (0x210, 'M', 'ȑ'), (0x211, 'V'), - (0x212, 'M', u'ȓ'), + (0x212, 'M', 'ȓ'), (0x213, 'V'), - (0x214, 'M', u'ȕ'), + (0x214, 'M', 'ȕ'), (0x215, 'V'), - (0x216, 'M', u'ȗ'), + (0x216, 'M', 'ȗ'), (0x217, 'V'), - (0x218, 'M', u'ș'), + (0x218, 'M', 'ș'), (0x219, 'V'), - (0x21A, 'M', u'ț'), + (0x21A, 'M', 'ț'), (0x21B, 'V'), - (0x21C, 'M', u'ȝ'), + (0x21C, 'M', 'ȝ'), (0x21D, 'V'), - (0x21E, 'M', u'ȟ'), + (0x21E, 'M', 'ȟ'), (0x21F, 'V'), - (0x220, 'M', u'ƞ'), + (0x220, 'M', 'ƞ'), (0x221, 'V'), - (0x222, 'M', u'ȣ'), + (0x222, 'M', 'ȣ'), (0x223, 'V'), - (0x224, 'M', u'ȥ'), + (0x224, 'M', 'ȥ'), (0x225, 'V'), - (0x226, 'M', u'ȧ'), + (0x226, 'M', 'ȧ'), (0x227, 'V'), - (0x228, 'M', u'ȩ'), + (0x228, 'M', 'ȩ'), (0x229, 'V'), - (0x22A, 'M', u'ȫ'), + (0x22A, 'M', 'ȫ'), (0x22B, 'V'), - (0x22C, 'M', u'ȭ'), + (0x22C, 'M', 'ȭ'), (0x22D, 
'V'), - (0x22E, 'M', u'ȯ'), + (0x22E, 'M', 'ȯ'), (0x22F, 'V'), - (0x230, 'M', u'ȱ'), + (0x230, 'M', 'ȱ'), (0x231, 'V'), - (0x232, 'M', u'ȳ'), + (0x232, 'M', 'ȳ'), (0x233, 'V'), - (0x23A, 'M', u'ⱥ'), - (0x23B, 'M', u'ȼ'), + (0x23A, 'M', 'ⱥ'), + (0x23B, 'M', 'ȼ'), (0x23C, 'V'), - (0x23D, 'M', u'ƚ'), - (0x23E, 'M', u'ⱦ'), + (0x23D, 'M', 'ƚ'), + (0x23E, 'M', 'ⱦ'), (0x23F, 'V'), - (0x241, 'M', u'ɂ'), + (0x241, 'M', 'ɂ'), (0x242, 'V'), - (0x243, 'M', u'ƀ'), - (0x244, 'M', u'ʉ'), - (0x245, 'M', u'ʌ'), - (0x246, 'M', u'ɇ'), + (0x243, 'M', 'ƀ'), + (0x244, 'M', 'ʉ'), + (0x245, 'M', 'ʌ'), + (0x246, 'M', 'ɇ'), (0x247, 'V'), - (0x248, 'M', u'ɉ'), + (0x248, 'M', 'ɉ'), (0x249, 'V'), - (0x24A, 'M', u'ɋ'), + (0x24A, 'M', 'ɋ'), (0x24B, 'V'), - (0x24C, 'M', u'ɍ'), + (0x24C, 'M', 'ɍ'), (0x24D, 'V'), - (0x24E, 'M', u'ɏ'), + (0x24E, 'M', 'ɏ'), (0x24F, 'V'), - (0x2B0, 'M', u'h'), - (0x2B1, 'M', u'ɦ'), - (0x2B2, 'M', u'j'), - (0x2B3, 'M', u'r'), - (0x2B4, 'M', u'ɹ'), - (0x2B5, 'M', u'ɻ'), - (0x2B6, 'M', u'ʁ'), - (0x2B7, 'M', u'w'), - (0x2B8, 'M', u'y'), + (0x2B0, 'M', 'h'), + (0x2B1, 'M', 'ɦ'), + (0x2B2, 'M', 'j'), + (0x2B3, 'M', 'r'), + (0x2B4, 'M', 'ɹ'), + (0x2B5, 'M', 'ɻ'), + (0x2B6, 'M', 'ʁ'), + (0x2B7, 'M', 'w'), + (0x2B8, 'M', 'y'), (0x2B9, 'V'), - (0x2D8, '3', u' ̆'), - (0x2D9, '3', u' ̇'), - (0x2DA, '3', u' ̊'), - (0x2DB, '3', u' ̨'), - (0x2DC, '3', u' ̃'), - (0x2DD, '3', u' ̋'), + (0x2D8, '3', ' ̆'), + (0x2D9, '3', ' ̇'), + (0x2DA, '3', ' ̊'), + (0x2DB, '3', ' ̨'), + (0x2DC, '3', ' ̃'), + (0x2DD, '3', ' ̋'), (0x2DE, 'V'), - (0x2E0, 'M', u'ɣ'), - (0x2E1, 'M', u'l'), - (0x2E2, 'M', u's'), - (0x2E3, 'M', u'x'), - (0x2E4, 'M', u'ʕ'), + (0x2E0, 'M', 'ɣ'), + (0x2E1, 'M', 'l'), + (0x2E2, 'M', 's'), + (0x2E3, 'M', 'x'), + (0x2E4, 'M', 'ʕ'), (0x2E5, 'V'), - (0x340, 'M', u'̀'), - (0x341, 'M', u'́'), + (0x340, 'M', '̀'), + (0x341, 'M', '́'), (0x342, 'V'), - (0x343, 'M', u'̓'), - (0x344, 'M', u'̈́'), - (0x345, 'M', u'ι'), + (0x343, 'M', '̓'), + (0x344, 'M', '̈́'), + (0x345, 'M', 'ι'), (0x346, 'V'), (0x34F, 'I'), (0x350, 'V'), - (0x370, 'M', u'ͱ'), + (0x370, 'M', 'ͱ'), (0x371, 'V'), - (0x372, 'M', u'ͳ'), + (0x372, 'M', 'ͳ'), (0x373, 'V'), - (0x374, 'M', u'ʹ'), + (0x374, 'M', 'ʹ'), (0x375, 'V'), - (0x376, 'M', u'ͷ'), + (0x376, 'M', 'ͷ'), (0x377, 'V'), ] -def _seg_6(): +def _seg_6() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: return [ (0x378, 'X'), - (0x37A, '3', u' ι'), + (0x37A, '3', ' ι'), (0x37B, 'V'), - (0x37E, '3', u';'), - (0x37F, 'M', u'ϳ'), + (0x37E, '3', ';'), + (0x37F, 'M', 'ϳ'), (0x380, 'X'), - (0x384, '3', u' ́'), - (0x385, '3', u' ̈́'), - (0x386, 'M', u'ά'), - (0x387, 'M', u'·'), - (0x388, 'M', u'έ'), - (0x389, 'M', u'ή'), - (0x38A, 'M', u'ί'), + (0x384, '3', ' ́'), + (0x385, '3', ' ̈́'), + (0x386, 'M', 'ά'), + (0x387, 'M', '·'), + (0x388, 'M', 'έ'), + (0x389, 'M', 'ή'), + (0x38A, 'M', 'ί'), (0x38B, 'X'), - (0x38C, 'M', u'ό'), + (0x38C, 'M', 'ό'), (0x38D, 'X'), - (0x38E, 'M', u'ύ'), - (0x38F, 'M', u'ώ'), + (0x38E, 'M', 'ύ'), + (0x38F, 'M', 'ώ'), (0x390, 'V'), - (0x391, 'M', u'α'), - (0x392, 'M', u'β'), - (0x393, 'M', u'γ'), - (0x394, 'M', u'δ'), - (0x395, 'M', u'ε'), - (0x396, 'M', u'ζ'), - (0x397, 'M', u'η'), - (0x398, 'M', u'θ'), - (0x399, 'M', u'ι'), - (0x39A, 'M', u'κ'), - (0x39B, 'M', u'λ'), - (0x39C, 'M', u'μ'), - (0x39D, 'M', u'ν'), - (0x39E, 'M', u'ξ'), - (0x39F, 'M', u'ο'), - (0x3A0, 'M', u'π'), - (0x3A1, 'M', u'ρ'), + (0x391, 'M', 'α'), + (0x392, 'M', 'β'), + (0x393, 'M', 'γ'), + (0x394, 'M', 'δ'), + (0x395, 'M', 'ε'), + (0x396, 'M', 'ζ'), + (0x397, 'M', 'η'), + (0x398, 'M', 'θ'), + 
(0x399, 'M', 'ι'), + (0x39A, 'M', 'κ'), + (0x39B, 'M', 'λ'), + (0x39C, 'M', 'μ'), + (0x39D, 'M', 'ν'), + (0x39E, 'M', 'ξ'), + (0x39F, 'M', 'ο'), + (0x3A0, 'M', 'π'), + (0x3A1, 'M', 'ρ'), (0x3A2, 'X'), - (0x3A3, 'M', u'σ'), - (0x3A4, 'M', u'τ'), - (0x3A5, 'M', u'υ'), - (0x3A6, 'M', u'φ'), - (0x3A7, 'M', u'χ'), - (0x3A8, 'M', u'ψ'), - (0x3A9, 'M', u'ω'), - (0x3AA, 'M', u'ϊ'), - (0x3AB, 'M', u'ϋ'), + (0x3A3, 'M', 'σ'), + (0x3A4, 'M', 'τ'), + (0x3A5, 'M', 'υ'), + (0x3A6, 'M', 'φ'), + (0x3A7, 'M', 'χ'), + (0x3A8, 'M', 'ψ'), + (0x3A9, 'M', 'ω'), + (0x3AA, 'M', 'ϊ'), + (0x3AB, 'M', 'ϋ'), (0x3AC, 'V'), - (0x3C2, 'D', u'σ'), + (0x3C2, 'D', 'σ'), (0x3C3, 'V'), - (0x3CF, 'M', u'ϗ'), - (0x3D0, 'M', u'β'), - (0x3D1, 'M', u'θ'), - (0x3D2, 'M', u'υ'), - (0x3D3, 'M', u'ύ'), - (0x3D4, 'M', u'ϋ'), - (0x3D5, 'M', u'φ'), - (0x3D6, 'M', u'π'), + (0x3CF, 'M', 'ϗ'), + (0x3D0, 'M', 'β'), + (0x3D1, 'M', 'θ'), + (0x3D2, 'M', 'υ'), + (0x3D3, 'M', 'ύ'), + (0x3D4, 'M', 'ϋ'), + (0x3D5, 'M', 'φ'), + (0x3D6, 'M', 'π'), (0x3D7, 'V'), - (0x3D8, 'M', u'ϙ'), + (0x3D8, 'M', 'ϙ'), (0x3D9, 'V'), - (0x3DA, 'M', u'ϛ'), + (0x3DA, 'M', 'ϛ'), (0x3DB, 'V'), - (0x3DC, 'M', u'ϝ'), + (0x3DC, 'M', 'ϝ'), (0x3DD, 'V'), - (0x3DE, 'M', u'ϟ'), + (0x3DE, 'M', 'ϟ'), (0x3DF, 'V'), - (0x3E0, 'M', u'ϡ'), + (0x3E0, 'M', 'ϡ'), (0x3E1, 'V'), - (0x3E2, 'M', u'ϣ'), + (0x3E2, 'M', 'ϣ'), (0x3E3, 'V'), - (0x3E4, 'M', u'ϥ'), + (0x3E4, 'M', 'ϥ'), (0x3E5, 'V'), - (0x3E6, 'M', u'ϧ'), + (0x3E6, 'M', 'ϧ'), (0x3E7, 'V'), - (0x3E8, 'M', u'ϩ'), + (0x3E8, 'M', 'ϩ'), (0x3E9, 'V'), - (0x3EA, 'M', u'ϫ'), + (0x3EA, 'M', 'ϫ'), (0x3EB, 'V'), - (0x3EC, 'M', u'ϭ'), + (0x3EC, 'M', 'ϭ'), (0x3ED, 'V'), - (0x3EE, 'M', u'ϯ'), + (0x3EE, 'M', 'ϯ'), (0x3EF, 'V'), - (0x3F0, 'M', u'κ'), - (0x3F1, 'M', u'ρ'), - (0x3F2, 'M', u'σ'), + (0x3F0, 'M', 'κ'), + (0x3F1, 'M', 'ρ'), + (0x3F2, 'M', 'σ'), (0x3F3, 'V'), - (0x3F4, 'M', u'θ'), - (0x3F5, 'M', u'ε'), + (0x3F4, 'M', 'θ'), + (0x3F5, 'M', 'ε'), (0x3F6, 'V'), - (0x3F7, 'M', u'ϸ'), + (0x3F7, 'M', 'ϸ'), (0x3F8, 'V'), - (0x3F9, 'M', u'σ'), - (0x3FA, 'M', u'ϻ'), + (0x3F9, 'M', 'σ'), + (0x3FA, 'M', 'ϻ'), (0x3FB, 'V'), - (0x3FD, 'M', u'ͻ'), - (0x3FE, 'M', u'ͼ'), - (0x3FF, 'M', u'ͽ'), - (0x400, 'M', u'ѐ'), - (0x401, 'M', u'ё'), - (0x402, 'M', u'ђ'), + (0x3FD, 'M', 'ͻ'), + (0x3FE, 'M', 'ͼ'), + (0x3FF, 'M', 'ͽ'), + (0x400, 'M', 'ѐ'), + (0x401, 'M', 'ё'), + (0x402, 'M', 'ђ'), ] -def _seg_7(): +def _seg_7() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: return [ - (0x403, 'M', u'ѓ'), - (0x404, 'M', u'є'), - (0x405, 'M', u'ѕ'), - (0x406, 'M', u'і'), - (0x407, 'M', u'ї'), - (0x408, 'M', u'ј'), - (0x409, 'M', u'љ'), - (0x40A, 'M', u'њ'), - (0x40B, 'M', u'ћ'), - (0x40C, 'M', u'ќ'), - (0x40D, 'M', u'ѝ'), - (0x40E, 'M', u'ў'), - (0x40F, 'M', u'џ'), - (0x410, 'M', u'а'), - (0x411, 'M', u'б'), - (0x412, 'M', u'в'), - (0x413, 'M', u'г'), - (0x414, 'M', u'д'), - (0x415, 'M', u'е'), - (0x416, 'M', u'ж'), - (0x417, 'M', u'з'), - (0x418, 'M', u'и'), - (0x419, 'M', u'й'), - (0x41A, 'M', u'к'), - (0x41B, 'M', u'л'), - (0x41C, 'M', u'м'), - (0x41D, 'M', u'н'), - (0x41E, 'M', u'о'), - (0x41F, 'M', u'п'), - (0x420, 'M', u'р'), - (0x421, 'M', u'с'), - (0x422, 'M', u'т'), - (0x423, 'M', u'у'), - (0x424, 'M', u'ф'), - (0x425, 'M', u'х'), - (0x426, 'M', u'ц'), - (0x427, 'M', u'ч'), - (0x428, 'M', u'ш'), - (0x429, 'M', u'щ'), - (0x42A, 'M', u'ъ'), - (0x42B, 'M', u'ы'), - (0x42C, 'M', u'ь'), - (0x42D, 'M', u'э'), - (0x42E, 'M', u'ю'), - (0x42F, 'M', u'я'), + (0x403, 'M', 'ѓ'), + (0x404, 'M', 'є'), + (0x405, 'M', 'ѕ'), + (0x406, 'M', 'і'), + (0x407, 'M', 'ї'), + 
(0x408, 'M', 'ј'), + (0x409, 'M', 'љ'), + (0x40A, 'M', 'њ'), + (0x40B, 'M', 'ћ'), + (0x40C, 'M', 'ќ'), + (0x40D, 'M', 'ѝ'), + (0x40E, 'M', 'ў'), + (0x40F, 'M', 'џ'), + (0x410, 'M', 'а'), + (0x411, 'M', 'б'), + (0x412, 'M', 'в'), + (0x413, 'M', 'г'), + (0x414, 'M', 'д'), + (0x415, 'M', 'е'), + (0x416, 'M', 'ж'), + (0x417, 'M', 'з'), + (0x418, 'M', 'и'), + (0x419, 'M', 'й'), + (0x41A, 'M', 'к'), + (0x41B, 'M', 'л'), + (0x41C, 'M', 'м'), + (0x41D, 'M', 'н'), + (0x41E, 'M', 'о'), + (0x41F, 'M', 'п'), + (0x420, 'M', 'р'), + (0x421, 'M', 'с'), + (0x422, 'M', 'т'), + (0x423, 'M', 'у'), + (0x424, 'M', 'ф'), + (0x425, 'M', 'х'), + (0x426, 'M', 'ц'), + (0x427, 'M', 'ч'), + (0x428, 'M', 'ш'), + (0x429, 'M', 'щ'), + (0x42A, 'M', 'ъ'), + (0x42B, 'M', 'ы'), + (0x42C, 'M', 'ь'), + (0x42D, 'M', 'э'), + (0x42E, 'M', 'ю'), + (0x42F, 'M', 'я'), (0x430, 'V'), - (0x460, 'M', u'ѡ'), + (0x460, 'M', 'ѡ'), (0x461, 'V'), - (0x462, 'M', u'ѣ'), + (0x462, 'M', 'ѣ'), (0x463, 'V'), - (0x464, 'M', u'ѥ'), + (0x464, 'M', 'ѥ'), (0x465, 'V'), - (0x466, 'M', u'ѧ'), + (0x466, 'M', 'ѧ'), (0x467, 'V'), - (0x468, 'M', u'ѩ'), + (0x468, 'M', 'ѩ'), (0x469, 'V'), - (0x46A, 'M', u'ѫ'), + (0x46A, 'M', 'ѫ'), (0x46B, 'V'), - (0x46C, 'M', u'ѭ'), + (0x46C, 'M', 'ѭ'), (0x46D, 'V'), - (0x46E, 'M', u'ѯ'), + (0x46E, 'M', 'ѯ'), (0x46F, 'V'), - (0x470, 'M', u'ѱ'), + (0x470, 'M', 'ѱ'), (0x471, 'V'), - (0x472, 'M', u'ѳ'), + (0x472, 'M', 'ѳ'), (0x473, 'V'), - (0x474, 'M', u'ѵ'), + (0x474, 'M', 'ѵ'), (0x475, 'V'), - (0x476, 'M', u'ѷ'), + (0x476, 'M', 'ѷ'), (0x477, 'V'), - (0x478, 'M', u'ѹ'), + (0x478, 'M', 'ѹ'), (0x479, 'V'), - (0x47A, 'M', u'ѻ'), + (0x47A, 'M', 'ѻ'), (0x47B, 'V'), - (0x47C, 'M', u'ѽ'), + (0x47C, 'M', 'ѽ'), (0x47D, 'V'), - (0x47E, 'M', u'ѿ'), + (0x47E, 'M', 'ѿ'), (0x47F, 'V'), - (0x480, 'M', u'ҁ'), + (0x480, 'M', 'ҁ'), (0x481, 'V'), - (0x48A, 'M', u'ҋ'), + (0x48A, 'M', 'ҋ'), (0x48B, 'V'), - (0x48C, 'M', u'ҍ'), + (0x48C, 'M', 'ҍ'), (0x48D, 'V'), - (0x48E, 'M', u'ҏ'), + (0x48E, 'M', 'ҏ'), (0x48F, 'V'), - (0x490, 'M', u'ґ'), + (0x490, 'M', 'ґ'), (0x491, 'V'), - (0x492, 'M', u'ғ'), + (0x492, 'M', 'ғ'), (0x493, 'V'), - (0x494, 'M', u'ҕ'), + (0x494, 'M', 'ҕ'), (0x495, 'V'), - (0x496, 'M', u'җ'), + (0x496, 'M', 'җ'), (0x497, 'V'), - (0x498, 'M', u'ҙ'), + (0x498, 'M', 'ҙ'), (0x499, 'V'), - (0x49A, 'M', u'қ'), + (0x49A, 'M', 'қ'), (0x49B, 'V'), - (0x49C, 'M', u'ҝ'), + (0x49C, 'M', 'ҝ'), (0x49D, 'V'), ] -def _seg_8(): +def _seg_8() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: return [ - (0x49E, 'M', u'ҟ'), + (0x49E, 'M', 'ҟ'), (0x49F, 'V'), - (0x4A0, 'M', u'ҡ'), + (0x4A0, 'M', 'ҡ'), (0x4A1, 'V'), - (0x4A2, 'M', u'ң'), + (0x4A2, 'M', 'ң'), (0x4A3, 'V'), - (0x4A4, 'M', u'ҥ'), + (0x4A4, 'M', 'ҥ'), (0x4A5, 'V'), - (0x4A6, 'M', u'ҧ'), + (0x4A6, 'M', 'ҧ'), (0x4A7, 'V'), - (0x4A8, 'M', u'ҩ'), + (0x4A8, 'M', 'ҩ'), (0x4A9, 'V'), - (0x4AA, 'M', u'ҫ'), + (0x4AA, 'M', 'ҫ'), (0x4AB, 'V'), - (0x4AC, 'M', u'ҭ'), + (0x4AC, 'M', 'ҭ'), (0x4AD, 'V'), - (0x4AE, 'M', u'ү'), + (0x4AE, 'M', 'ү'), (0x4AF, 'V'), - (0x4B0, 'M', u'ұ'), + (0x4B0, 'M', 'ұ'), (0x4B1, 'V'), - (0x4B2, 'M', u'ҳ'), + (0x4B2, 'M', 'ҳ'), (0x4B3, 'V'), - (0x4B4, 'M', u'ҵ'), + (0x4B4, 'M', 'ҵ'), (0x4B5, 'V'), - (0x4B6, 'M', u'ҷ'), + (0x4B6, 'M', 'ҷ'), (0x4B7, 'V'), - (0x4B8, 'M', u'ҹ'), + (0x4B8, 'M', 'ҹ'), (0x4B9, 'V'), - (0x4BA, 'M', u'һ'), + (0x4BA, 'M', 'һ'), (0x4BB, 'V'), - (0x4BC, 'M', u'ҽ'), + (0x4BC, 'M', 'ҽ'), (0x4BD, 'V'), - (0x4BE, 'M', u'ҿ'), + (0x4BE, 'M', 'ҿ'), (0x4BF, 'V'), (0x4C0, 'X'), - (0x4C1, 'M', u'ӂ'), + (0x4C1, 'M', 'ӂ'), (0x4C2, 'V'), - (0x4C3, 'M', u'ӄ'), + 
(0x4C3, 'M', 'ӄ'), (0x4C4, 'V'), - (0x4C5, 'M', u'ӆ'), + (0x4C5, 'M', 'ӆ'), (0x4C6, 'V'), - (0x4C7, 'M', u'ӈ'), + (0x4C7, 'M', 'ӈ'), (0x4C8, 'V'), - (0x4C9, 'M', u'ӊ'), + (0x4C9, 'M', 'ӊ'), (0x4CA, 'V'), - (0x4CB, 'M', u'ӌ'), + (0x4CB, 'M', 'ӌ'), (0x4CC, 'V'), - (0x4CD, 'M', u'ӎ'), + (0x4CD, 'M', 'ӎ'), (0x4CE, 'V'), - (0x4D0, 'M', u'ӑ'), + (0x4D0, 'M', 'ӑ'), (0x4D1, 'V'), - (0x4D2, 'M', u'ӓ'), + (0x4D2, 'M', 'ӓ'), (0x4D3, 'V'), - (0x4D4, 'M', u'ӕ'), + (0x4D4, 'M', 'ӕ'), (0x4D5, 'V'), - (0x4D6, 'M', u'ӗ'), + (0x4D6, 'M', 'ӗ'), (0x4D7, 'V'), - (0x4D8, 'M', u'ә'), + (0x4D8, 'M', 'ә'), (0x4D9, 'V'), - (0x4DA, 'M', u'ӛ'), + (0x4DA, 'M', 'ӛ'), (0x4DB, 'V'), - (0x4DC, 'M', u'ӝ'), + (0x4DC, 'M', 'ӝ'), (0x4DD, 'V'), - (0x4DE, 'M', u'ӟ'), + (0x4DE, 'M', 'ӟ'), (0x4DF, 'V'), - (0x4E0, 'M', u'ӡ'), + (0x4E0, 'M', 'ӡ'), (0x4E1, 'V'), - (0x4E2, 'M', u'ӣ'), + (0x4E2, 'M', 'ӣ'), (0x4E3, 'V'), - (0x4E4, 'M', u'ӥ'), + (0x4E4, 'M', 'ӥ'), (0x4E5, 'V'), - (0x4E6, 'M', u'ӧ'), + (0x4E6, 'M', 'ӧ'), (0x4E7, 'V'), - (0x4E8, 'M', u'ө'), + (0x4E8, 'M', 'ө'), (0x4E9, 'V'), - (0x4EA, 'M', u'ӫ'), + (0x4EA, 'M', 'ӫ'), (0x4EB, 'V'), - (0x4EC, 'M', u'ӭ'), + (0x4EC, 'M', 'ӭ'), (0x4ED, 'V'), - (0x4EE, 'M', u'ӯ'), + (0x4EE, 'M', 'ӯ'), (0x4EF, 'V'), - (0x4F0, 'M', u'ӱ'), + (0x4F0, 'M', 'ӱ'), (0x4F1, 'V'), - (0x4F2, 'M', u'ӳ'), + (0x4F2, 'M', 'ӳ'), (0x4F3, 'V'), - (0x4F4, 'M', u'ӵ'), + (0x4F4, 'M', 'ӵ'), (0x4F5, 'V'), - (0x4F6, 'M', u'ӷ'), + (0x4F6, 'M', 'ӷ'), (0x4F7, 'V'), - (0x4F8, 'M', u'ӹ'), + (0x4F8, 'M', 'ӹ'), (0x4F9, 'V'), - (0x4FA, 'M', u'ӻ'), + (0x4FA, 'M', 'ӻ'), (0x4FB, 'V'), - (0x4FC, 'M', u'ӽ'), + (0x4FC, 'M', 'ӽ'), (0x4FD, 'V'), - (0x4FE, 'M', u'ӿ'), + (0x4FE, 'M', 'ӿ'), (0x4FF, 'V'), - (0x500, 'M', u'ԁ'), + (0x500, 'M', 'ԁ'), (0x501, 'V'), - (0x502, 'M', u'ԃ'), + (0x502, 'M', 'ԃ'), ] -def _seg_9(): +def _seg_9() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: return [ (0x503, 'V'), - (0x504, 'M', u'ԅ'), + (0x504, 'M', 'ԅ'), (0x505, 'V'), - (0x506, 'M', u'ԇ'), + (0x506, 'M', 'ԇ'), (0x507, 'V'), - (0x508, 'M', u'ԉ'), + (0x508, 'M', 'ԉ'), (0x509, 'V'), - (0x50A, 'M', u'ԋ'), + (0x50A, 'M', 'ԋ'), (0x50B, 'V'), - (0x50C, 'M', u'ԍ'), + (0x50C, 'M', 'ԍ'), (0x50D, 'V'), - (0x50E, 'M', u'ԏ'), + (0x50E, 'M', 'ԏ'), (0x50F, 'V'), - (0x510, 'M', u'ԑ'), + (0x510, 'M', 'ԑ'), (0x511, 'V'), - (0x512, 'M', u'ԓ'), + (0x512, 'M', 'ԓ'), (0x513, 'V'), - (0x514, 'M', u'ԕ'), + (0x514, 'M', 'ԕ'), (0x515, 'V'), - (0x516, 'M', u'ԗ'), + (0x516, 'M', 'ԗ'), (0x517, 'V'), - (0x518, 'M', u'ԙ'), + (0x518, 'M', 'ԙ'), (0x519, 'V'), - (0x51A, 'M', u'ԛ'), + (0x51A, 'M', 'ԛ'), (0x51B, 'V'), - (0x51C, 'M', u'ԝ'), + (0x51C, 'M', 'ԝ'), (0x51D, 'V'), - (0x51E, 'M', u'ԟ'), + (0x51E, 'M', 'ԟ'), (0x51F, 'V'), - (0x520, 'M', u'ԡ'), + (0x520, 'M', 'ԡ'), (0x521, 'V'), - (0x522, 'M', u'ԣ'), + (0x522, 'M', 'ԣ'), (0x523, 'V'), - (0x524, 'M', u'ԥ'), + (0x524, 'M', 'ԥ'), (0x525, 'V'), - (0x526, 'M', u'ԧ'), + (0x526, 'M', 'ԧ'), (0x527, 'V'), - (0x528, 'M', u'ԩ'), + (0x528, 'M', 'ԩ'), (0x529, 'V'), - (0x52A, 'M', u'ԫ'), + (0x52A, 'M', 'ԫ'), (0x52B, 'V'), - (0x52C, 'M', u'ԭ'), + (0x52C, 'M', 'ԭ'), (0x52D, 'V'), - (0x52E, 'M', u'ԯ'), + (0x52E, 'M', 'ԯ'), (0x52F, 'V'), (0x530, 'X'), - (0x531, 'M', u'ա'), - (0x532, 'M', u'բ'), - (0x533, 'M', u'գ'), - (0x534, 'M', u'դ'), - (0x535, 'M', u'ե'), - (0x536, 'M', u'զ'), - (0x537, 'M', u'է'), - (0x538, 'M', u'ը'), - (0x539, 'M', u'թ'), - (0x53A, 'M', u'ժ'), - (0x53B, 'M', u'ի'), - (0x53C, 'M', u'լ'), - (0x53D, 'M', u'խ'), - (0x53E, 'M', u'ծ'), - (0x53F, 'M', u'կ'), - (0x540, 'M', u'հ'), - (0x541, 'M', u'ձ'), - (0x542, 'M', 
u'ղ'), - (0x543, 'M', u'ճ'), - (0x544, 'M', u'մ'), - (0x545, 'M', u'յ'), - (0x546, 'M', u'ն'), - (0x547, 'M', u'շ'), - (0x548, 'M', u'ո'), - (0x549, 'M', u'չ'), - (0x54A, 'M', u'պ'), - (0x54B, 'M', u'ջ'), - (0x54C, 'M', u'ռ'), - (0x54D, 'M', u'ս'), - (0x54E, 'M', u'վ'), - (0x54F, 'M', u'տ'), - (0x550, 'M', u'ր'), - (0x551, 'M', u'ց'), - (0x552, 'M', u'ւ'), - (0x553, 'M', u'փ'), - (0x554, 'M', u'ք'), - (0x555, 'M', u'օ'), - (0x556, 'M', u'ֆ'), + (0x531, 'M', 'ա'), + (0x532, 'M', 'բ'), + (0x533, 'M', 'գ'), + (0x534, 'M', 'դ'), + (0x535, 'M', 'ե'), + (0x536, 'M', 'զ'), + (0x537, 'M', 'է'), + (0x538, 'M', 'ը'), + (0x539, 'M', 'թ'), + (0x53A, 'M', 'ժ'), + (0x53B, 'M', 'ի'), + (0x53C, 'M', 'լ'), + (0x53D, 'M', 'խ'), + (0x53E, 'M', 'ծ'), + (0x53F, 'M', 'կ'), + (0x540, 'M', 'հ'), + (0x541, 'M', 'ձ'), + (0x542, 'M', 'ղ'), + (0x543, 'M', 'ճ'), + (0x544, 'M', 'մ'), + (0x545, 'M', 'յ'), + (0x546, 'M', 'ն'), + (0x547, 'M', 'շ'), + (0x548, 'M', 'ո'), + (0x549, 'M', 'չ'), + (0x54A, 'M', 'պ'), + (0x54B, 'M', 'ջ'), + (0x54C, 'M', 'ռ'), + (0x54D, 'M', 'ս'), + (0x54E, 'M', 'վ'), + (0x54F, 'M', 'տ'), + (0x550, 'M', 'ր'), + (0x551, 'M', 'ց'), + (0x552, 'M', 'ւ'), + (0x553, 'M', 'փ'), + (0x554, 'M', 'ք'), + (0x555, 'M', 'օ'), + (0x556, 'M', 'ֆ'), (0x557, 'X'), (0x559, 'V'), - (0x587, 'M', u'եւ'), + (0x587, 'M', 'եւ'), (0x588, 'V'), (0x58B, 'X'), (0x58D, 'V'), @@ -1042,15 +1045,15 @@ def _seg_9(): (0x5F5, 'X'), (0x606, 'V'), (0x61C, 'X'), - (0x61E, 'V'), + (0x61D, 'V'), ] -def _seg_10(): +def _seg_10() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: return [ - (0x675, 'M', u'اٴ'), - (0x676, 'M', u'وٴ'), - (0x677, 'M', u'ۇٴ'), - (0x678, 'M', u'يٴ'), + (0x675, 'M', 'اٴ'), + (0x676, 'M', 'وٴ'), + (0x677, 'M', 'ۇٴ'), + (0x678, 'M', 'يٴ'), (0x679, 'V'), (0x6DD, 'X'), (0x6DE, 'V'), @@ -1071,21 +1074,19 @@ def _seg_10(): (0x85F, 'X'), (0x860, 'V'), (0x86B, 'X'), - (0x8A0, 'V'), - (0x8B5, 'X'), - (0x8B6, 'V'), - (0x8BE, 'X'), - (0x8D3, 'V'), + (0x870, 'V'), + (0x88F, 'X'), + (0x898, 'V'), (0x8E2, 'X'), (0x8E3, 'V'), - (0x958, 'M', u'क़'), - (0x959, 'M', u'ख़'), - (0x95A, 'M', u'ग़'), - (0x95B, 'M', u'ज़'), - (0x95C, 'M', u'ड़'), - (0x95D, 'M', u'ढ़'), - (0x95E, 'M', u'फ़'), - (0x95F, 'M', u'य़'), + (0x958, 'M', 'क़'), + (0x959, 'M', 'ख़'), + (0x95A, 'M', 'ग़'), + (0x95B, 'M', 'ज़'), + (0x95C, 'M', 'ड़'), + (0x95D, 'M', 'ढ़'), + (0x95E, 'M', 'फ़'), + (0x95F, 'M', 'य़'), (0x960, 'V'), (0x984, 'X'), (0x985, 'V'), @@ -1108,10 +1109,10 @@ def _seg_10(): (0x9CF, 'X'), (0x9D7, 'V'), (0x9D8, 'X'), - (0x9DC, 'M', u'ড়'), - (0x9DD, 'M', u'ঢ়'), + (0x9DC, 'M', 'ড়'), + (0x9DD, 'M', 'ঢ়'), (0x9DE, 'X'), - (0x9DF, 'M', u'য়'), + (0x9DF, 'M', 'য়'), (0x9E0, 'V'), (0x9E4, 'X'), (0x9E6, 'V'), @@ -1127,10 +1128,10 @@ def _seg_10(): (0xA2A, 'V'), (0xA31, 'X'), (0xA32, 'V'), - (0xA33, 'M', u'ਲ਼'), + (0xA33, 'M', 'ਲ਼'), (0xA34, 'X'), (0xA35, 'V'), - (0xA36, 'M', u'ਸ਼'), + (0xA36, 'M', 'ਸ਼'), (0xA37, 'X'), (0xA38, 'V'), (0xA3A, 'X'), @@ -1144,16 +1145,16 @@ def _seg_10(): (0xA4E, 'X'), (0xA51, 'V'), (0xA52, 'X'), - (0xA59, 'M', u'ਖ਼'), - (0xA5A, 'M', u'ਗ਼'), - (0xA5B, 'M', u'ਜ਼'), + (0xA59, 'M', 'ਖ਼'), + (0xA5A, 'M', 'ਗ਼'), + (0xA5B, 'M', 'ਜ਼'), + (0xA5C, 'V'), + (0xA5D, 'X'), ] -def _seg_11(): +def _seg_11() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: return [ - (0xA5C, 'V'), - (0xA5D, 'X'), - (0xA5E, 'M', u'ਫ਼'), + (0xA5E, 'M', 'ਫ਼'), (0xA5F, 'X'), (0xA66, 'V'), (0xA77, 'X'), @@ -1205,10 +1206,10 @@ def _seg_11(): (0xB49, 'X'), (0xB4B, 'V'), (0xB4E, 'X'), - (0xB56, 'V'), + (0xB55, 'V'), (0xB58, 'X'), - (0xB5C, 'M', 
u'ଡ଼'), - (0xB5D, 'M', u'ଢ଼'), + (0xB5C, 'M', 'ଡ଼'), + (0xB5D, 'M', 'ଢ଼'), (0xB5E, 'X'), (0xB5F, 'V'), (0xB64, 'X'), @@ -1251,14 +1252,14 @@ def _seg_11(): (0xC0E, 'V'), (0xC11, 'X'), (0xC12, 'V'), + (0xC29, 'X'), + (0xC2A, 'V'), ] -def _seg_12(): +def _seg_12() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: return [ - (0xC29, 'X'), - (0xC2A, 'V'), (0xC3A, 'X'), - (0xC3D, 'V'), + (0xC3C, 'V'), (0xC45, 'X'), (0xC46, 'V'), (0xC49, 'X'), @@ -1268,11 +1269,13 @@ def _seg_12(): (0xC57, 'X'), (0xC58, 'V'), (0xC5B, 'X'), + (0xC5D, 'V'), + (0xC5E, 'X'), (0xC60, 'V'), (0xC64, 'X'), (0xC66, 'V'), (0xC70, 'X'), - (0xC78, 'V'), + (0xC77, 'V'), (0xC8D, 'X'), (0xC8E, 'V'), (0xC91, 'X'), @@ -1290,17 +1293,15 @@ def _seg_12(): (0xCCE, 'X'), (0xCD5, 'V'), (0xCD7, 'X'), - (0xCDE, 'V'), + (0xCDD, 'V'), (0xCDF, 'X'), (0xCE0, 'V'), (0xCE4, 'X'), (0xCE6, 'V'), (0xCF0, 'X'), (0xCF1, 'V'), - (0xCF3, 'X'), + (0xCF4, 'X'), (0xD00, 'V'), - (0xD04, 'X'), - (0xD05, 'V'), (0xD0D, 'X'), (0xD0E, 'V'), (0xD11, 'X'), @@ -1314,7 +1315,7 @@ def _seg_12(): (0xD64, 'X'), (0xD66, 'V'), (0xD80, 'X'), - (0xD82, 'V'), + (0xD81, 'V'), (0xD84, 'X'), (0xD85, 'V'), (0xD97, 'X'), @@ -1339,7 +1340,7 @@ def _seg_12(): (0xDF2, 'V'), (0xDF5, 'X'), (0xE01, 'V'), - (0xE33, 'M', u'ํา'), + (0xE33, 'M', 'ํา'), (0xE34, 'V'), (0xE3B, 'X'), (0xE3F, 'V'), @@ -1348,88 +1349,74 @@ def _seg_12(): (0xE83, 'X'), (0xE84, 'V'), (0xE85, 'X'), - (0xE87, 'V'), - (0xE89, 'X'), - (0xE8A, 'V'), + (0xE86, 'V'), (0xE8B, 'X'), - (0xE8D, 'V'), - (0xE8E, 'X'), - (0xE94, 'V'), - ] - -def _seg_13(): - return [ - (0xE98, 'X'), - (0xE99, 'V'), - (0xEA0, 'X'), - (0xEA1, 'V'), + (0xE8C, 'V'), (0xEA4, 'X'), (0xEA5, 'V'), (0xEA6, 'X'), (0xEA7, 'V'), - (0xEA8, 'X'), - (0xEAA, 'V'), - (0xEAC, 'X'), - (0xEAD, 'V'), - (0xEB3, 'M', u'ໍາ'), + (0xEB3, 'M', 'ໍາ'), (0xEB4, 'V'), - (0xEBA, 'X'), - (0xEBB, 'V'), + ] + +def _seg_13() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0xEBE, 'X'), (0xEC0, 'V'), (0xEC5, 'X'), (0xEC6, 'V'), (0xEC7, 'X'), (0xEC8, 'V'), - (0xECE, 'X'), + (0xECF, 'X'), (0xED0, 'V'), (0xEDA, 'X'), - (0xEDC, 'M', u'ຫນ'), - (0xEDD, 'M', u'ຫມ'), + (0xEDC, 'M', 'ຫນ'), + (0xEDD, 'M', 'ຫມ'), (0xEDE, 'V'), (0xEE0, 'X'), (0xF00, 'V'), - (0xF0C, 'M', u'་'), + (0xF0C, 'M', '་'), (0xF0D, 'V'), - (0xF43, 'M', u'གྷ'), + (0xF43, 'M', 'གྷ'), (0xF44, 'V'), (0xF48, 'X'), (0xF49, 'V'), - (0xF4D, 'M', u'ཌྷ'), + (0xF4D, 'M', 'ཌྷ'), (0xF4E, 'V'), - (0xF52, 'M', u'དྷ'), + (0xF52, 'M', 'དྷ'), (0xF53, 'V'), - (0xF57, 'M', u'བྷ'), + (0xF57, 'M', 'བྷ'), (0xF58, 'V'), - (0xF5C, 'M', u'ཛྷ'), + (0xF5C, 'M', 'ཛྷ'), (0xF5D, 'V'), - (0xF69, 'M', u'ཀྵ'), + (0xF69, 'M', 'ཀྵ'), (0xF6A, 'V'), (0xF6D, 'X'), (0xF71, 'V'), - (0xF73, 'M', u'ཱི'), + (0xF73, 'M', 'ཱི'), (0xF74, 'V'), - (0xF75, 'M', u'ཱུ'), - (0xF76, 'M', u'ྲྀ'), - (0xF77, 'M', u'ྲཱྀ'), - (0xF78, 'M', u'ླྀ'), - (0xF79, 'M', u'ླཱྀ'), + (0xF75, 'M', 'ཱུ'), + (0xF76, 'M', 'ྲྀ'), + (0xF77, 'M', 'ྲཱྀ'), + (0xF78, 'M', 'ླྀ'), + (0xF79, 'M', 'ླཱྀ'), (0xF7A, 'V'), - (0xF81, 'M', u'ཱྀ'), + (0xF81, 'M', 'ཱྀ'), (0xF82, 'V'), - (0xF93, 'M', u'ྒྷ'), + (0xF93, 'M', 'ྒྷ'), (0xF94, 'V'), (0xF98, 'X'), (0xF99, 'V'), - (0xF9D, 'M', u'ྜྷ'), + (0xF9D, 'M', 'ྜྷ'), (0xF9E, 'V'), - (0xFA2, 'M', u'ྡྷ'), + (0xFA2, 'M', 'ྡྷ'), (0xFA3, 'V'), - (0xFA7, 'M', u'ྦྷ'), + (0xFA7, 'M', 'ྦྷ'), (0xFA8, 'V'), - (0xFAC, 'M', u'ྫྷ'), + (0xFAC, 'M', 'ྫྷ'), (0xFAD, 'V'), - (0xFB9, 'M', u'ྐྵ'), + (0xFB9, 'M', 'ྐྵ'), (0xFBA, 'V'), (0xFBD, 'X'), (0xFBE, 'V'), @@ -1438,12 +1425,12 @@ def _seg_13(): (0xFDB, 'X'), (0x1000, 'V'), (0x10A0, 'X'), - (0x10C7, 
'M', u'ⴧ'), + (0x10C7, 'M', 'ⴧ'), (0x10C8, 'X'), - (0x10CD, 'M', u'ⴭ'), + (0x10CD, 'M', 'ⴭ'), (0x10CE, 'X'), (0x10D0, 'V'), - (0x10FC, 'M', u'ნ'), + (0x10FC, 'M', 'ნ'), (0x10FD, 'V'), (0x115F, 'X'), (0x1161, 'V'), @@ -1459,10 +1446,6 @@ def _seg_13(): (0x1260, 'V'), (0x1289, 'X'), (0x128A, 'V'), - ] - -def _seg_14(): - return [ (0x128E, 'X'), (0x1290, 'V'), (0x12B1, 'X'), @@ -1479,6 +1462,10 @@ def _seg_14(): (0x12D8, 'V'), (0x1311, 'X'), (0x1312, 'V'), + ] + +def _seg_14() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x1316, 'X'), (0x1318, 'V'), (0x135B, 'X'), @@ -1488,12 +1475,12 @@ def _seg_14(): (0x139A, 'X'), (0x13A0, 'V'), (0x13F6, 'X'), - (0x13F8, 'M', u'Ᏸ'), - (0x13F9, 'M', u'Ᏹ'), - (0x13FA, 'M', u'Ᏺ'), - (0x13FB, 'M', u'Ᏻ'), - (0x13FC, 'M', u'Ᏼ'), - (0x13FD, 'M', u'Ᏽ'), + (0x13F8, 'M', 'Ᏸ'), + (0x13F9, 'M', 'Ᏹ'), + (0x13FA, 'M', 'Ᏺ'), + (0x13FB, 'M', 'Ᏻ'), + (0x13FC, 'M', 'Ᏼ'), + (0x13FD, 'M', 'Ᏽ'), (0x13FE, 'X'), (0x1400, 'V'), (0x1680, 'X'), @@ -1502,10 +1489,8 @@ def _seg_14(): (0x16A0, 'V'), (0x16F9, 'X'), (0x1700, 'V'), - (0x170D, 'X'), - (0x170E, 'V'), - (0x1715, 'X'), - (0x1720, 'V'), + (0x1716, 'X'), + (0x171F, 'V'), (0x1737, 'X'), (0x1740, 'V'), (0x1754, 'X'), @@ -1528,6 +1513,7 @@ def _seg_14(): (0x1807, 'V'), (0x180B, 'I'), (0x180E, 'X'), + (0x180F, 'I'), (0x1810, 'V'), (0x181A, 'X'), (0x1820, 'V'), @@ -1563,19 +1549,15 @@ def _seg_14(): (0x1A7F, 'V'), (0x1A8A, 'X'), (0x1A90, 'V'), - ] - -def _seg_15(): - return [ (0x1A9A, 'X'), (0x1AA0, 'V'), (0x1AAE, 'X'), (0x1AB0, 'V'), - (0x1ABF, 'X'), + (0x1ACF, 'X'), (0x1B00, 'V'), - (0x1B4C, 'X'), + (0x1B4D, 'X'), (0x1B50, 'V'), - (0x1B7D, 'X'), + (0x1B7F, 'X'), (0x1B80, 'V'), (0x1BF4, 'X'), (0x1BFC, 'V'), @@ -1583,1148 +1565,1193 @@ def _seg_15(): (0x1C3B, 'V'), (0x1C4A, 'X'), (0x1C4D, 'V'), - (0x1C80, 'M', u'в'), - (0x1C81, 'M', u'д'), - (0x1C82, 'M', u'о'), - (0x1C83, 'M', u'с'), - (0x1C84, 'M', u'т'), - (0x1C86, 'M', u'ъ'), - (0x1C87, 'M', u'ѣ'), - (0x1C88, 'M', u'ꙋ'), + (0x1C80, 'M', 'в'), + ] + +def _seg_15() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1C81, 'M', 'д'), + (0x1C82, 'M', 'о'), + (0x1C83, 'M', 'с'), + (0x1C84, 'M', 'т'), + (0x1C86, 'M', 'ъ'), + (0x1C87, 'M', 'ѣ'), + (0x1C88, 'M', 'ꙋ'), (0x1C89, 'X'), + (0x1C90, 'M', 'ა'), + (0x1C91, 'M', 'ბ'), + (0x1C92, 'M', 'გ'), + (0x1C93, 'M', 'დ'), + (0x1C94, 'M', 'ე'), + (0x1C95, 'M', 'ვ'), + (0x1C96, 'M', 'ზ'), + (0x1C97, 'M', 'თ'), + (0x1C98, 'M', 'ი'), + (0x1C99, 'M', 'კ'), + (0x1C9A, 'M', 'ლ'), + (0x1C9B, 'M', 'მ'), + (0x1C9C, 'M', 'ნ'), + (0x1C9D, 'M', 'ო'), + (0x1C9E, 'M', 'პ'), + (0x1C9F, 'M', 'ჟ'), + (0x1CA0, 'M', 'რ'), + (0x1CA1, 'M', 'ს'), + (0x1CA2, 'M', 'ტ'), + (0x1CA3, 'M', 'უ'), + (0x1CA4, 'M', 'ფ'), + (0x1CA5, 'M', 'ქ'), + (0x1CA6, 'M', 'ღ'), + (0x1CA7, 'M', 'ყ'), + (0x1CA8, 'M', 'შ'), + (0x1CA9, 'M', 'ჩ'), + (0x1CAA, 'M', 'ც'), + (0x1CAB, 'M', 'ძ'), + (0x1CAC, 'M', 'წ'), + (0x1CAD, 'M', 'ჭ'), + (0x1CAE, 'M', 'ხ'), + (0x1CAF, 'M', 'ჯ'), + (0x1CB0, 'M', 'ჰ'), + (0x1CB1, 'M', 'ჱ'), + (0x1CB2, 'M', 'ჲ'), + (0x1CB3, 'M', 'ჳ'), + (0x1CB4, 'M', 'ჴ'), + (0x1CB5, 'M', 'ჵ'), + (0x1CB6, 'M', 'ჶ'), + (0x1CB7, 'M', 'ჷ'), + (0x1CB8, 'M', 'ჸ'), + (0x1CB9, 'M', 'ჹ'), + (0x1CBA, 'M', 'ჺ'), + (0x1CBB, 'X'), + (0x1CBD, 'M', 'ჽ'), + (0x1CBE, 'M', 'ჾ'), + (0x1CBF, 'M', 'ჿ'), (0x1CC0, 'V'), (0x1CC8, 'X'), (0x1CD0, 'V'), - (0x1CFA, 'X'), + (0x1CFB, 'X'), (0x1D00, 'V'), - (0x1D2C, 'M', u'a'), - (0x1D2D, 'M', u'æ'), - (0x1D2E, 'M', u'b'), + (0x1D2C, 'M', 'a'), + (0x1D2D, 'M', 'æ'), + (0x1D2E, 'M', 'b'), (0x1D2F, 'V'), - (0x1D30, 
'M', u'd'), - (0x1D31, 'M', u'e'), - (0x1D32, 'M', u'ǝ'), - (0x1D33, 'M', u'g'), - (0x1D34, 'M', u'h'), - (0x1D35, 'M', u'i'), - (0x1D36, 'M', u'j'), - (0x1D37, 'M', u'k'), - (0x1D38, 'M', u'l'), - (0x1D39, 'M', u'm'), - (0x1D3A, 'M', u'n'), + (0x1D30, 'M', 'd'), + (0x1D31, 'M', 'e'), + (0x1D32, 'M', 'ǝ'), + (0x1D33, 'M', 'g'), + (0x1D34, 'M', 'h'), + (0x1D35, 'M', 'i'), + (0x1D36, 'M', 'j'), + (0x1D37, 'M', 'k'), + (0x1D38, 'M', 'l'), + (0x1D39, 'M', 'm'), + (0x1D3A, 'M', 'n'), (0x1D3B, 'V'), - (0x1D3C, 'M', u'o'), - (0x1D3D, 'M', u'ȣ'), - (0x1D3E, 'M', u'p'), - (0x1D3F, 'M', u'r'), - (0x1D40, 'M', u't'), - (0x1D41, 'M', u'u'), - (0x1D42, 'M', u'w'), - (0x1D43, 'M', u'a'), - (0x1D44, 'M', u'ɐ'), - (0x1D45, 'M', u'ɑ'), - (0x1D46, 'M', u'ᴂ'), - (0x1D47, 'M', u'b'), - (0x1D48, 'M', u'd'), - (0x1D49, 'M', u'e'), - (0x1D4A, 'M', u'ə'), - (0x1D4B, 'M', u'ɛ'), - (0x1D4C, 'M', u'ɜ'), - (0x1D4D, 'M', u'g'), + (0x1D3C, 'M', 'o'), + (0x1D3D, 'M', 'ȣ'), + (0x1D3E, 'M', 'p'), + (0x1D3F, 'M', 'r'), + (0x1D40, 'M', 't'), + (0x1D41, 'M', 'u'), + (0x1D42, 'M', 'w'), + (0x1D43, 'M', 'a'), + (0x1D44, 'M', 'ɐ'), + (0x1D45, 'M', 'ɑ'), + (0x1D46, 'M', 'ᴂ'), + (0x1D47, 'M', 'b'), + (0x1D48, 'M', 'd'), + (0x1D49, 'M', 'e'), + (0x1D4A, 'M', 'ə'), + (0x1D4B, 'M', 'ɛ'), + (0x1D4C, 'M', 'ɜ'), + (0x1D4D, 'M', 'g'), (0x1D4E, 'V'), - (0x1D4F, 'M', u'k'), - (0x1D50, 'M', u'm'), - (0x1D51, 'M', u'ŋ'), - (0x1D52, 'M', u'o'), - (0x1D53, 'M', u'ɔ'), - (0x1D54, 'M', u'ᴖ'), - (0x1D55, 'M', u'ᴗ'), - (0x1D56, 'M', u'p'), - (0x1D57, 'M', u't'), - (0x1D58, 'M', u'u'), - (0x1D59, 'M', u'ᴝ'), - (0x1D5A, 'M', u'ɯ'), - (0x1D5B, 'M', u'v'), - (0x1D5C, 'M', u'ᴥ'), - (0x1D5D, 'M', u'β'), - (0x1D5E, 'M', u'γ'), - (0x1D5F, 'M', u'δ'), - (0x1D60, 'M', u'φ'), - (0x1D61, 'M', u'χ'), - (0x1D62, 'M', u'i'), - (0x1D63, 'M', u'r'), - (0x1D64, 'M', u'u'), - (0x1D65, 'M', u'v'), - (0x1D66, 'M', u'β'), - (0x1D67, 'M', u'γ'), - (0x1D68, 'M', u'ρ'), - (0x1D69, 'M', u'φ'), - (0x1D6A, 'M', u'χ'), - (0x1D6B, 'V'), - (0x1D78, 'M', u'н'), - (0x1D79, 'V'), - (0x1D9B, 'M', u'ɒ'), - (0x1D9C, 'M', u'c'), - (0x1D9D, 'M', u'ɕ'), - (0x1D9E, 'M', u'ð'), + (0x1D4F, 'M', 'k'), + (0x1D50, 'M', 'm'), + (0x1D51, 'M', 'ŋ'), + (0x1D52, 'M', 'o'), + (0x1D53, 'M', 'ɔ'), ] -def _seg_16(): +def _seg_16() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: return [ - (0x1D9F, 'M', u'ɜ'), - (0x1DA0, 'M', u'f'), - (0x1DA1, 'M', u'ɟ'), - (0x1DA2, 'M', u'ɡ'), - (0x1DA3, 'M', u'ɥ'), - (0x1DA4, 'M', u'ɨ'), - (0x1DA5, 'M', u'ɩ'), - (0x1DA6, 'M', u'ɪ'), - (0x1DA7, 'M', u'ᵻ'), - (0x1DA8, 'M', u'ʝ'), - (0x1DA9, 'M', u'ɭ'), - (0x1DAA, 'M', u'ᶅ'), - (0x1DAB, 'M', u'ʟ'), - (0x1DAC, 'M', u'ɱ'), - (0x1DAD, 'M', u'ɰ'), - (0x1DAE, 'M', u'ɲ'), - (0x1DAF, 'M', u'ɳ'), - (0x1DB0, 'M', u'ɴ'), - (0x1DB1, 'M', u'ɵ'), - (0x1DB2, 'M', u'ɸ'), - (0x1DB3, 'M', u'ʂ'), - (0x1DB4, 'M', u'ʃ'), - (0x1DB5, 'M', u'ƫ'), - (0x1DB6, 'M', u'ʉ'), - (0x1DB7, 'M', u'ʊ'), - (0x1DB8, 'M', u'ᴜ'), - (0x1DB9, 'M', u'ʋ'), - (0x1DBA, 'M', u'ʌ'), - (0x1DBB, 'M', u'z'), - (0x1DBC, 'M', u'ʐ'), - (0x1DBD, 'M', u'ʑ'), - (0x1DBE, 'M', u'ʒ'), - (0x1DBF, 'M', u'θ'), + (0x1D54, 'M', 'ᴖ'), + (0x1D55, 'M', 'ᴗ'), + (0x1D56, 'M', 'p'), + (0x1D57, 'M', 't'), + (0x1D58, 'M', 'u'), + (0x1D59, 'M', 'ᴝ'), + (0x1D5A, 'M', 'ɯ'), + (0x1D5B, 'M', 'v'), + (0x1D5C, 'M', 'ᴥ'), + (0x1D5D, 'M', 'β'), + (0x1D5E, 'M', 'γ'), + (0x1D5F, 'M', 'δ'), + (0x1D60, 'M', 'φ'), + (0x1D61, 'M', 'χ'), + (0x1D62, 'M', 'i'), + (0x1D63, 'M', 'r'), + (0x1D64, 'M', 'u'), + (0x1D65, 'M', 'v'), + (0x1D66, 'M', 'β'), + (0x1D67, 'M', 'γ'), + (0x1D68, 'M', 'ρ'), + 
(0x1D69, 'M', 'φ'), + (0x1D6A, 'M', 'χ'), + (0x1D6B, 'V'), + (0x1D78, 'M', 'н'), + (0x1D79, 'V'), + (0x1D9B, 'M', 'ɒ'), + (0x1D9C, 'M', 'c'), + (0x1D9D, 'M', 'ɕ'), + (0x1D9E, 'M', 'ð'), + (0x1D9F, 'M', 'ɜ'), + (0x1DA0, 'M', 'f'), + (0x1DA1, 'M', 'ɟ'), + (0x1DA2, 'M', 'ɡ'), + (0x1DA3, 'M', 'ɥ'), + (0x1DA4, 'M', 'ɨ'), + (0x1DA5, 'M', 'ɩ'), + (0x1DA6, 'M', 'ɪ'), + (0x1DA7, 'M', 'ᵻ'), + (0x1DA8, 'M', 'ʝ'), + (0x1DA9, 'M', 'ɭ'), + (0x1DAA, 'M', 'ᶅ'), + (0x1DAB, 'M', 'ʟ'), + (0x1DAC, 'M', 'ɱ'), + (0x1DAD, 'M', 'ɰ'), + (0x1DAE, 'M', 'ɲ'), + (0x1DAF, 'M', 'ɳ'), + (0x1DB0, 'M', 'ɴ'), + (0x1DB1, 'M', 'ɵ'), + (0x1DB2, 'M', 'ɸ'), + (0x1DB3, 'M', 'ʂ'), + (0x1DB4, 'M', 'ʃ'), + (0x1DB5, 'M', 'ƫ'), + (0x1DB6, 'M', 'ʉ'), + (0x1DB7, 'M', 'ʊ'), + (0x1DB8, 'M', 'ᴜ'), + (0x1DB9, 'M', 'ʋ'), + (0x1DBA, 'M', 'ʌ'), + (0x1DBB, 'M', 'z'), + (0x1DBC, 'M', 'ʐ'), + (0x1DBD, 'M', 'ʑ'), + (0x1DBE, 'M', 'ʒ'), + (0x1DBF, 'M', 'θ'), (0x1DC0, 'V'), - (0x1DFA, 'X'), - (0x1DFB, 'V'), - (0x1E00, 'M', u'ḁ'), + (0x1E00, 'M', 'ḁ'), (0x1E01, 'V'), - (0x1E02, 'M', u'ḃ'), + (0x1E02, 'M', 'ḃ'), (0x1E03, 'V'), - (0x1E04, 'M', u'ḅ'), + (0x1E04, 'M', 'ḅ'), (0x1E05, 'V'), - (0x1E06, 'M', u'ḇ'), + (0x1E06, 'M', 'ḇ'), (0x1E07, 'V'), - (0x1E08, 'M', u'ḉ'), + (0x1E08, 'M', 'ḉ'), (0x1E09, 'V'), - (0x1E0A, 'M', u'ḋ'), + (0x1E0A, 'M', 'ḋ'), (0x1E0B, 'V'), - (0x1E0C, 'M', u'ḍ'), + (0x1E0C, 'M', 'ḍ'), (0x1E0D, 'V'), - (0x1E0E, 'M', u'ḏ'), + (0x1E0E, 'M', 'ḏ'), (0x1E0F, 'V'), - (0x1E10, 'M', u'ḑ'), + (0x1E10, 'M', 'ḑ'), (0x1E11, 'V'), - (0x1E12, 'M', u'ḓ'), + (0x1E12, 'M', 'ḓ'), (0x1E13, 'V'), - (0x1E14, 'M', u'ḕ'), + (0x1E14, 'M', 'ḕ'), (0x1E15, 'V'), - (0x1E16, 'M', u'ḗ'), + (0x1E16, 'M', 'ḗ'), (0x1E17, 'V'), - (0x1E18, 'M', u'ḙ'), + (0x1E18, 'M', 'ḙ'), (0x1E19, 'V'), - (0x1E1A, 'M', u'ḛ'), + (0x1E1A, 'M', 'ḛ'), (0x1E1B, 'V'), - (0x1E1C, 'M', u'ḝ'), + (0x1E1C, 'M', 'ḝ'), (0x1E1D, 'V'), - (0x1E1E, 'M', u'ḟ'), + (0x1E1E, 'M', 'ḟ'), (0x1E1F, 'V'), - (0x1E20, 'M', u'ḡ'), + (0x1E20, 'M', 'ḡ'), (0x1E21, 'V'), - (0x1E22, 'M', u'ḣ'), + (0x1E22, 'M', 'ḣ'), (0x1E23, 'V'), - (0x1E24, 'M', u'ḥ'), + ] + +def _seg_17() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1E24, 'M', 'ḥ'), (0x1E25, 'V'), - (0x1E26, 'M', u'ḧ'), + (0x1E26, 'M', 'ḧ'), (0x1E27, 'V'), - (0x1E28, 'M', u'ḩ'), + (0x1E28, 'M', 'ḩ'), (0x1E29, 'V'), - (0x1E2A, 'M', u'ḫ'), + (0x1E2A, 'M', 'ḫ'), (0x1E2B, 'V'), - (0x1E2C, 'M', u'ḭ'), + (0x1E2C, 'M', 'ḭ'), (0x1E2D, 'V'), - (0x1E2E, 'M', u'ḯ'), + (0x1E2E, 'M', 'ḯ'), (0x1E2F, 'V'), - (0x1E30, 'M', u'ḱ'), + (0x1E30, 'M', 'ḱ'), (0x1E31, 'V'), - (0x1E32, 'M', u'ḳ'), + (0x1E32, 'M', 'ḳ'), (0x1E33, 'V'), - (0x1E34, 'M', u'ḵ'), + (0x1E34, 'M', 'ḵ'), (0x1E35, 'V'), - (0x1E36, 'M', u'ḷ'), + (0x1E36, 'M', 'ḷ'), (0x1E37, 'V'), - (0x1E38, 'M', u'ḹ'), + (0x1E38, 'M', 'ḹ'), (0x1E39, 'V'), - (0x1E3A, 'M', u'ḻ'), + (0x1E3A, 'M', 'ḻ'), (0x1E3B, 'V'), - (0x1E3C, 'M', u'ḽ'), + (0x1E3C, 'M', 'ḽ'), (0x1E3D, 'V'), - (0x1E3E, 'M', u'ḿ'), + (0x1E3E, 'M', 'ḿ'), (0x1E3F, 'V'), - ] - -def _seg_17(): - return [ - (0x1E40, 'M', u'ṁ'), + (0x1E40, 'M', 'ṁ'), (0x1E41, 'V'), - (0x1E42, 'M', u'ṃ'), + (0x1E42, 'M', 'ṃ'), (0x1E43, 'V'), - (0x1E44, 'M', u'ṅ'), + (0x1E44, 'M', 'ṅ'), (0x1E45, 'V'), - (0x1E46, 'M', u'ṇ'), + (0x1E46, 'M', 'ṇ'), (0x1E47, 'V'), - (0x1E48, 'M', u'ṉ'), + (0x1E48, 'M', 'ṉ'), (0x1E49, 'V'), - (0x1E4A, 'M', u'ṋ'), + (0x1E4A, 'M', 'ṋ'), (0x1E4B, 'V'), - (0x1E4C, 'M', u'ṍ'), + (0x1E4C, 'M', 'ṍ'), (0x1E4D, 'V'), - (0x1E4E, 'M', u'ṏ'), + (0x1E4E, 'M', 'ṏ'), (0x1E4F, 'V'), - (0x1E50, 'M', u'ṑ'), + (0x1E50, 'M', 'ṑ'), (0x1E51, 'V'), - (0x1E52, 
'M', u'ṓ'), + (0x1E52, 'M', 'ṓ'), (0x1E53, 'V'), - (0x1E54, 'M', u'ṕ'), + (0x1E54, 'M', 'ṕ'), (0x1E55, 'V'), - (0x1E56, 'M', u'ṗ'), + (0x1E56, 'M', 'ṗ'), (0x1E57, 'V'), - (0x1E58, 'M', u'ṙ'), + (0x1E58, 'M', 'ṙ'), (0x1E59, 'V'), - (0x1E5A, 'M', u'ṛ'), + (0x1E5A, 'M', 'ṛ'), (0x1E5B, 'V'), - (0x1E5C, 'M', u'ṝ'), + (0x1E5C, 'M', 'ṝ'), (0x1E5D, 'V'), - (0x1E5E, 'M', u'ṟ'), + (0x1E5E, 'M', 'ṟ'), (0x1E5F, 'V'), - (0x1E60, 'M', u'ṡ'), + (0x1E60, 'M', 'ṡ'), (0x1E61, 'V'), - (0x1E62, 'M', u'ṣ'), + (0x1E62, 'M', 'ṣ'), (0x1E63, 'V'), - (0x1E64, 'M', u'ṥ'), + (0x1E64, 'M', 'ṥ'), (0x1E65, 'V'), - (0x1E66, 'M', u'ṧ'), + (0x1E66, 'M', 'ṧ'), (0x1E67, 'V'), - (0x1E68, 'M', u'ṩ'), + (0x1E68, 'M', 'ṩ'), (0x1E69, 'V'), - (0x1E6A, 'M', u'ṫ'), + (0x1E6A, 'M', 'ṫ'), (0x1E6B, 'V'), - (0x1E6C, 'M', u'ṭ'), + (0x1E6C, 'M', 'ṭ'), (0x1E6D, 'V'), - (0x1E6E, 'M', u'ṯ'), + (0x1E6E, 'M', 'ṯ'), (0x1E6F, 'V'), - (0x1E70, 'M', u'ṱ'), + (0x1E70, 'M', 'ṱ'), (0x1E71, 'V'), - (0x1E72, 'M', u'ṳ'), + (0x1E72, 'M', 'ṳ'), (0x1E73, 'V'), - (0x1E74, 'M', u'ṵ'), + (0x1E74, 'M', 'ṵ'), (0x1E75, 'V'), - (0x1E76, 'M', u'ṷ'), + (0x1E76, 'M', 'ṷ'), (0x1E77, 'V'), - (0x1E78, 'M', u'ṹ'), + (0x1E78, 'M', 'ṹ'), (0x1E79, 'V'), - (0x1E7A, 'M', u'ṻ'), + (0x1E7A, 'M', 'ṻ'), (0x1E7B, 'V'), - (0x1E7C, 'M', u'ṽ'), + (0x1E7C, 'M', 'ṽ'), (0x1E7D, 'V'), - (0x1E7E, 'M', u'ṿ'), + (0x1E7E, 'M', 'ṿ'), (0x1E7F, 'V'), - (0x1E80, 'M', u'ẁ'), + (0x1E80, 'M', 'ẁ'), (0x1E81, 'V'), - (0x1E82, 'M', u'ẃ'), + (0x1E82, 'M', 'ẃ'), (0x1E83, 'V'), - (0x1E84, 'M', u'ẅ'), + (0x1E84, 'M', 'ẅ'), (0x1E85, 'V'), - (0x1E86, 'M', u'ẇ'), + (0x1E86, 'M', 'ẇ'), (0x1E87, 'V'), - (0x1E88, 'M', u'ẉ'), + ] + +def _seg_18() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1E88, 'M', 'ẉ'), (0x1E89, 'V'), - (0x1E8A, 'M', u'ẋ'), + (0x1E8A, 'M', 'ẋ'), (0x1E8B, 'V'), - (0x1E8C, 'M', u'ẍ'), + (0x1E8C, 'M', 'ẍ'), (0x1E8D, 'V'), - (0x1E8E, 'M', u'ẏ'), + (0x1E8E, 'M', 'ẏ'), (0x1E8F, 'V'), - (0x1E90, 'M', u'ẑ'), + (0x1E90, 'M', 'ẑ'), (0x1E91, 'V'), - (0x1E92, 'M', u'ẓ'), + (0x1E92, 'M', 'ẓ'), (0x1E93, 'V'), - (0x1E94, 'M', u'ẕ'), + (0x1E94, 'M', 'ẕ'), (0x1E95, 'V'), - (0x1E9A, 'M', u'aʾ'), - (0x1E9B, 'M', u'ṡ'), + (0x1E9A, 'M', 'aʾ'), + (0x1E9B, 'M', 'ṡ'), (0x1E9C, 'V'), - (0x1E9E, 'M', u'ss'), + (0x1E9E, 'M', 'ss'), (0x1E9F, 'V'), - (0x1EA0, 'M', u'ạ'), + (0x1EA0, 'M', 'ạ'), (0x1EA1, 'V'), - (0x1EA2, 'M', u'ả'), + (0x1EA2, 'M', 'ả'), (0x1EA3, 'V'), - (0x1EA4, 'M', u'ấ'), + (0x1EA4, 'M', 'ấ'), (0x1EA5, 'V'), - (0x1EA6, 'M', u'ầ'), + (0x1EA6, 'M', 'ầ'), (0x1EA7, 'V'), - (0x1EA8, 'M', u'ẩ'), - ] - -def _seg_18(): - return [ + (0x1EA8, 'M', 'ẩ'), (0x1EA9, 'V'), - (0x1EAA, 'M', u'ẫ'), + (0x1EAA, 'M', 'ẫ'), (0x1EAB, 'V'), - (0x1EAC, 'M', u'ậ'), + (0x1EAC, 'M', 'ậ'), (0x1EAD, 'V'), - (0x1EAE, 'M', u'ắ'), + (0x1EAE, 'M', 'ắ'), (0x1EAF, 'V'), - (0x1EB0, 'M', u'ằ'), + (0x1EB0, 'M', 'ằ'), (0x1EB1, 'V'), - (0x1EB2, 'M', u'ẳ'), + (0x1EB2, 'M', 'ẳ'), (0x1EB3, 'V'), - (0x1EB4, 'M', u'ẵ'), + (0x1EB4, 'M', 'ẵ'), (0x1EB5, 'V'), - (0x1EB6, 'M', u'ặ'), + (0x1EB6, 'M', 'ặ'), (0x1EB7, 'V'), - (0x1EB8, 'M', u'ẹ'), + (0x1EB8, 'M', 'ẹ'), (0x1EB9, 'V'), - (0x1EBA, 'M', u'ẻ'), + (0x1EBA, 'M', 'ẻ'), (0x1EBB, 'V'), - (0x1EBC, 'M', u'ẽ'), + (0x1EBC, 'M', 'ẽ'), (0x1EBD, 'V'), - (0x1EBE, 'M', u'ế'), + (0x1EBE, 'M', 'ế'), (0x1EBF, 'V'), - (0x1EC0, 'M', u'ề'), + (0x1EC0, 'M', 'ề'), (0x1EC1, 'V'), - (0x1EC2, 'M', u'ể'), + (0x1EC2, 'M', 'ể'), (0x1EC3, 'V'), - (0x1EC4, 'M', u'ễ'), + (0x1EC4, 'M', 'ễ'), (0x1EC5, 'V'), - (0x1EC6, 'M', u'ệ'), + (0x1EC6, 'M', 'ệ'), (0x1EC7, 'V'), - (0x1EC8, 'M', u'ỉ'), + (0x1EC8, 
'M', 'ỉ'), (0x1EC9, 'V'), - (0x1ECA, 'M', u'ị'), + (0x1ECA, 'M', 'ị'), (0x1ECB, 'V'), - (0x1ECC, 'M', u'ọ'), + (0x1ECC, 'M', 'ọ'), (0x1ECD, 'V'), - (0x1ECE, 'M', u'ỏ'), + (0x1ECE, 'M', 'ỏ'), (0x1ECF, 'V'), - (0x1ED0, 'M', u'ố'), + (0x1ED0, 'M', 'ố'), (0x1ED1, 'V'), - (0x1ED2, 'M', u'ồ'), + (0x1ED2, 'M', 'ồ'), (0x1ED3, 'V'), - (0x1ED4, 'M', u'ổ'), + (0x1ED4, 'M', 'ổ'), (0x1ED5, 'V'), - (0x1ED6, 'M', u'ỗ'), + (0x1ED6, 'M', 'ỗ'), (0x1ED7, 'V'), - (0x1ED8, 'M', u'ộ'), + (0x1ED8, 'M', 'ộ'), (0x1ED9, 'V'), - (0x1EDA, 'M', u'ớ'), + (0x1EDA, 'M', 'ớ'), (0x1EDB, 'V'), - (0x1EDC, 'M', u'ờ'), + (0x1EDC, 'M', 'ờ'), (0x1EDD, 'V'), - (0x1EDE, 'M', u'ở'), + (0x1EDE, 'M', 'ở'), (0x1EDF, 'V'), - (0x1EE0, 'M', u'ỡ'), + (0x1EE0, 'M', 'ỡ'), (0x1EE1, 'V'), - (0x1EE2, 'M', u'ợ'), + (0x1EE2, 'M', 'ợ'), (0x1EE3, 'V'), - (0x1EE4, 'M', u'ụ'), + (0x1EE4, 'M', 'ụ'), (0x1EE5, 'V'), - (0x1EE6, 'M', u'ủ'), + (0x1EE6, 'M', 'ủ'), (0x1EE7, 'V'), - (0x1EE8, 'M', u'ứ'), + (0x1EE8, 'M', 'ứ'), (0x1EE9, 'V'), - (0x1EEA, 'M', u'ừ'), + (0x1EEA, 'M', 'ừ'), (0x1EEB, 'V'), - (0x1EEC, 'M', u'ử'), + (0x1EEC, 'M', 'ử'), (0x1EED, 'V'), - (0x1EEE, 'M', u'ữ'), + (0x1EEE, 'M', 'ữ'), (0x1EEF, 'V'), - (0x1EF0, 'M', u'ự'), + (0x1EF0, 'M', 'ự'), + ] + +def _seg_19() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x1EF1, 'V'), - (0x1EF2, 'M', u'ỳ'), + (0x1EF2, 'M', 'ỳ'), (0x1EF3, 'V'), - (0x1EF4, 'M', u'ỵ'), + (0x1EF4, 'M', 'ỵ'), (0x1EF5, 'V'), - (0x1EF6, 'M', u'ỷ'), + (0x1EF6, 'M', 'ỷ'), (0x1EF7, 'V'), - (0x1EF8, 'M', u'ỹ'), + (0x1EF8, 'M', 'ỹ'), (0x1EF9, 'V'), - (0x1EFA, 'M', u'ỻ'), + (0x1EFA, 'M', 'ỻ'), (0x1EFB, 'V'), - (0x1EFC, 'M', u'ỽ'), + (0x1EFC, 'M', 'ỽ'), (0x1EFD, 'V'), - (0x1EFE, 'M', u'ỿ'), + (0x1EFE, 'M', 'ỿ'), (0x1EFF, 'V'), - (0x1F08, 'M', u'ἀ'), - (0x1F09, 'M', u'ἁ'), - (0x1F0A, 'M', u'ἂ'), - (0x1F0B, 'M', u'ἃ'), - (0x1F0C, 'M', u'ἄ'), - (0x1F0D, 'M', u'ἅ'), - (0x1F0E, 'M', u'ἆ'), - (0x1F0F, 'M', u'ἇ'), + (0x1F08, 'M', 'ἀ'), + (0x1F09, 'M', 'ἁ'), + (0x1F0A, 'M', 'ἂ'), + (0x1F0B, 'M', 'ἃ'), + (0x1F0C, 'M', 'ἄ'), + (0x1F0D, 'M', 'ἅ'), + (0x1F0E, 'M', 'ἆ'), + (0x1F0F, 'M', 'ἇ'), (0x1F10, 'V'), (0x1F16, 'X'), - (0x1F18, 'M', u'ἐ'), - (0x1F19, 'M', u'ἑ'), - (0x1F1A, 'M', u'ἒ'), - ] - -def _seg_19(): - return [ - (0x1F1B, 'M', u'ἓ'), - (0x1F1C, 'M', u'ἔ'), - (0x1F1D, 'M', u'ἕ'), + (0x1F18, 'M', 'ἐ'), + (0x1F19, 'M', 'ἑ'), + (0x1F1A, 'M', 'ἒ'), + (0x1F1B, 'M', 'ἓ'), + (0x1F1C, 'M', 'ἔ'), + (0x1F1D, 'M', 'ἕ'), (0x1F1E, 'X'), (0x1F20, 'V'), - (0x1F28, 'M', u'ἠ'), - (0x1F29, 'M', u'ἡ'), - (0x1F2A, 'M', u'ἢ'), - (0x1F2B, 'M', u'ἣ'), - (0x1F2C, 'M', u'ἤ'), - (0x1F2D, 'M', u'ἥ'), - (0x1F2E, 'M', u'ἦ'), - (0x1F2F, 'M', u'ἧ'), + (0x1F28, 'M', 'ἠ'), + (0x1F29, 'M', 'ἡ'), + (0x1F2A, 'M', 'ἢ'), + (0x1F2B, 'M', 'ἣ'), + (0x1F2C, 'M', 'ἤ'), + (0x1F2D, 'M', 'ἥ'), + (0x1F2E, 'M', 'ἦ'), + (0x1F2F, 'M', 'ἧ'), (0x1F30, 'V'), - (0x1F38, 'M', u'ἰ'), - (0x1F39, 'M', u'ἱ'), - (0x1F3A, 'M', u'ἲ'), - (0x1F3B, 'M', u'ἳ'), - (0x1F3C, 'M', u'ἴ'), - (0x1F3D, 'M', u'ἵ'), - (0x1F3E, 'M', u'ἶ'), - (0x1F3F, 'M', u'ἷ'), + (0x1F38, 'M', 'ἰ'), + (0x1F39, 'M', 'ἱ'), + (0x1F3A, 'M', 'ἲ'), + (0x1F3B, 'M', 'ἳ'), + (0x1F3C, 'M', 'ἴ'), + (0x1F3D, 'M', 'ἵ'), + (0x1F3E, 'M', 'ἶ'), + (0x1F3F, 'M', 'ἷ'), (0x1F40, 'V'), (0x1F46, 'X'), - (0x1F48, 'M', u'ὀ'), - (0x1F49, 'M', u'ὁ'), - (0x1F4A, 'M', u'ὂ'), - (0x1F4B, 'M', u'ὃ'), - (0x1F4C, 'M', u'ὄ'), - (0x1F4D, 'M', u'ὅ'), + (0x1F48, 'M', 'ὀ'), + (0x1F49, 'M', 'ὁ'), + (0x1F4A, 'M', 'ὂ'), + (0x1F4B, 'M', 'ὃ'), + (0x1F4C, 'M', 'ὄ'), + (0x1F4D, 'M', 'ὅ'), (0x1F4E, 'X'), (0x1F50, 'V'), (0x1F58, 'X'), - (0x1F59, 'M', 
u'ὑ'), + (0x1F59, 'M', 'ὑ'), (0x1F5A, 'X'), - (0x1F5B, 'M', u'ὓ'), + (0x1F5B, 'M', 'ὓ'), (0x1F5C, 'X'), - (0x1F5D, 'M', u'ὕ'), + (0x1F5D, 'M', 'ὕ'), (0x1F5E, 'X'), - (0x1F5F, 'M', u'ὗ'), + (0x1F5F, 'M', 'ὗ'), (0x1F60, 'V'), - (0x1F68, 'M', u'ὠ'), - (0x1F69, 'M', u'ὡ'), - (0x1F6A, 'M', u'ὢ'), - (0x1F6B, 'M', u'ὣ'), - (0x1F6C, 'M', u'ὤ'), - (0x1F6D, 'M', u'ὥ'), - (0x1F6E, 'M', u'ὦ'), - (0x1F6F, 'M', u'ὧ'), + (0x1F68, 'M', 'ὠ'), + (0x1F69, 'M', 'ὡ'), + (0x1F6A, 'M', 'ὢ'), + (0x1F6B, 'M', 'ὣ'), + (0x1F6C, 'M', 'ὤ'), + (0x1F6D, 'M', 'ὥ'), + (0x1F6E, 'M', 'ὦ'), + (0x1F6F, 'M', 'ὧ'), (0x1F70, 'V'), - (0x1F71, 'M', u'ά'), + (0x1F71, 'M', 'ά'), (0x1F72, 'V'), - (0x1F73, 'M', u'έ'), + (0x1F73, 'M', 'έ'), (0x1F74, 'V'), - (0x1F75, 'M', u'ή'), + (0x1F75, 'M', 'ή'), (0x1F76, 'V'), - (0x1F77, 'M', u'ί'), + (0x1F77, 'M', 'ί'), (0x1F78, 'V'), - (0x1F79, 'M', u'ό'), + (0x1F79, 'M', 'ό'), (0x1F7A, 'V'), - (0x1F7B, 'M', u'ύ'), + (0x1F7B, 'M', 'ύ'), (0x1F7C, 'V'), - (0x1F7D, 'M', u'ώ'), + (0x1F7D, 'M', 'ώ'), (0x1F7E, 'X'), - (0x1F80, 'M', u'ἀι'), - (0x1F81, 'M', u'ἁι'), - (0x1F82, 'M', u'ἂι'), - (0x1F83, 'M', u'ἃι'), - (0x1F84, 'M', u'ἄι'), - (0x1F85, 'M', u'ἅι'), - (0x1F86, 'M', u'ἆι'), - (0x1F87, 'M', u'ἇι'), - (0x1F88, 'M', u'ἀι'), - (0x1F89, 'M', u'ἁι'), - (0x1F8A, 'M', u'ἂι'), - (0x1F8B, 'M', u'ἃι'), - (0x1F8C, 'M', u'ἄι'), - (0x1F8D, 'M', u'ἅι'), - (0x1F8E, 'M', u'ἆι'), - (0x1F8F, 'M', u'ἇι'), - (0x1F90, 'M', u'ἠι'), - (0x1F91, 'M', u'ἡι'), - (0x1F92, 'M', u'ἢι'), - (0x1F93, 'M', u'ἣι'), - (0x1F94, 'M', u'ἤι'), - (0x1F95, 'M', u'ἥι'), - (0x1F96, 'M', u'ἦι'), - (0x1F97, 'M', u'ἧι'), - (0x1F98, 'M', u'ἠι'), - (0x1F99, 'M', u'ἡι'), - (0x1F9A, 'M', u'ἢι'), - (0x1F9B, 'M', u'ἣι'), - (0x1F9C, 'M', u'ἤι'), - (0x1F9D, 'M', u'ἥι'), - (0x1F9E, 'M', u'ἦι'), - (0x1F9F, 'M', u'ἧι'), - (0x1FA0, 'M', u'ὠι'), - (0x1FA1, 'M', u'ὡι'), - (0x1FA2, 'M', u'ὢι'), - (0x1FA3, 'M', u'ὣι'), + (0x1F80, 'M', 'ἀι'), + (0x1F81, 'M', 'ἁι'), + (0x1F82, 'M', 'ἂι'), + (0x1F83, 'M', 'ἃι'), + (0x1F84, 'M', 'ἄι'), + (0x1F85, 'M', 'ἅι'), + (0x1F86, 'M', 'ἆι'), + (0x1F87, 'M', 'ἇι'), ] -def _seg_20(): +def _seg_20() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: return [ - (0x1FA4, 'M', u'ὤι'), - (0x1FA5, 'M', u'ὥι'), - (0x1FA6, 'M', u'ὦι'), - (0x1FA7, 'M', u'ὧι'), - (0x1FA8, 'M', u'ὠι'), - (0x1FA9, 'M', u'ὡι'), - (0x1FAA, 'M', u'ὢι'), - (0x1FAB, 'M', u'ὣι'), - (0x1FAC, 'M', u'ὤι'), - (0x1FAD, 'M', u'ὥι'), - (0x1FAE, 'M', u'ὦι'), - (0x1FAF, 'M', u'ὧι'), + (0x1F88, 'M', 'ἀι'), + (0x1F89, 'M', 'ἁι'), + (0x1F8A, 'M', 'ἂι'), + (0x1F8B, 'M', 'ἃι'), + (0x1F8C, 'M', 'ἄι'), + (0x1F8D, 'M', 'ἅι'), + (0x1F8E, 'M', 'ἆι'), + (0x1F8F, 'M', 'ἇι'), + (0x1F90, 'M', 'ἠι'), + (0x1F91, 'M', 'ἡι'), + (0x1F92, 'M', 'ἢι'), + (0x1F93, 'M', 'ἣι'), + (0x1F94, 'M', 'ἤι'), + (0x1F95, 'M', 'ἥι'), + (0x1F96, 'M', 'ἦι'), + (0x1F97, 'M', 'ἧι'), + (0x1F98, 'M', 'ἠι'), + (0x1F99, 'M', 'ἡι'), + (0x1F9A, 'M', 'ἢι'), + (0x1F9B, 'M', 'ἣι'), + (0x1F9C, 'M', 'ἤι'), + (0x1F9D, 'M', 'ἥι'), + (0x1F9E, 'M', 'ἦι'), + (0x1F9F, 'M', 'ἧι'), + (0x1FA0, 'M', 'ὠι'), + (0x1FA1, 'M', 'ὡι'), + (0x1FA2, 'M', 'ὢι'), + (0x1FA3, 'M', 'ὣι'), + (0x1FA4, 'M', 'ὤι'), + (0x1FA5, 'M', 'ὥι'), + (0x1FA6, 'M', 'ὦι'), + (0x1FA7, 'M', 'ὧι'), + (0x1FA8, 'M', 'ὠι'), + (0x1FA9, 'M', 'ὡι'), + (0x1FAA, 'M', 'ὢι'), + (0x1FAB, 'M', 'ὣι'), + (0x1FAC, 'M', 'ὤι'), + (0x1FAD, 'M', 'ὥι'), + (0x1FAE, 'M', 'ὦι'), + (0x1FAF, 'M', 'ὧι'), (0x1FB0, 'V'), - (0x1FB2, 'M', u'ὰι'), - (0x1FB3, 'M', u'αι'), - (0x1FB4, 'M', u'άι'), + (0x1FB2, 'M', 'ὰι'), + (0x1FB3, 'M', 'αι'), + (0x1FB4, 'M', 'άι'), (0x1FB5, 'X'), (0x1FB6, 
'V'), - (0x1FB7, 'M', u'ᾶι'), - (0x1FB8, 'M', u'ᾰ'), - (0x1FB9, 'M', u'ᾱ'), - (0x1FBA, 'M', u'ὰ'), - (0x1FBB, 'M', u'ά'), - (0x1FBC, 'M', u'αι'), - (0x1FBD, '3', u' ̓'), - (0x1FBE, 'M', u'ι'), - (0x1FBF, '3', u' ̓'), - (0x1FC0, '3', u' ͂'), - (0x1FC1, '3', u' ̈͂'), - (0x1FC2, 'M', u'ὴι'), - (0x1FC3, 'M', u'ηι'), - (0x1FC4, 'M', u'ήι'), + (0x1FB7, 'M', 'ᾶι'), + (0x1FB8, 'M', 'ᾰ'), + (0x1FB9, 'M', 'ᾱ'), + (0x1FBA, 'M', 'ὰ'), + (0x1FBB, 'M', 'ά'), + (0x1FBC, 'M', 'αι'), + (0x1FBD, '3', ' ̓'), + (0x1FBE, 'M', 'ι'), + (0x1FBF, '3', ' ̓'), + (0x1FC0, '3', ' ͂'), + (0x1FC1, '3', ' ̈͂'), + (0x1FC2, 'M', 'ὴι'), + (0x1FC3, 'M', 'ηι'), + (0x1FC4, 'M', 'ήι'), (0x1FC5, 'X'), (0x1FC6, 'V'), - (0x1FC7, 'M', u'ῆι'), - (0x1FC8, 'M', u'ὲ'), - (0x1FC9, 'M', u'έ'), - (0x1FCA, 'M', u'ὴ'), - (0x1FCB, 'M', u'ή'), - (0x1FCC, 'M', u'ηι'), - (0x1FCD, '3', u' ̓̀'), - (0x1FCE, '3', u' ̓́'), - (0x1FCF, '3', u' ̓͂'), + (0x1FC7, 'M', 'ῆι'), + (0x1FC8, 'M', 'ὲ'), + (0x1FC9, 'M', 'έ'), + (0x1FCA, 'M', 'ὴ'), + (0x1FCB, 'M', 'ή'), + (0x1FCC, 'M', 'ηι'), + (0x1FCD, '3', ' ̓̀'), + (0x1FCE, '3', ' ̓́'), + (0x1FCF, '3', ' ̓͂'), (0x1FD0, 'V'), - (0x1FD3, 'M', u'ΐ'), + (0x1FD3, 'M', 'ΐ'), (0x1FD4, 'X'), (0x1FD6, 'V'), - (0x1FD8, 'M', u'ῐ'), - (0x1FD9, 'M', u'ῑ'), - (0x1FDA, 'M', u'ὶ'), - (0x1FDB, 'M', u'ί'), + (0x1FD8, 'M', 'ῐ'), + (0x1FD9, 'M', 'ῑ'), + (0x1FDA, 'M', 'ὶ'), + (0x1FDB, 'M', 'ί'), (0x1FDC, 'X'), - (0x1FDD, '3', u' ̔̀'), - (0x1FDE, '3', u' ̔́'), - (0x1FDF, '3', u' ̔͂'), + (0x1FDD, '3', ' ̔̀'), + (0x1FDE, '3', ' ̔́'), + (0x1FDF, '3', ' ̔͂'), (0x1FE0, 'V'), - (0x1FE3, 'M', u'ΰ'), + (0x1FE3, 'M', 'ΰ'), (0x1FE4, 'V'), - (0x1FE8, 'M', u'ῠ'), - (0x1FE9, 'M', u'ῡ'), - (0x1FEA, 'M', u'ὺ'), - (0x1FEB, 'M', u'ύ'), - (0x1FEC, 'M', u'ῥ'), - (0x1FED, '3', u' ̈̀'), - (0x1FEE, '3', u' ̈́'), - (0x1FEF, '3', u'`'), + (0x1FE8, 'M', 'ῠ'), + (0x1FE9, 'M', 'ῡ'), + (0x1FEA, 'M', 'ὺ'), + (0x1FEB, 'M', 'ύ'), + (0x1FEC, 'M', 'ῥ'), + (0x1FED, '3', ' ̈̀'), + (0x1FEE, '3', ' ̈́'), + (0x1FEF, '3', '`'), (0x1FF0, 'X'), - (0x1FF2, 'M', u'ὼι'), - (0x1FF3, 'M', u'ωι'), - (0x1FF4, 'M', u'ώι'), + (0x1FF2, 'M', 'ὼι'), + (0x1FF3, 'M', 'ωι'), + (0x1FF4, 'M', 'ώι'), (0x1FF5, 'X'), (0x1FF6, 'V'), - (0x1FF7, 'M', u'ῶι'), - (0x1FF8, 'M', u'ὸ'), - (0x1FF9, 'M', u'ό'), - (0x1FFA, 'M', u'ὼ'), - (0x1FFB, 'M', u'ώ'), - (0x1FFC, 'M', u'ωι'), - (0x1FFD, '3', u' ́'), - (0x1FFE, '3', u' ̔'), + ] + +def _seg_21() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1FF7, 'M', 'ῶι'), + (0x1FF8, 'M', 'ὸ'), + (0x1FF9, 'M', 'ό'), + (0x1FFA, 'M', 'ὼ'), + (0x1FFB, 'M', 'ώ'), + (0x1FFC, 'M', 'ωι'), + (0x1FFD, '3', ' ́'), + (0x1FFE, '3', ' ̔'), (0x1FFF, 'X'), - (0x2000, '3', u' '), + (0x2000, '3', ' '), (0x200B, 'I'), - (0x200C, 'D', u''), + (0x200C, 'D', ''), (0x200E, 'X'), (0x2010, 'V'), - (0x2011, 'M', u'‐'), + (0x2011, 'M', '‐'), (0x2012, 'V'), - (0x2017, '3', u' ̳'), + (0x2017, '3', ' ̳'), (0x2018, 'V'), (0x2024, 'X'), (0x2027, 'V'), (0x2028, 'X'), - (0x202F, '3', u' '), + (0x202F, '3', ' '), (0x2030, 'V'), - (0x2033, 'M', u'′′'), - (0x2034, 'M', u'′′′'), + (0x2033, 'M', '′′'), + (0x2034, 'M', '′′′'), (0x2035, 'V'), - (0x2036, 'M', u'‵‵'), - (0x2037, 'M', u'‵‵‵'), - ] - -def _seg_21(): - return [ + (0x2036, 'M', '‵‵'), + (0x2037, 'M', '‵‵‵'), (0x2038, 'V'), - (0x203C, '3', u'!!'), + (0x203C, '3', '!!'), (0x203D, 'V'), - (0x203E, '3', u' ̅'), + (0x203E, '3', ' ̅'), (0x203F, 'V'), - (0x2047, '3', u'??'), - (0x2048, '3', u'?!'), - (0x2049, '3', u'!?'), + (0x2047, '3', '??'), + (0x2048, '3', '?!'), + (0x2049, '3', '!?'), (0x204A, 'V'), - (0x2057, 'M', 
u'′′′′'), + (0x2057, 'M', '′′′′'), (0x2058, 'V'), - (0x205F, '3', u' '), + (0x205F, '3', ' '), (0x2060, 'I'), (0x2061, 'X'), (0x2064, 'I'), (0x2065, 'X'), - (0x2070, 'M', u'0'), - (0x2071, 'M', u'i'), + (0x2070, 'M', '0'), + (0x2071, 'M', 'i'), (0x2072, 'X'), - (0x2074, 'M', u'4'), - (0x2075, 'M', u'5'), - (0x2076, 'M', u'6'), - (0x2077, 'M', u'7'), - (0x2078, 'M', u'8'), - (0x2079, 'M', u'9'), - (0x207A, '3', u'+'), - (0x207B, 'M', u'−'), - (0x207C, '3', u'='), - (0x207D, '3', u'('), - (0x207E, '3', u')'), - (0x207F, 'M', u'n'), - (0x2080, 'M', u'0'), - (0x2081, 'M', u'1'), - (0x2082, 'M', u'2'), - (0x2083, 'M', u'3'), - (0x2084, 'M', u'4'), - (0x2085, 'M', u'5'), - (0x2086, 'M', u'6'), - (0x2087, 'M', u'7'), - (0x2088, 'M', u'8'), - (0x2089, 'M', u'9'), - (0x208A, '3', u'+'), - (0x208B, 'M', u'−'), - (0x208C, '3', u'='), - (0x208D, '3', u'('), - (0x208E, '3', u')'), + (0x2074, 'M', '4'), + (0x2075, 'M', '5'), + (0x2076, 'M', '6'), + (0x2077, 'M', '7'), + (0x2078, 'M', '8'), + (0x2079, 'M', '9'), + (0x207A, '3', '+'), + (0x207B, 'M', '−'), + (0x207C, '3', '='), + (0x207D, '3', '('), + (0x207E, '3', ')'), + (0x207F, 'M', 'n'), + (0x2080, 'M', '0'), + (0x2081, 'M', '1'), + (0x2082, 'M', '2'), + (0x2083, 'M', '3'), + (0x2084, 'M', '4'), + (0x2085, 'M', '5'), + (0x2086, 'M', '6'), + (0x2087, 'M', '7'), + (0x2088, 'M', '8'), + (0x2089, 'M', '9'), + (0x208A, '3', '+'), + (0x208B, 'M', '−'), + (0x208C, '3', '='), + (0x208D, '3', '('), + (0x208E, '3', ')'), (0x208F, 'X'), - (0x2090, 'M', u'a'), - (0x2091, 'M', u'e'), - (0x2092, 'M', u'o'), - (0x2093, 'M', u'x'), - (0x2094, 'M', u'ə'), - (0x2095, 'M', u'h'), - (0x2096, 'M', u'k'), - (0x2097, 'M', u'l'), - (0x2098, 'M', u'm'), - (0x2099, 'M', u'n'), - (0x209A, 'M', u'p'), - (0x209B, 'M', u's'), - (0x209C, 'M', u't'), + (0x2090, 'M', 'a'), + (0x2091, 'M', 'e'), + (0x2092, 'M', 'o'), + (0x2093, 'M', 'x'), + (0x2094, 'M', 'ə'), + (0x2095, 'M', 'h'), + (0x2096, 'M', 'k'), + (0x2097, 'M', 'l'), + (0x2098, 'M', 'm'), + (0x2099, 'M', 'n'), + (0x209A, 'M', 'p'), + (0x209B, 'M', 's'), + (0x209C, 'M', 't'), (0x209D, 'X'), (0x20A0, 'V'), - (0x20A8, 'M', u'rs'), + (0x20A8, 'M', 'rs'), (0x20A9, 'V'), - (0x20C0, 'X'), + (0x20C1, 'X'), (0x20D0, 'V'), (0x20F1, 'X'), - (0x2100, '3', u'a/c'), - (0x2101, '3', u'a/s'), - (0x2102, 'M', u'c'), - (0x2103, 'M', u'°c'), + (0x2100, '3', 'a/c'), + (0x2101, '3', 'a/s'), + (0x2102, 'M', 'c'), + (0x2103, 'M', '°c'), (0x2104, 'V'), - (0x2105, '3', u'c/o'), - (0x2106, '3', u'c/u'), - (0x2107, 'M', u'ɛ'), + ] + +def _seg_22() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2105, '3', 'c/o'), + (0x2106, '3', 'c/u'), + (0x2107, 'M', 'ɛ'), (0x2108, 'V'), - (0x2109, 'M', u'°f'), - (0x210A, 'M', u'g'), - (0x210B, 'M', u'h'), - (0x210F, 'M', u'ħ'), - (0x2110, 'M', u'i'), - (0x2112, 'M', u'l'), + (0x2109, 'M', '°f'), + (0x210A, 'M', 'g'), + (0x210B, 'M', 'h'), + (0x210F, 'M', 'ħ'), + (0x2110, 'M', 'i'), + (0x2112, 'M', 'l'), (0x2114, 'V'), - (0x2115, 'M', u'n'), - (0x2116, 'M', u'no'), + (0x2115, 'M', 'n'), + (0x2116, 'M', 'no'), (0x2117, 'V'), - (0x2119, 'M', u'p'), - (0x211A, 'M', u'q'), - (0x211B, 'M', u'r'), + (0x2119, 'M', 'p'), + (0x211A, 'M', 'q'), + (0x211B, 'M', 'r'), (0x211E, 'V'), - (0x2120, 'M', u'sm'), - (0x2121, 'M', u'tel'), - (0x2122, 'M', u'tm'), + (0x2120, 'M', 'sm'), + (0x2121, 'M', 'tel'), + (0x2122, 'M', 'tm'), (0x2123, 'V'), - (0x2124, 'M', u'z'), + (0x2124, 'M', 'z'), (0x2125, 'V'), - (0x2126, 'M', u'ω'), + (0x2126, 'M', 'ω'), (0x2127, 'V'), - (0x2128, 'M', u'z'), + (0x2128, 'M', 'z'), 
(0x2129, 'V'), - ] - -def _seg_22(): - return [ - (0x212A, 'M', u'k'), - (0x212B, 'M', u'å'), - (0x212C, 'M', u'b'), - (0x212D, 'M', u'c'), + (0x212A, 'M', 'k'), + (0x212B, 'M', 'å'), + (0x212C, 'M', 'b'), + (0x212D, 'M', 'c'), (0x212E, 'V'), - (0x212F, 'M', u'e'), - (0x2131, 'M', u'f'), + (0x212F, 'M', 'e'), + (0x2131, 'M', 'f'), (0x2132, 'X'), - (0x2133, 'M', u'm'), - (0x2134, 'M', u'o'), - (0x2135, 'M', u'א'), - (0x2136, 'M', u'ב'), - (0x2137, 'M', u'ג'), - (0x2138, 'M', u'ד'), - (0x2139, 'M', u'i'), + (0x2133, 'M', 'm'), + (0x2134, 'M', 'o'), + (0x2135, 'M', 'א'), + (0x2136, 'M', 'ב'), + (0x2137, 'M', 'ג'), + (0x2138, 'M', 'ד'), + (0x2139, 'M', 'i'), (0x213A, 'V'), - (0x213B, 'M', u'fax'), - (0x213C, 'M', u'π'), - (0x213D, 'M', u'γ'), - (0x213F, 'M', u'π'), - (0x2140, 'M', u'∑'), + (0x213B, 'M', 'fax'), + (0x213C, 'M', 'π'), + (0x213D, 'M', 'γ'), + (0x213F, 'M', 'π'), + (0x2140, 'M', '∑'), (0x2141, 'V'), - (0x2145, 'M', u'd'), - (0x2147, 'M', u'e'), - (0x2148, 'M', u'i'), - (0x2149, 'M', u'j'), + (0x2145, 'M', 'd'), + (0x2147, 'M', 'e'), + (0x2148, 'M', 'i'), + (0x2149, 'M', 'j'), (0x214A, 'V'), - (0x2150, 'M', u'1⁄7'), - (0x2151, 'M', u'1⁄9'), - (0x2152, 'M', u'1⁄10'), - (0x2153, 'M', u'1⁄3'), - (0x2154, 'M', u'2⁄3'), - (0x2155, 'M', u'1⁄5'), - (0x2156, 'M', u'2⁄5'), - (0x2157, 'M', u'3⁄5'), - (0x2158, 'M', u'4⁄5'), - (0x2159, 'M', u'1⁄6'), - (0x215A, 'M', u'5⁄6'), - (0x215B, 'M', u'1⁄8'), - (0x215C, 'M', u'3⁄8'), - (0x215D, 'M', u'5⁄8'), - (0x215E, 'M', u'7⁄8'), - (0x215F, 'M', u'1⁄'), - (0x2160, 'M', u'i'), - (0x2161, 'M', u'ii'), - (0x2162, 'M', u'iii'), - (0x2163, 'M', u'iv'), - (0x2164, 'M', u'v'), - (0x2165, 'M', u'vi'), - (0x2166, 'M', u'vii'), - (0x2167, 'M', u'viii'), - (0x2168, 'M', u'ix'), - (0x2169, 'M', u'x'), - (0x216A, 'M', u'xi'), - (0x216B, 'M', u'xii'), - (0x216C, 'M', u'l'), - (0x216D, 'M', u'c'), - (0x216E, 'M', u'd'), - (0x216F, 'M', u'm'), - (0x2170, 'M', u'i'), - (0x2171, 'M', u'ii'), - (0x2172, 'M', u'iii'), - (0x2173, 'M', u'iv'), - (0x2174, 'M', u'v'), - (0x2175, 'M', u'vi'), - (0x2176, 'M', u'vii'), - (0x2177, 'M', u'viii'), - (0x2178, 'M', u'ix'), - (0x2179, 'M', u'x'), - (0x217A, 'M', u'xi'), - (0x217B, 'M', u'xii'), - (0x217C, 'M', u'l'), - (0x217D, 'M', u'c'), - (0x217E, 'M', u'd'), - (0x217F, 'M', u'm'), + (0x2150, 'M', '1⁄7'), + (0x2151, 'M', '1⁄9'), + (0x2152, 'M', '1⁄10'), + (0x2153, 'M', '1⁄3'), + (0x2154, 'M', '2⁄3'), + (0x2155, 'M', '1⁄5'), + (0x2156, 'M', '2⁄5'), + (0x2157, 'M', '3⁄5'), + (0x2158, 'M', '4⁄5'), + (0x2159, 'M', '1⁄6'), + (0x215A, 'M', '5⁄6'), + (0x215B, 'M', '1⁄8'), + (0x215C, 'M', '3⁄8'), + (0x215D, 'M', '5⁄8'), + (0x215E, 'M', '7⁄8'), + (0x215F, 'M', '1⁄'), + (0x2160, 'M', 'i'), + (0x2161, 'M', 'ii'), + (0x2162, 'M', 'iii'), + (0x2163, 'M', 'iv'), + (0x2164, 'M', 'v'), + (0x2165, 'M', 'vi'), + (0x2166, 'M', 'vii'), + (0x2167, 'M', 'viii'), + (0x2168, 'M', 'ix'), + (0x2169, 'M', 'x'), + (0x216A, 'M', 'xi'), + (0x216B, 'M', 'xii'), + (0x216C, 'M', 'l'), + (0x216D, 'M', 'c'), + (0x216E, 'M', 'd'), + (0x216F, 'M', 'm'), + (0x2170, 'M', 'i'), + (0x2171, 'M', 'ii'), + (0x2172, 'M', 'iii'), + (0x2173, 'M', 'iv'), + (0x2174, 'M', 'v'), + (0x2175, 'M', 'vi'), + (0x2176, 'M', 'vii'), + (0x2177, 'M', 'viii'), + (0x2178, 'M', 'ix'), + (0x2179, 'M', 'x'), + (0x217A, 'M', 'xi'), + (0x217B, 'M', 'xii'), + (0x217C, 'M', 'l'), + ] + +def _seg_23() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x217D, 'M', 'c'), + (0x217E, 'M', 'd'), + (0x217F, 'M', 'm'), (0x2180, 'V'), (0x2183, 'X'), (0x2184, 'V'), - (0x2189, 'M', u'0⁄3'), 
+ (0x2189, 'M', '0⁄3'), (0x218A, 'V'), (0x218C, 'X'), (0x2190, 'V'), - (0x222C, 'M', u'∫∫'), - (0x222D, 'M', u'∫∫∫'), + (0x222C, 'M', '∫∫'), + (0x222D, 'M', '∫∫∫'), (0x222E, 'V'), - (0x222F, 'M', u'∮∮'), - (0x2230, 'M', u'∮∮∮'), + (0x222F, 'M', '∮∮'), + (0x2230, 'M', '∮∮∮'), (0x2231, 'V'), (0x2260, '3'), (0x2261, 'V'), (0x226E, '3'), (0x2270, 'V'), - (0x2329, 'M', u'〈'), - (0x232A, 'M', u'〉'), + (0x2329, 'M', '〈'), + (0x232A, 'M', '〉'), (0x232B, 'V'), (0x2427, 'X'), (0x2440, 'V'), (0x244B, 'X'), - (0x2460, 'M', u'1'), - (0x2461, 'M', u'2'), - ] - -def _seg_23(): - return [ - (0x2462, 'M', u'3'), - (0x2463, 'M', u'4'), - (0x2464, 'M', u'5'), - (0x2465, 'M', u'6'), - (0x2466, 'M', u'7'), - (0x2467, 'M', u'8'), - (0x2468, 'M', u'9'), - (0x2469, 'M', u'10'), - (0x246A, 'M', u'11'), - (0x246B, 'M', u'12'), - (0x246C, 'M', u'13'), - (0x246D, 'M', u'14'), - (0x246E, 'M', u'15'), - (0x246F, 'M', u'16'), - (0x2470, 'M', u'17'), - (0x2471, 'M', u'18'), - (0x2472, 'M', u'19'), - (0x2473, 'M', u'20'), - (0x2474, '3', u'(1)'), - (0x2475, '3', u'(2)'), - (0x2476, '3', u'(3)'), - (0x2477, '3', u'(4)'), - (0x2478, '3', u'(5)'), - (0x2479, '3', u'(6)'), - (0x247A, '3', u'(7)'), - (0x247B, '3', u'(8)'), - (0x247C, '3', u'(9)'), - (0x247D, '3', u'(10)'), - (0x247E, '3', u'(11)'), - (0x247F, '3', u'(12)'), - (0x2480, '3', u'(13)'), - (0x2481, '3', u'(14)'), - (0x2482, '3', u'(15)'), - (0x2483, '3', u'(16)'), - (0x2484, '3', u'(17)'), - (0x2485, '3', u'(18)'), - (0x2486, '3', u'(19)'), - (0x2487, '3', u'(20)'), + (0x2460, 'M', '1'), + (0x2461, 'M', '2'), + (0x2462, 'M', '3'), + (0x2463, 'M', '4'), + (0x2464, 'M', '5'), + (0x2465, 'M', '6'), + (0x2466, 'M', '7'), + (0x2467, 'M', '8'), + (0x2468, 'M', '9'), + (0x2469, 'M', '10'), + (0x246A, 'M', '11'), + (0x246B, 'M', '12'), + (0x246C, 'M', '13'), + (0x246D, 'M', '14'), + (0x246E, 'M', '15'), + (0x246F, 'M', '16'), + (0x2470, 'M', '17'), + (0x2471, 'M', '18'), + (0x2472, 'M', '19'), + (0x2473, 'M', '20'), + (0x2474, '3', '(1)'), + (0x2475, '3', '(2)'), + (0x2476, '3', '(3)'), + (0x2477, '3', '(4)'), + (0x2478, '3', '(5)'), + (0x2479, '3', '(6)'), + (0x247A, '3', '(7)'), + (0x247B, '3', '(8)'), + (0x247C, '3', '(9)'), + (0x247D, '3', '(10)'), + (0x247E, '3', '(11)'), + (0x247F, '3', '(12)'), + (0x2480, '3', '(13)'), + (0x2481, '3', '(14)'), + (0x2482, '3', '(15)'), + (0x2483, '3', '(16)'), + (0x2484, '3', '(17)'), + (0x2485, '3', '(18)'), + (0x2486, '3', '(19)'), + (0x2487, '3', '(20)'), (0x2488, 'X'), - (0x249C, '3', u'(a)'), - (0x249D, '3', u'(b)'), - (0x249E, '3', u'(c)'), - (0x249F, '3', u'(d)'), - (0x24A0, '3', u'(e)'), - (0x24A1, '3', u'(f)'), - (0x24A2, '3', u'(g)'), - (0x24A3, '3', u'(h)'), - (0x24A4, '3', u'(i)'), - (0x24A5, '3', u'(j)'), - (0x24A6, '3', u'(k)'), - (0x24A7, '3', u'(l)'), - (0x24A8, '3', u'(m)'), - (0x24A9, '3', u'(n)'), - (0x24AA, '3', u'(o)'), - (0x24AB, '3', u'(p)'), - (0x24AC, '3', u'(q)'), - (0x24AD, '3', u'(r)'), - (0x24AE, '3', u'(s)'), - (0x24AF, '3', u'(t)'), - (0x24B0, '3', u'(u)'), - (0x24B1, '3', u'(v)'), - (0x24B2, '3', u'(w)'), - (0x24B3, '3', u'(x)'), - (0x24B4, '3', u'(y)'), - (0x24B5, '3', u'(z)'), - (0x24B6, 'M', u'a'), - (0x24B7, 'M', u'b'), - (0x24B8, 'M', u'c'), - (0x24B9, 'M', u'd'), - (0x24BA, 'M', u'e'), - (0x24BB, 'M', u'f'), - (0x24BC, 'M', u'g'), - (0x24BD, 'M', u'h'), - (0x24BE, 'M', u'i'), - (0x24BF, 'M', u'j'), - (0x24C0, 'M', u'k'), - (0x24C1, 'M', u'l'), - (0x24C2, 'M', u'm'), - (0x24C3, 'M', u'n'), - (0x24C4, 'M', u'o'), - (0x24C5, 'M', u'p'), - (0x24C6, 'M', u'q'), - (0x24C7, 'M', u'r'), - (0x24C8, 'M', 
u's'), - (0x24C9, 'M', u't'), - (0x24CA, 'M', u'u'), - (0x24CB, 'M', u'v'), - (0x24CC, 'M', u'w'), - (0x24CD, 'M', u'x'), - (0x24CE, 'M', u'y'), - (0x24CF, 'M', u'z'), - (0x24D0, 'M', u'a'), - (0x24D1, 'M', u'b'), - (0x24D2, 'M', u'c'), - (0x24D3, 'M', u'd'), - (0x24D4, 'M', u'e'), - (0x24D5, 'M', u'f'), - (0x24D6, 'M', u'g'), - (0x24D7, 'M', u'h'), - (0x24D8, 'M', u'i'), + (0x249C, '3', '(a)'), + (0x249D, '3', '(b)'), + (0x249E, '3', '(c)'), + (0x249F, '3', '(d)'), + (0x24A0, '3', '(e)'), + (0x24A1, '3', '(f)'), + (0x24A2, '3', '(g)'), + (0x24A3, '3', '(h)'), + (0x24A4, '3', '(i)'), + (0x24A5, '3', '(j)'), + (0x24A6, '3', '(k)'), + (0x24A7, '3', '(l)'), + (0x24A8, '3', '(m)'), + (0x24A9, '3', '(n)'), + (0x24AA, '3', '(o)'), + (0x24AB, '3', '(p)'), + (0x24AC, '3', '(q)'), + (0x24AD, '3', '(r)'), + (0x24AE, '3', '(s)'), + (0x24AF, '3', '(t)'), + (0x24B0, '3', '(u)'), + (0x24B1, '3', '(v)'), + (0x24B2, '3', '(w)'), + (0x24B3, '3', '(x)'), + (0x24B4, '3', '(y)'), + (0x24B5, '3', '(z)'), + (0x24B6, 'M', 'a'), + (0x24B7, 'M', 'b'), + (0x24B8, 'M', 'c'), + (0x24B9, 'M', 'd'), + (0x24BA, 'M', 'e'), + (0x24BB, 'M', 'f'), + (0x24BC, 'M', 'g'), ] -def _seg_24(): +def _seg_24() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: return [ - (0x24D9, 'M', u'j'), - (0x24DA, 'M', u'k'), - (0x24DB, 'M', u'l'), - (0x24DC, 'M', u'm'), - (0x24DD, 'M', u'n'), - (0x24DE, 'M', u'o'), - (0x24DF, 'M', u'p'), - (0x24E0, 'M', u'q'), - (0x24E1, 'M', u'r'), - (0x24E2, 'M', u's'), - (0x24E3, 'M', u't'), - (0x24E4, 'M', u'u'), - (0x24E5, 'M', u'v'), - (0x24E6, 'M', u'w'), - (0x24E7, 'M', u'x'), - (0x24E8, 'M', u'y'), - (0x24E9, 'M', u'z'), - (0x24EA, 'M', u'0'), + (0x24BD, 'M', 'h'), + (0x24BE, 'M', 'i'), + (0x24BF, 'M', 'j'), + (0x24C0, 'M', 'k'), + (0x24C1, 'M', 'l'), + (0x24C2, 'M', 'm'), + (0x24C3, 'M', 'n'), + (0x24C4, 'M', 'o'), + (0x24C5, 'M', 'p'), + (0x24C6, 'M', 'q'), + (0x24C7, 'M', 'r'), + (0x24C8, 'M', 's'), + (0x24C9, 'M', 't'), + (0x24CA, 'M', 'u'), + (0x24CB, 'M', 'v'), + (0x24CC, 'M', 'w'), + (0x24CD, 'M', 'x'), + (0x24CE, 'M', 'y'), + (0x24CF, 'M', 'z'), + (0x24D0, 'M', 'a'), + (0x24D1, 'M', 'b'), + (0x24D2, 'M', 'c'), + (0x24D3, 'M', 'd'), + (0x24D4, 'M', 'e'), + (0x24D5, 'M', 'f'), + (0x24D6, 'M', 'g'), + (0x24D7, 'M', 'h'), + (0x24D8, 'M', 'i'), + (0x24D9, 'M', 'j'), + (0x24DA, 'M', 'k'), + (0x24DB, 'M', 'l'), + (0x24DC, 'M', 'm'), + (0x24DD, 'M', 'n'), + (0x24DE, 'M', 'o'), + (0x24DF, 'M', 'p'), + (0x24E0, 'M', 'q'), + (0x24E1, 'M', 'r'), + (0x24E2, 'M', 's'), + (0x24E3, 'M', 't'), + (0x24E4, 'M', 'u'), + (0x24E5, 'M', 'v'), + (0x24E6, 'M', 'w'), + (0x24E7, 'M', 'x'), + (0x24E8, 'M', 'y'), + (0x24E9, 'M', 'z'), + (0x24EA, 'M', '0'), (0x24EB, 'V'), - (0x2A0C, 'M', u'∫∫∫∫'), + (0x2A0C, 'M', '∫∫∫∫'), (0x2A0D, 'V'), - (0x2A74, '3', u'::='), - (0x2A75, '3', u'=='), - (0x2A76, '3', u'==='), + (0x2A74, '3', '::='), + (0x2A75, '3', '=='), + (0x2A76, '3', '==='), (0x2A77, 'V'), - (0x2ADC, 'M', u'⫝̸'), + (0x2ADC, 'M', '⫝̸'), (0x2ADD, 'V'), (0x2B74, 'X'), (0x2B76, 'V'), (0x2B96, 'X'), - (0x2B98, 'V'), - (0x2BC9, 'X'), - (0x2BCA, 'V'), - (0x2BFF, 'X'), - (0x2C00, 'M', u'ⰰ'), - (0x2C01, 'M', u'ⰱ'), - (0x2C02, 'M', u'ⰲ'), - (0x2C03, 'M', u'ⰳ'), - (0x2C04, 'M', u'ⰴ'), - (0x2C05, 'M', u'ⰵ'), - (0x2C06, 'M', u'ⰶ'), - (0x2C07, 'M', u'ⰷ'), - (0x2C08, 'M', u'ⰸ'), - (0x2C09, 'M', u'ⰹ'), - (0x2C0A, 'M', u'ⰺ'), - (0x2C0B, 'M', u'ⰻ'), - (0x2C0C, 'M', u'ⰼ'), - (0x2C0D, 'M', u'ⰽ'), - (0x2C0E, 'M', u'ⰾ'), - (0x2C0F, 'M', u'ⰿ'), - (0x2C10, 'M', u'ⱀ'), - (0x2C11, 'M', u'ⱁ'), - (0x2C12, 'M', u'ⱂ'), - (0x2C13, 'M', u'ⱃ'), - 
(0x2C14, 'M', u'ⱄ'), - (0x2C15, 'M', u'ⱅ'), - (0x2C16, 'M', u'ⱆ'), - (0x2C17, 'M', u'ⱇ'), - (0x2C18, 'M', u'ⱈ'), - (0x2C19, 'M', u'ⱉ'), - (0x2C1A, 'M', u'ⱊ'), - (0x2C1B, 'M', u'ⱋ'), - (0x2C1C, 'M', u'ⱌ'), - (0x2C1D, 'M', u'ⱍ'), - (0x2C1E, 'M', u'ⱎ'), - (0x2C1F, 'M', u'ⱏ'), - (0x2C20, 'M', u'ⱐ'), - (0x2C21, 'M', u'ⱑ'), - (0x2C22, 'M', u'ⱒ'), - (0x2C23, 'M', u'ⱓ'), - (0x2C24, 'M', u'ⱔ'), - (0x2C25, 'M', u'ⱕ'), - (0x2C26, 'M', u'ⱖ'), - (0x2C27, 'M', u'ⱗ'), - (0x2C28, 'M', u'ⱘ'), - (0x2C29, 'M', u'ⱙ'), - (0x2C2A, 'M', u'ⱚ'), - (0x2C2B, 'M', u'ⱛ'), - (0x2C2C, 'M', u'ⱜ'), - (0x2C2D, 'M', u'ⱝ'), - (0x2C2E, 'M', u'ⱞ'), - (0x2C2F, 'X'), + (0x2B97, 'V'), + (0x2C00, 'M', 'ⰰ'), + (0x2C01, 'M', 'ⰱ'), + (0x2C02, 'M', 'ⰲ'), + (0x2C03, 'M', 'ⰳ'), + (0x2C04, 'M', 'ⰴ'), + (0x2C05, 'M', 'ⰵ'), + (0x2C06, 'M', 'ⰶ'), + (0x2C07, 'M', 'ⰷ'), + (0x2C08, 'M', 'ⰸ'), + (0x2C09, 'M', 'ⰹ'), + (0x2C0A, 'M', 'ⰺ'), + (0x2C0B, 'M', 'ⰻ'), + (0x2C0C, 'M', 'ⰼ'), + (0x2C0D, 'M', 'ⰽ'), + (0x2C0E, 'M', 'ⰾ'), + (0x2C0F, 'M', 'ⰿ'), + (0x2C10, 'M', 'ⱀ'), + (0x2C11, 'M', 'ⱁ'), + (0x2C12, 'M', 'ⱂ'), + (0x2C13, 'M', 'ⱃ'), + (0x2C14, 'M', 'ⱄ'), + (0x2C15, 'M', 'ⱅ'), + (0x2C16, 'M', 'ⱆ'), + (0x2C17, 'M', 'ⱇ'), + (0x2C18, 'M', 'ⱈ'), + (0x2C19, 'M', 'ⱉ'), + (0x2C1A, 'M', 'ⱊ'), + (0x2C1B, 'M', 'ⱋ'), + (0x2C1C, 'M', 'ⱌ'), + (0x2C1D, 'M', 'ⱍ'), + (0x2C1E, 'M', 'ⱎ'), + (0x2C1F, 'M', 'ⱏ'), + (0x2C20, 'M', 'ⱐ'), + (0x2C21, 'M', 'ⱑ'), + (0x2C22, 'M', 'ⱒ'), + (0x2C23, 'M', 'ⱓ'), + (0x2C24, 'M', 'ⱔ'), + (0x2C25, 'M', 'ⱕ'), + (0x2C26, 'M', 'ⱖ'), + (0x2C27, 'M', 'ⱗ'), + (0x2C28, 'M', 'ⱘ'), + ] + +def _seg_25() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x2C29, 'M', 'ⱙ'), + (0x2C2A, 'M', 'ⱚ'), + (0x2C2B, 'M', 'ⱛ'), + (0x2C2C, 'M', 'ⱜ'), + (0x2C2D, 'M', 'ⱝ'), + (0x2C2E, 'M', 'ⱞ'), + (0x2C2F, 'M', 'ⱟ'), (0x2C30, 'V'), - (0x2C5F, 'X'), - (0x2C60, 'M', u'ⱡ'), + (0x2C60, 'M', 'ⱡ'), (0x2C61, 'V'), - (0x2C62, 'M', u'ɫ'), - (0x2C63, 'M', u'ᵽ'), - (0x2C64, 'M', u'ɽ'), + (0x2C62, 'M', 'ɫ'), + (0x2C63, 'M', 'ᵽ'), + (0x2C64, 'M', 'ɽ'), (0x2C65, 'V'), - (0x2C67, 'M', u'ⱨ'), + (0x2C67, 'M', 'ⱨ'), (0x2C68, 'V'), - (0x2C69, 'M', u'ⱪ'), + (0x2C69, 'M', 'ⱪ'), (0x2C6A, 'V'), - (0x2C6B, 'M', u'ⱬ'), + (0x2C6B, 'M', 'ⱬ'), (0x2C6C, 'V'), - (0x2C6D, 'M', u'ɑ'), - (0x2C6E, 'M', u'ɱ'), - (0x2C6F, 'M', u'ɐ'), - (0x2C70, 'M', u'ɒ'), - ] - -def _seg_25(): - return [ + (0x2C6D, 'M', 'ɑ'), + (0x2C6E, 'M', 'ɱ'), + (0x2C6F, 'M', 'ɐ'), + (0x2C70, 'M', 'ɒ'), (0x2C71, 'V'), - (0x2C72, 'M', u'ⱳ'), + (0x2C72, 'M', 'ⱳ'), (0x2C73, 'V'), - (0x2C75, 'M', u'ⱶ'), + (0x2C75, 'M', 'ⱶ'), (0x2C76, 'V'), - (0x2C7C, 'M', u'j'), - (0x2C7D, 'M', u'v'), - (0x2C7E, 'M', u'ȿ'), - (0x2C7F, 'M', u'ɀ'), - (0x2C80, 'M', u'ⲁ'), + (0x2C7C, 'M', 'j'), + (0x2C7D, 'M', 'v'), + (0x2C7E, 'M', 'ȿ'), + (0x2C7F, 'M', 'ɀ'), + (0x2C80, 'M', 'ⲁ'), (0x2C81, 'V'), - (0x2C82, 'M', u'ⲃ'), + (0x2C82, 'M', 'ⲃ'), (0x2C83, 'V'), - (0x2C84, 'M', u'ⲅ'), + (0x2C84, 'M', 'ⲅ'), (0x2C85, 'V'), - (0x2C86, 'M', u'ⲇ'), + (0x2C86, 'M', 'ⲇ'), (0x2C87, 'V'), - (0x2C88, 'M', u'ⲉ'), + (0x2C88, 'M', 'ⲉ'), (0x2C89, 'V'), - (0x2C8A, 'M', u'ⲋ'), + (0x2C8A, 'M', 'ⲋ'), (0x2C8B, 'V'), - (0x2C8C, 'M', u'ⲍ'), + (0x2C8C, 'M', 'ⲍ'), (0x2C8D, 'V'), - (0x2C8E, 'M', u'ⲏ'), + (0x2C8E, 'M', 'ⲏ'), (0x2C8F, 'V'), - (0x2C90, 'M', u'ⲑ'), + (0x2C90, 'M', 'ⲑ'), (0x2C91, 'V'), - (0x2C92, 'M', u'ⲓ'), + (0x2C92, 'M', 'ⲓ'), (0x2C93, 'V'), - (0x2C94, 'M', u'ⲕ'), + (0x2C94, 'M', 'ⲕ'), (0x2C95, 'V'), - (0x2C96, 'M', u'ⲗ'), + (0x2C96, 'M', 'ⲗ'), (0x2C97, 'V'), - (0x2C98, 'M', u'ⲙ'), + (0x2C98, 'M', 'ⲙ'), (0x2C99, 'V'), - (0x2C9A, 'M', u'ⲛ'), + 
(0x2C9A, 'M', 'ⲛ'), (0x2C9B, 'V'), - (0x2C9C, 'M', u'ⲝ'), + (0x2C9C, 'M', 'ⲝ'), (0x2C9D, 'V'), - (0x2C9E, 'M', u'ⲟ'), + (0x2C9E, 'M', 'ⲟ'), (0x2C9F, 'V'), - (0x2CA0, 'M', u'ⲡ'), + (0x2CA0, 'M', 'ⲡ'), (0x2CA1, 'V'), - (0x2CA2, 'M', u'ⲣ'), + (0x2CA2, 'M', 'ⲣ'), (0x2CA3, 'V'), - (0x2CA4, 'M', u'ⲥ'), + (0x2CA4, 'M', 'ⲥ'), (0x2CA5, 'V'), - (0x2CA6, 'M', u'ⲧ'), + (0x2CA6, 'M', 'ⲧ'), (0x2CA7, 'V'), - (0x2CA8, 'M', u'ⲩ'), + (0x2CA8, 'M', 'ⲩ'), (0x2CA9, 'V'), - (0x2CAA, 'M', u'ⲫ'), + (0x2CAA, 'M', 'ⲫ'), (0x2CAB, 'V'), - (0x2CAC, 'M', u'ⲭ'), + (0x2CAC, 'M', 'ⲭ'), (0x2CAD, 'V'), - (0x2CAE, 'M', u'ⲯ'), + (0x2CAE, 'M', 'ⲯ'), (0x2CAF, 'V'), - (0x2CB0, 'M', u'ⲱ'), + (0x2CB0, 'M', 'ⲱ'), (0x2CB1, 'V'), - (0x2CB2, 'M', u'ⲳ'), + (0x2CB2, 'M', 'ⲳ'), (0x2CB3, 'V'), - (0x2CB4, 'M', u'ⲵ'), + (0x2CB4, 'M', 'ⲵ'), (0x2CB5, 'V'), - (0x2CB6, 'M', u'ⲷ'), + (0x2CB6, 'M', 'ⲷ'), (0x2CB7, 'V'), - (0x2CB8, 'M', u'ⲹ'), + (0x2CB8, 'M', 'ⲹ'), (0x2CB9, 'V'), - (0x2CBA, 'M', u'ⲻ'), + (0x2CBA, 'M', 'ⲻ'), (0x2CBB, 'V'), - (0x2CBC, 'M', u'ⲽ'), + (0x2CBC, 'M', 'ⲽ'), (0x2CBD, 'V'), - (0x2CBE, 'M', u'ⲿ'), + (0x2CBE, 'M', 'ⲿ'), (0x2CBF, 'V'), - (0x2CC0, 'M', u'ⳁ'), + (0x2CC0, 'M', 'ⳁ'), (0x2CC1, 'V'), - (0x2CC2, 'M', u'ⳃ'), + (0x2CC2, 'M', 'ⳃ'), + ] + +def _seg_26() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x2CC3, 'V'), - (0x2CC4, 'M', u'ⳅ'), + (0x2CC4, 'M', 'ⳅ'), (0x2CC5, 'V'), - (0x2CC6, 'M', u'ⳇ'), + (0x2CC6, 'M', 'ⳇ'), (0x2CC7, 'V'), - (0x2CC8, 'M', u'ⳉ'), + (0x2CC8, 'M', 'ⳉ'), (0x2CC9, 'V'), - (0x2CCA, 'M', u'ⳋ'), + (0x2CCA, 'M', 'ⳋ'), (0x2CCB, 'V'), - (0x2CCC, 'M', u'ⳍ'), + (0x2CCC, 'M', 'ⳍ'), (0x2CCD, 'V'), - (0x2CCE, 'M', u'ⳏ'), + (0x2CCE, 'M', 'ⳏ'), (0x2CCF, 'V'), - (0x2CD0, 'M', u'ⳑ'), + (0x2CD0, 'M', 'ⳑ'), (0x2CD1, 'V'), - (0x2CD2, 'M', u'ⳓ'), + (0x2CD2, 'M', 'ⳓ'), (0x2CD3, 'V'), - (0x2CD4, 'M', u'ⳕ'), + (0x2CD4, 'M', 'ⳕ'), (0x2CD5, 'V'), - (0x2CD6, 'M', u'ⳗ'), + (0x2CD6, 'M', 'ⳗ'), (0x2CD7, 'V'), - (0x2CD8, 'M', u'ⳙ'), + (0x2CD8, 'M', 'ⳙ'), (0x2CD9, 'V'), - (0x2CDA, 'M', u'ⳛ'), - ] - -def _seg_26(): - return [ + (0x2CDA, 'M', 'ⳛ'), (0x2CDB, 'V'), - (0x2CDC, 'M', u'ⳝ'), + (0x2CDC, 'M', 'ⳝ'), (0x2CDD, 'V'), - (0x2CDE, 'M', u'ⳟ'), + (0x2CDE, 'M', 'ⳟ'), (0x2CDF, 'V'), - (0x2CE0, 'M', u'ⳡ'), + (0x2CE0, 'M', 'ⳡ'), (0x2CE1, 'V'), - (0x2CE2, 'M', u'ⳣ'), + (0x2CE2, 'M', 'ⳣ'), (0x2CE3, 'V'), - (0x2CEB, 'M', u'ⳬ'), + (0x2CEB, 'M', 'ⳬ'), (0x2CEC, 'V'), - (0x2CED, 'M', u'ⳮ'), + (0x2CED, 'M', 'ⳮ'), (0x2CEE, 'V'), - (0x2CF2, 'M', u'ⳳ'), + (0x2CF2, 'M', 'ⳳ'), (0x2CF3, 'V'), (0x2CF4, 'X'), (0x2CF9, 'V'), @@ -2735,7 +2762,7 @@ def _seg_26(): (0x2D2E, 'X'), (0x2D30, 'V'), (0x2D68, 'X'), - (0x2D6F, 'M', u'ⵡ'), + (0x2D6F, 'M', 'ⵡ'), (0x2D70, 'V'), (0x2D71, 'X'), (0x2D7F, 'V'), @@ -2757,1154 +2784,1184 @@ def _seg_26(): (0x2DD8, 'V'), (0x2DDF, 'X'), (0x2DE0, 'V'), - (0x2E4F, 'X'), + (0x2E5E, 'X'), (0x2E80, 'V'), (0x2E9A, 'X'), (0x2E9B, 'V'), - (0x2E9F, 'M', u'母'), + (0x2E9F, 'M', '母'), (0x2EA0, 'V'), - (0x2EF3, 'M', u'龟'), + (0x2EF3, 'M', '龟'), (0x2EF4, 'X'), - (0x2F00, 'M', u'一'), - (0x2F01, 'M', u'丨'), - (0x2F02, 'M', u'丶'), - (0x2F03, 'M', u'丿'), - (0x2F04, 'M', u'乙'), - (0x2F05, 'M', u'亅'), - (0x2F06, 'M', u'二'), - (0x2F07, 'M', u'亠'), - (0x2F08, 'M', u'人'), - (0x2F09, 'M', u'儿'), - (0x2F0A, 'M', u'入'), - (0x2F0B, 'M', u'八'), - (0x2F0C, 'M', u'冂'), - (0x2F0D, 'M', u'冖'), - (0x2F0E, 'M', u'冫'), - (0x2F0F, 'M', u'几'), - (0x2F10, 'M', u'凵'), - (0x2F11, 'M', u'刀'), - (0x2F12, 'M', u'力'), - (0x2F13, 'M', u'勹'), - (0x2F14, 'M', u'匕'), - (0x2F15, 'M', u'匚'), - (0x2F16, 'M', u'匸'), - (0x2F17, 'M', u'十'), - (0x2F18, 'M', 
u'卜'), - (0x2F19, 'M', u'卩'), - (0x2F1A, 'M', u'厂'), - (0x2F1B, 'M', u'厶'), - (0x2F1C, 'M', u'又'), - (0x2F1D, 'M', u'口'), - (0x2F1E, 'M', u'囗'), - (0x2F1F, 'M', u'土'), - (0x2F20, 'M', u'士'), - (0x2F21, 'M', u'夂'), - (0x2F22, 'M', u'夊'), - (0x2F23, 'M', u'夕'), - (0x2F24, 'M', u'大'), - (0x2F25, 'M', u'女'), - (0x2F26, 'M', u'子'), - (0x2F27, 'M', u'宀'), - (0x2F28, 'M', u'寸'), - (0x2F29, 'M', u'小'), - (0x2F2A, 'M', u'尢'), - (0x2F2B, 'M', u'尸'), - (0x2F2C, 'M', u'屮'), - (0x2F2D, 'M', u'山'), + (0x2F00, 'M', '一'), + (0x2F01, 'M', '丨'), + (0x2F02, 'M', '丶'), + (0x2F03, 'M', '丿'), + (0x2F04, 'M', '乙'), + (0x2F05, 'M', '亅'), + (0x2F06, 'M', '二'), + (0x2F07, 'M', '亠'), + (0x2F08, 'M', '人'), + (0x2F09, 'M', '儿'), + (0x2F0A, 'M', '入'), + (0x2F0B, 'M', '八'), + (0x2F0C, 'M', '冂'), + (0x2F0D, 'M', '冖'), + (0x2F0E, 'M', '冫'), + (0x2F0F, 'M', '几'), + (0x2F10, 'M', '凵'), + (0x2F11, 'M', '刀'), + (0x2F12, 'M', '力'), + (0x2F13, 'M', '勹'), + (0x2F14, 'M', '匕'), + (0x2F15, 'M', '匚'), ] -def _seg_27(): +def _seg_27() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: return [ - (0x2F2E, 'M', u'巛'), - (0x2F2F, 'M', u'工'), - (0x2F30, 'M', u'己'), - (0x2F31, 'M', u'巾'), - (0x2F32, 'M', u'干'), - (0x2F33, 'M', u'幺'), - (0x2F34, 'M', u'广'), - (0x2F35, 'M', u'廴'), - (0x2F36, 'M', u'廾'), - (0x2F37, 'M', u'弋'), - (0x2F38, 'M', u'弓'), - (0x2F39, 'M', u'彐'), - (0x2F3A, 'M', u'彡'), - (0x2F3B, 'M', u'彳'), - (0x2F3C, 'M', u'心'), - (0x2F3D, 'M', u'戈'), - (0x2F3E, 'M', u'戶'), - (0x2F3F, 'M', u'手'), - (0x2F40, 'M', u'支'), - (0x2F41, 'M', u'攴'), - (0x2F42, 'M', u'文'), - (0x2F43, 'M', u'斗'), - (0x2F44, 'M', u'斤'), - (0x2F45, 'M', u'方'), - (0x2F46, 'M', u'无'), - (0x2F47, 'M', u'日'), - (0x2F48, 'M', u'曰'), - (0x2F49, 'M', u'月'), - (0x2F4A, 'M', u'木'), - (0x2F4B, 'M', u'欠'), - (0x2F4C, 'M', u'止'), - (0x2F4D, 'M', u'歹'), - (0x2F4E, 'M', u'殳'), - (0x2F4F, 'M', u'毋'), - (0x2F50, 'M', u'比'), - (0x2F51, 'M', u'毛'), - (0x2F52, 'M', u'氏'), - (0x2F53, 'M', u'气'), - (0x2F54, 'M', u'水'), - (0x2F55, 'M', u'火'), - (0x2F56, 'M', u'爪'), - (0x2F57, 'M', u'父'), - (0x2F58, 'M', u'爻'), - (0x2F59, 'M', u'爿'), - (0x2F5A, 'M', u'片'), - (0x2F5B, 'M', u'牙'), - (0x2F5C, 'M', u'牛'), - (0x2F5D, 'M', u'犬'), - (0x2F5E, 'M', u'玄'), - (0x2F5F, 'M', u'玉'), - (0x2F60, 'M', u'瓜'), - (0x2F61, 'M', u'瓦'), - (0x2F62, 'M', u'甘'), - (0x2F63, 'M', u'生'), - (0x2F64, 'M', u'用'), - (0x2F65, 'M', u'田'), - (0x2F66, 'M', u'疋'), - (0x2F67, 'M', u'疒'), - (0x2F68, 'M', u'癶'), - (0x2F69, 'M', u'白'), - (0x2F6A, 'M', u'皮'), - (0x2F6B, 'M', u'皿'), - (0x2F6C, 'M', u'目'), - (0x2F6D, 'M', u'矛'), - (0x2F6E, 'M', u'矢'), - (0x2F6F, 'M', u'石'), - (0x2F70, 'M', u'示'), - (0x2F71, 'M', u'禸'), - (0x2F72, 'M', u'禾'), - (0x2F73, 'M', u'穴'), - (0x2F74, 'M', u'立'), - (0x2F75, 'M', u'竹'), - (0x2F76, 'M', u'米'), - (0x2F77, 'M', u'糸'), - (0x2F78, 'M', u'缶'), - (0x2F79, 'M', u'网'), - (0x2F7A, 'M', u'羊'), - (0x2F7B, 'M', u'羽'), - (0x2F7C, 'M', u'老'), - (0x2F7D, 'M', u'而'), - (0x2F7E, 'M', u'耒'), - (0x2F7F, 'M', u'耳'), - (0x2F80, 'M', u'聿'), - (0x2F81, 'M', u'肉'), - (0x2F82, 'M', u'臣'), - (0x2F83, 'M', u'自'), - (0x2F84, 'M', u'至'), - (0x2F85, 'M', u'臼'), - (0x2F86, 'M', u'舌'), - (0x2F87, 'M', u'舛'), - (0x2F88, 'M', u'舟'), - (0x2F89, 'M', u'艮'), - (0x2F8A, 'M', u'色'), - (0x2F8B, 'M', u'艸'), - (0x2F8C, 'M', u'虍'), - (0x2F8D, 'M', u'虫'), - (0x2F8E, 'M', u'血'), - (0x2F8F, 'M', u'行'), - (0x2F90, 'M', u'衣'), - (0x2F91, 'M', u'襾'), + (0x2F16, 'M', '匸'), + (0x2F17, 'M', '十'), + (0x2F18, 'M', '卜'), + (0x2F19, 'M', '卩'), + (0x2F1A, 'M', '厂'), + (0x2F1B, 'M', '厶'), + (0x2F1C, 'M', '又'), + (0x2F1D, 'M', '口'), + 
(0x2F1E, 'M', '囗'), + (0x2F1F, 'M', '土'), + (0x2F20, 'M', '士'), + (0x2F21, 'M', '夂'), + (0x2F22, 'M', '夊'), + (0x2F23, 'M', '夕'), + (0x2F24, 'M', '大'), + (0x2F25, 'M', '女'), + (0x2F26, 'M', '子'), + (0x2F27, 'M', '宀'), + (0x2F28, 'M', '寸'), + (0x2F29, 'M', '小'), + (0x2F2A, 'M', '尢'), + (0x2F2B, 'M', '尸'), + (0x2F2C, 'M', '屮'), + (0x2F2D, 'M', '山'), + (0x2F2E, 'M', '巛'), + (0x2F2F, 'M', '工'), + (0x2F30, 'M', '己'), + (0x2F31, 'M', '巾'), + (0x2F32, 'M', '干'), + (0x2F33, 'M', '幺'), + (0x2F34, 'M', '广'), + (0x2F35, 'M', '廴'), + (0x2F36, 'M', '廾'), + (0x2F37, 'M', '弋'), + (0x2F38, 'M', '弓'), + (0x2F39, 'M', '彐'), + (0x2F3A, 'M', '彡'), + (0x2F3B, 'M', '彳'), + (0x2F3C, 'M', '心'), + (0x2F3D, 'M', '戈'), + (0x2F3E, 'M', '戶'), + (0x2F3F, 'M', '手'), + (0x2F40, 'M', '支'), + (0x2F41, 'M', '攴'), + (0x2F42, 'M', '文'), + (0x2F43, 'M', '斗'), + (0x2F44, 'M', '斤'), + (0x2F45, 'M', '方'), + (0x2F46, 'M', '无'), + (0x2F47, 'M', '日'), + (0x2F48, 'M', '曰'), + (0x2F49, 'M', '月'), + (0x2F4A, 'M', '木'), + (0x2F4B, 'M', '欠'), + (0x2F4C, 'M', '止'), + (0x2F4D, 'M', '歹'), + (0x2F4E, 'M', '殳'), + (0x2F4F, 'M', '毋'), + (0x2F50, 'M', '比'), + (0x2F51, 'M', '毛'), + (0x2F52, 'M', '氏'), + (0x2F53, 'M', '气'), + (0x2F54, 'M', '水'), + (0x2F55, 'M', '火'), + (0x2F56, 'M', '爪'), + (0x2F57, 'M', '父'), + (0x2F58, 'M', '爻'), + (0x2F59, 'M', '爿'), + (0x2F5A, 'M', '片'), + (0x2F5B, 'M', '牙'), + (0x2F5C, 'M', '牛'), + (0x2F5D, 'M', '犬'), + (0x2F5E, 'M', '玄'), + (0x2F5F, 'M', '玉'), + (0x2F60, 'M', '瓜'), + (0x2F61, 'M', '瓦'), + (0x2F62, 'M', '甘'), + (0x2F63, 'M', '生'), + (0x2F64, 'M', '用'), + (0x2F65, 'M', '田'), + (0x2F66, 'M', '疋'), + (0x2F67, 'M', '疒'), + (0x2F68, 'M', '癶'), + (0x2F69, 'M', '白'), + (0x2F6A, 'M', '皮'), + (0x2F6B, 'M', '皿'), + (0x2F6C, 'M', '目'), + (0x2F6D, 'M', '矛'), + (0x2F6E, 'M', '矢'), + (0x2F6F, 'M', '石'), + (0x2F70, 'M', '示'), + (0x2F71, 'M', '禸'), + (0x2F72, 'M', '禾'), + (0x2F73, 'M', '穴'), + (0x2F74, 'M', '立'), + (0x2F75, 'M', '竹'), + (0x2F76, 'M', '米'), + (0x2F77, 'M', '糸'), + (0x2F78, 'M', '缶'), + (0x2F79, 'M', '网'), ] -def _seg_28(): +def _seg_28() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: return [ - (0x2F92, 'M', u'見'), - (0x2F93, 'M', u'角'), - (0x2F94, 'M', u'言'), - (0x2F95, 'M', u'谷'), - (0x2F96, 'M', u'豆'), - (0x2F97, 'M', u'豕'), - (0x2F98, 'M', u'豸'), - (0x2F99, 'M', u'貝'), - (0x2F9A, 'M', u'赤'), - (0x2F9B, 'M', u'走'), - (0x2F9C, 'M', u'足'), - (0x2F9D, 'M', u'身'), - (0x2F9E, 'M', u'車'), - (0x2F9F, 'M', u'辛'), - (0x2FA0, 'M', u'辰'), - (0x2FA1, 'M', u'辵'), - (0x2FA2, 'M', u'邑'), - (0x2FA3, 'M', u'酉'), - (0x2FA4, 'M', u'釆'), - (0x2FA5, 'M', u'里'), - (0x2FA6, 'M', u'金'), - (0x2FA7, 'M', u'長'), - (0x2FA8, 'M', u'門'), - (0x2FA9, 'M', u'阜'), - (0x2FAA, 'M', u'隶'), - (0x2FAB, 'M', u'隹'), - (0x2FAC, 'M', u'雨'), - (0x2FAD, 'M', u'靑'), - (0x2FAE, 'M', u'非'), - (0x2FAF, 'M', u'面'), - (0x2FB0, 'M', u'革'), - (0x2FB1, 'M', u'韋'), - (0x2FB2, 'M', u'韭'), - (0x2FB3, 'M', u'音'), - (0x2FB4, 'M', u'頁'), - (0x2FB5, 'M', u'風'), - (0x2FB6, 'M', u'飛'), - (0x2FB7, 'M', u'食'), - (0x2FB8, 'M', u'首'), - (0x2FB9, 'M', u'香'), - (0x2FBA, 'M', u'馬'), - (0x2FBB, 'M', u'骨'), - (0x2FBC, 'M', u'高'), - (0x2FBD, 'M', u'髟'), - (0x2FBE, 'M', u'鬥'), - (0x2FBF, 'M', u'鬯'), - (0x2FC0, 'M', u'鬲'), - (0x2FC1, 'M', u'鬼'), - (0x2FC2, 'M', u'魚'), - (0x2FC3, 'M', u'鳥'), - (0x2FC4, 'M', u'鹵'), - (0x2FC5, 'M', u'鹿'), - (0x2FC6, 'M', u'麥'), - (0x2FC7, 'M', u'麻'), - (0x2FC8, 'M', u'黃'), - (0x2FC9, 'M', u'黍'), - (0x2FCA, 'M', u'黑'), - (0x2FCB, 'M', u'黹'), - (0x2FCC, 'M', u'黽'), - (0x2FCD, 'M', u'鼎'), - (0x2FCE, 'M', u'鼓'), - (0x2FCF, 'M', u'鼠'), - 
(0x2FD0, 'M', u'鼻'), - (0x2FD1, 'M', u'齊'), - (0x2FD2, 'M', u'齒'), - (0x2FD3, 'M', u'龍'), - (0x2FD4, 'M', u'龜'), - (0x2FD5, 'M', u'龠'), + (0x2F7A, 'M', '羊'), + (0x2F7B, 'M', '羽'), + (0x2F7C, 'M', '老'), + (0x2F7D, 'M', '而'), + (0x2F7E, 'M', '耒'), + (0x2F7F, 'M', '耳'), + (0x2F80, 'M', '聿'), + (0x2F81, 'M', '肉'), + (0x2F82, 'M', '臣'), + (0x2F83, 'M', '自'), + (0x2F84, 'M', '至'), + (0x2F85, 'M', '臼'), + (0x2F86, 'M', '舌'), + (0x2F87, 'M', '舛'), + (0x2F88, 'M', '舟'), + (0x2F89, 'M', '艮'), + (0x2F8A, 'M', '色'), + (0x2F8B, 'M', '艸'), + (0x2F8C, 'M', '虍'), + (0x2F8D, 'M', '虫'), + (0x2F8E, 'M', '血'), + (0x2F8F, 'M', '行'), + (0x2F90, 'M', '衣'), + (0x2F91, 'M', '襾'), + (0x2F92, 'M', '見'), + (0x2F93, 'M', '角'), + (0x2F94, 'M', '言'), + (0x2F95, 'M', '谷'), + (0x2F96, 'M', '豆'), + (0x2F97, 'M', '豕'), + (0x2F98, 'M', '豸'), + (0x2F99, 'M', '貝'), + (0x2F9A, 'M', '赤'), + (0x2F9B, 'M', '走'), + (0x2F9C, 'M', '足'), + (0x2F9D, 'M', '身'), + (0x2F9E, 'M', '車'), + (0x2F9F, 'M', '辛'), + (0x2FA0, 'M', '辰'), + (0x2FA1, 'M', '辵'), + (0x2FA2, 'M', '邑'), + (0x2FA3, 'M', '酉'), + (0x2FA4, 'M', '釆'), + (0x2FA5, 'M', '里'), + (0x2FA6, 'M', '金'), + (0x2FA7, 'M', '長'), + (0x2FA8, 'M', '門'), + (0x2FA9, 'M', '阜'), + (0x2FAA, 'M', '隶'), + (0x2FAB, 'M', '隹'), + (0x2FAC, 'M', '雨'), + (0x2FAD, 'M', '靑'), + (0x2FAE, 'M', '非'), + (0x2FAF, 'M', '面'), + (0x2FB0, 'M', '革'), + (0x2FB1, 'M', '韋'), + (0x2FB2, 'M', '韭'), + (0x2FB3, 'M', '音'), + (0x2FB4, 'M', '頁'), + (0x2FB5, 'M', '風'), + (0x2FB6, 'M', '飛'), + (0x2FB7, 'M', '食'), + (0x2FB8, 'M', '首'), + (0x2FB9, 'M', '香'), + (0x2FBA, 'M', '馬'), + (0x2FBB, 'M', '骨'), + (0x2FBC, 'M', '高'), + (0x2FBD, 'M', '髟'), + (0x2FBE, 'M', '鬥'), + (0x2FBF, 'M', '鬯'), + (0x2FC0, 'M', '鬲'), + (0x2FC1, 'M', '鬼'), + (0x2FC2, 'M', '魚'), + (0x2FC3, 'M', '鳥'), + (0x2FC4, 'M', '鹵'), + (0x2FC5, 'M', '鹿'), + (0x2FC6, 'M', '麥'), + (0x2FC7, 'M', '麻'), + (0x2FC8, 'M', '黃'), + (0x2FC9, 'M', '黍'), + (0x2FCA, 'M', '黑'), + (0x2FCB, 'M', '黹'), + (0x2FCC, 'M', '黽'), + (0x2FCD, 'M', '鼎'), + (0x2FCE, 'M', '鼓'), + (0x2FCF, 'M', '鼠'), + (0x2FD0, 'M', '鼻'), + (0x2FD1, 'M', '齊'), + (0x2FD2, 'M', '齒'), + (0x2FD3, 'M', '龍'), + (0x2FD4, 'M', '龜'), + (0x2FD5, 'M', '龠'), (0x2FD6, 'X'), - (0x3000, '3', u' '), + (0x3000, '3', ' '), (0x3001, 'V'), - (0x3002, 'M', u'.'), + (0x3002, 'M', '.'), (0x3003, 'V'), - (0x3036, 'M', u'〒'), + (0x3036, 'M', '〒'), (0x3037, 'V'), - (0x3038, 'M', u'十'), - (0x3039, 'M', u'卄'), - (0x303A, 'M', u'卅'), + (0x3038, 'M', '十'), + ] + +def _seg_29() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x3039, 'M', '卄'), + (0x303A, 'M', '卅'), (0x303B, 'V'), (0x3040, 'X'), (0x3041, 'V'), (0x3097, 'X'), (0x3099, 'V'), - (0x309B, '3', u' ゙'), - (0x309C, '3', u' ゚'), + (0x309B, '3', ' ゙'), + (0x309C, '3', ' ゚'), (0x309D, 'V'), - (0x309F, 'M', u'より'), + (0x309F, 'M', 'より'), (0x30A0, 'V'), - (0x30FF, 'M', u'コト'), + (0x30FF, 'M', 'コト'), (0x3100, 'X'), (0x3105, 'V'), (0x3130, 'X'), - (0x3131, 'M', u'ᄀ'), - (0x3132, 'M', u'ᄁ'), - (0x3133, 'M', u'ᆪ'), - (0x3134, 'M', u'ᄂ'), - (0x3135, 'M', u'ᆬ'), - (0x3136, 'M', u'ᆭ'), - (0x3137, 'M', u'ᄃ'), - (0x3138, 'M', u'ᄄ'), + (0x3131, 'M', 'ᄀ'), + (0x3132, 'M', 'ᄁ'), + (0x3133, 'M', 'ᆪ'), + (0x3134, 'M', 'ᄂ'), + (0x3135, 'M', 'ᆬ'), + (0x3136, 'M', 'ᆭ'), + (0x3137, 'M', 'ᄃ'), + (0x3138, 'M', 'ᄄ'), + (0x3139, 'M', 'ᄅ'), + (0x313A, 'M', 'ᆰ'), + (0x313B, 'M', 'ᆱ'), + (0x313C, 'M', 'ᆲ'), + (0x313D, 'M', 'ᆳ'), + (0x313E, 'M', 'ᆴ'), + (0x313F, 'M', 'ᆵ'), + (0x3140, 'M', 'ᄚ'), + (0x3141, 'M', 'ᄆ'), + (0x3142, 'M', 'ᄇ'), + (0x3143, 'M', 'ᄈ'), + (0x3144, 'M', 'ᄡ'), + (0x3145, 'M', 
'ᄉ'), + (0x3146, 'M', 'ᄊ'), + (0x3147, 'M', 'ᄋ'), + (0x3148, 'M', 'ᄌ'), + (0x3149, 'M', 'ᄍ'), + (0x314A, 'M', 'ᄎ'), + (0x314B, 'M', 'ᄏ'), + (0x314C, 'M', 'ᄐ'), + (0x314D, 'M', 'ᄑ'), + (0x314E, 'M', 'ᄒ'), + (0x314F, 'M', 'ᅡ'), + (0x3150, 'M', 'ᅢ'), + (0x3151, 'M', 'ᅣ'), + (0x3152, 'M', 'ᅤ'), + (0x3153, 'M', 'ᅥ'), + (0x3154, 'M', 'ᅦ'), + (0x3155, 'M', 'ᅧ'), + (0x3156, 'M', 'ᅨ'), + (0x3157, 'M', 'ᅩ'), + (0x3158, 'M', 'ᅪ'), + (0x3159, 'M', 'ᅫ'), + (0x315A, 'M', 'ᅬ'), + (0x315B, 'M', 'ᅭ'), + (0x315C, 'M', 'ᅮ'), + (0x315D, 'M', 'ᅯ'), + (0x315E, 'M', 'ᅰ'), + (0x315F, 'M', 'ᅱ'), + (0x3160, 'M', 'ᅲ'), + (0x3161, 'M', 'ᅳ'), + (0x3162, 'M', 'ᅴ'), + (0x3163, 'M', 'ᅵ'), + (0x3164, 'X'), + (0x3165, 'M', 'ᄔ'), + (0x3166, 'M', 'ᄕ'), + (0x3167, 'M', 'ᇇ'), + (0x3168, 'M', 'ᇈ'), + (0x3169, 'M', 'ᇌ'), + (0x316A, 'M', 'ᇎ'), + (0x316B, 'M', 'ᇓ'), + (0x316C, 'M', 'ᇗ'), + (0x316D, 'M', 'ᇙ'), + (0x316E, 'M', 'ᄜ'), + (0x316F, 'M', 'ᇝ'), + (0x3170, 'M', 'ᇟ'), + (0x3171, 'M', 'ᄝ'), + (0x3172, 'M', 'ᄞ'), + (0x3173, 'M', 'ᄠ'), + (0x3174, 'M', 'ᄢ'), + (0x3175, 'M', 'ᄣ'), + (0x3176, 'M', 'ᄧ'), + (0x3177, 'M', 'ᄩ'), + (0x3178, 'M', 'ᄫ'), + (0x3179, 'M', 'ᄬ'), + (0x317A, 'M', 'ᄭ'), + (0x317B, 'M', 'ᄮ'), + (0x317C, 'M', 'ᄯ'), + (0x317D, 'M', 'ᄲ'), + (0x317E, 'M', 'ᄶ'), + (0x317F, 'M', 'ᅀ'), + (0x3180, 'M', 'ᅇ'), + (0x3181, 'M', 'ᅌ'), + (0x3182, 'M', 'ᇱ'), + (0x3183, 'M', 'ᇲ'), + (0x3184, 'M', 'ᅗ'), ] -def _seg_29(): +def _seg_30() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: return [ - (0x3139, 'M', u'ᄅ'), - (0x313A, 'M', u'ᆰ'), - (0x313B, 'M', u'ᆱ'), - (0x313C, 'M', u'ᆲ'), - (0x313D, 'M', u'ᆳ'), - (0x313E, 'M', u'ᆴ'), - (0x313F, 'M', u'ᆵ'), - (0x3140, 'M', u'ᄚ'), - (0x3141, 'M', u'ᄆ'), - (0x3142, 'M', u'ᄇ'), - (0x3143, 'M', u'ᄈ'), - (0x3144, 'M', u'ᄡ'), - (0x3145, 'M', u'ᄉ'), - (0x3146, 'M', u'ᄊ'), - (0x3147, 'M', u'ᄋ'), - (0x3148, 'M', u'ᄌ'), - (0x3149, 'M', u'ᄍ'), - (0x314A, 'M', u'ᄎ'), - (0x314B, 'M', u'ᄏ'), - (0x314C, 'M', u'ᄐ'), - (0x314D, 'M', u'ᄑ'), - (0x314E, 'M', u'ᄒ'), - (0x314F, 'M', u'ᅡ'), - (0x3150, 'M', u'ᅢ'), - (0x3151, 'M', u'ᅣ'), - (0x3152, 'M', u'ᅤ'), - (0x3153, 'M', u'ᅥ'), - (0x3154, 'M', u'ᅦ'), - (0x3155, 'M', u'ᅧ'), - (0x3156, 'M', u'ᅨ'), - (0x3157, 'M', u'ᅩ'), - (0x3158, 'M', u'ᅪ'), - (0x3159, 'M', u'ᅫ'), - (0x315A, 'M', u'ᅬ'), - (0x315B, 'M', u'ᅭ'), - (0x315C, 'M', u'ᅮ'), - (0x315D, 'M', u'ᅯ'), - (0x315E, 'M', u'ᅰ'), - (0x315F, 'M', u'ᅱ'), - (0x3160, 'M', u'ᅲ'), - (0x3161, 'M', u'ᅳ'), - (0x3162, 'M', u'ᅴ'), - (0x3163, 'M', u'ᅵ'), - (0x3164, 'X'), - (0x3165, 'M', u'ᄔ'), - (0x3166, 'M', u'ᄕ'), - (0x3167, 'M', u'ᇇ'), - (0x3168, 'M', u'ᇈ'), - (0x3169, 'M', u'ᇌ'), - (0x316A, 'M', u'ᇎ'), - (0x316B, 'M', u'ᇓ'), - (0x316C, 'M', u'ᇗ'), - (0x316D, 'M', u'ᇙ'), - (0x316E, 'M', u'ᄜ'), - (0x316F, 'M', u'ᇝ'), - (0x3170, 'M', u'ᇟ'), - (0x3171, 'M', u'ᄝ'), - (0x3172, 'M', u'ᄞ'), - (0x3173, 'M', u'ᄠ'), - (0x3174, 'M', u'ᄢ'), - (0x3175, 'M', u'ᄣ'), - (0x3176, 'M', u'ᄧ'), - (0x3177, 'M', u'ᄩ'), - (0x3178, 'M', u'ᄫ'), - (0x3179, 'M', u'ᄬ'), - (0x317A, 'M', u'ᄭ'), - (0x317B, 'M', u'ᄮ'), - (0x317C, 'M', u'ᄯ'), - (0x317D, 'M', u'ᄲ'), - (0x317E, 'M', u'ᄶ'), - (0x317F, 'M', u'ᅀ'), - (0x3180, 'M', u'ᅇ'), - (0x3181, 'M', u'ᅌ'), - (0x3182, 'M', u'ᇱ'), - (0x3183, 'M', u'ᇲ'), - (0x3184, 'M', u'ᅗ'), - (0x3185, 'M', u'ᅘ'), - (0x3186, 'M', u'ᅙ'), - (0x3187, 'M', u'ᆄ'), - (0x3188, 'M', u'ᆅ'), - (0x3189, 'M', u'ᆈ'), - (0x318A, 'M', u'ᆑ'), - (0x318B, 'M', u'ᆒ'), - (0x318C, 'M', u'ᆔ'), - (0x318D, 'M', u'ᆞ'), - (0x318E, 'M', u'ᆡ'), + (0x3185, 'M', 'ᅘ'), + (0x3186, 'M', 'ᅙ'), + (0x3187, 'M', 'ᆄ'), + (0x3188, 'M', 'ᆅ'), + (0x3189, 
'M', 'ᆈ'), + (0x318A, 'M', 'ᆑ'), + (0x318B, 'M', 'ᆒ'), + (0x318C, 'M', 'ᆔ'), + (0x318D, 'M', 'ᆞ'), + (0x318E, 'M', 'ᆡ'), (0x318F, 'X'), (0x3190, 'V'), - (0x3192, 'M', u'一'), - (0x3193, 'M', u'二'), - (0x3194, 'M', u'三'), - (0x3195, 'M', u'四'), - (0x3196, 'M', u'上'), - (0x3197, 'M', u'中'), - (0x3198, 'M', u'下'), - (0x3199, 'M', u'甲'), - (0x319A, 'M', u'乙'), - (0x319B, 'M', u'丙'), - (0x319C, 'M', u'丁'), - (0x319D, 'M', u'天'), - ] - -def _seg_30(): - return [ - (0x319E, 'M', u'地'), - (0x319F, 'M', u'人'), + (0x3192, 'M', '一'), + (0x3193, 'M', '二'), + (0x3194, 'M', '三'), + (0x3195, 'M', '四'), + (0x3196, 'M', '上'), + (0x3197, 'M', '中'), + (0x3198, 'M', '下'), + (0x3199, 'M', '甲'), + (0x319A, 'M', '乙'), + (0x319B, 'M', '丙'), + (0x319C, 'M', '丁'), + (0x319D, 'M', '天'), + (0x319E, 'M', '地'), + (0x319F, 'M', '人'), (0x31A0, 'V'), - (0x31BB, 'X'), - (0x31C0, 'V'), (0x31E4, 'X'), (0x31F0, 'V'), - (0x3200, '3', u'(ᄀ)'), - (0x3201, '3', u'(ᄂ)'), - (0x3202, '3', u'(ᄃ)'), - (0x3203, '3', u'(ᄅ)'), - (0x3204, '3', u'(ᄆ)'), - (0x3205, '3', u'(ᄇ)'), - (0x3206, '3', u'(ᄉ)'), - (0x3207, '3', u'(ᄋ)'), - (0x3208, '3', u'(ᄌ)'), - (0x3209, '3', u'(ᄎ)'), - (0x320A, '3', u'(ᄏ)'), - (0x320B, '3', u'(ᄐ)'), - (0x320C, '3', u'(ᄑ)'), - (0x320D, '3', u'(ᄒ)'), - (0x320E, '3', u'(가)'), - (0x320F, '3', u'(나)'), - (0x3210, '3', u'(다)'), - (0x3211, '3', u'(라)'), - (0x3212, '3', u'(마)'), - (0x3213, '3', u'(바)'), - (0x3214, '3', u'(사)'), - (0x3215, '3', u'(아)'), - (0x3216, '3', u'(자)'), - (0x3217, '3', u'(차)'), - (0x3218, '3', u'(카)'), - (0x3219, '3', u'(타)'), - (0x321A, '3', u'(파)'), - (0x321B, '3', u'(하)'), - (0x321C, '3', u'(주)'), - (0x321D, '3', u'(오전)'), - (0x321E, '3', u'(오후)'), + (0x3200, '3', '(ᄀ)'), + (0x3201, '3', '(ᄂ)'), + (0x3202, '3', '(ᄃ)'), + (0x3203, '3', '(ᄅ)'), + (0x3204, '3', '(ᄆ)'), + (0x3205, '3', '(ᄇ)'), + (0x3206, '3', '(ᄉ)'), + (0x3207, '3', '(ᄋ)'), + (0x3208, '3', '(ᄌ)'), + (0x3209, '3', '(ᄎ)'), + (0x320A, '3', '(ᄏ)'), + (0x320B, '3', '(ᄐ)'), + (0x320C, '3', '(ᄑ)'), + (0x320D, '3', '(ᄒ)'), + (0x320E, '3', '(가)'), + (0x320F, '3', '(나)'), + (0x3210, '3', '(다)'), + (0x3211, '3', '(라)'), + (0x3212, '3', '(마)'), + (0x3213, '3', '(바)'), + (0x3214, '3', '(사)'), + (0x3215, '3', '(아)'), + (0x3216, '3', '(자)'), + (0x3217, '3', '(차)'), + (0x3218, '3', '(카)'), + (0x3219, '3', '(타)'), + (0x321A, '3', '(파)'), + (0x321B, '3', '(하)'), + (0x321C, '3', '(주)'), + (0x321D, '3', '(오전)'), + (0x321E, '3', '(오후)'), (0x321F, 'X'), - (0x3220, '3', u'(一)'), - (0x3221, '3', u'(二)'), - (0x3222, '3', u'(三)'), - (0x3223, '3', u'(四)'), - (0x3224, '3', u'(五)'), - (0x3225, '3', u'(六)'), - (0x3226, '3', u'(七)'), - (0x3227, '3', u'(八)'), - (0x3228, '3', u'(九)'), - (0x3229, '3', u'(十)'), - (0x322A, '3', u'(月)'), - (0x322B, '3', u'(火)'), - (0x322C, '3', u'(水)'), - (0x322D, '3', u'(木)'), - (0x322E, '3', u'(金)'), - (0x322F, '3', u'(土)'), - (0x3230, '3', u'(日)'), - (0x3231, '3', u'(株)'), - (0x3232, '3', u'(有)'), - (0x3233, '3', u'(社)'), - (0x3234, '3', u'(名)'), - (0x3235, '3', u'(特)'), - (0x3236, '3', u'(財)'), - (0x3237, '3', u'(祝)'), - (0x3238, '3', u'(労)'), - (0x3239, '3', u'(代)'), - (0x323A, '3', u'(呼)'), - (0x323B, '3', u'(学)'), - (0x323C, '3', u'(監)'), - (0x323D, '3', u'(企)'), - (0x323E, '3', u'(資)'), - (0x323F, '3', u'(協)'), - (0x3240, '3', u'(祭)'), - (0x3241, '3', u'(休)'), - (0x3242, '3', u'(自)'), - (0x3243, '3', u'(至)'), - (0x3244, 'M', u'問'), - (0x3245, 'M', u'幼'), - (0x3246, 'M', u'文'), - (0x3247, 'M', u'箏'), - (0x3248, 'V'), - (0x3250, 'M', u'pte'), - (0x3251, 'M', u'21'), - (0x3252, 'M', u'22'), - (0x3253, 'M', u'23'), - (0x3254, 'M', 
u'24'), - (0x3255, 'M', u'25'), - (0x3256, 'M', u'26'), - (0x3257, 'M', u'27'), - (0x3258, 'M', u'28'), - (0x3259, 'M', u'29'), - (0x325A, 'M', u'30'), - (0x325B, 'M', u'31'), - (0x325C, 'M', u'32'), - (0x325D, 'M', u'33'), - (0x325E, 'M', u'34'), - (0x325F, 'M', u'35'), - (0x3260, 'M', u'ᄀ'), - (0x3261, 'M', u'ᄂ'), - (0x3262, 'M', u'ᄃ'), - (0x3263, 'M', u'ᄅ'), + (0x3220, '3', '(一)'), + (0x3221, '3', '(二)'), + (0x3222, '3', '(三)'), + (0x3223, '3', '(四)'), + (0x3224, '3', '(五)'), + (0x3225, '3', '(六)'), + (0x3226, '3', '(七)'), + (0x3227, '3', '(八)'), + (0x3228, '3', '(九)'), + (0x3229, '3', '(十)'), + (0x322A, '3', '(月)'), + (0x322B, '3', '(火)'), + (0x322C, '3', '(水)'), + (0x322D, '3', '(木)'), + (0x322E, '3', '(金)'), + (0x322F, '3', '(土)'), + (0x3230, '3', '(日)'), + (0x3231, '3', '(株)'), + (0x3232, '3', '(有)'), + (0x3233, '3', '(社)'), + (0x3234, '3', '(名)'), + (0x3235, '3', '(特)'), + (0x3236, '3', '(財)'), + (0x3237, '3', '(祝)'), + (0x3238, '3', '(労)'), + (0x3239, '3', '(代)'), + (0x323A, '3', '(呼)'), + (0x323B, '3', '(学)'), + (0x323C, '3', '(監)'), + (0x323D, '3', '(企)'), + (0x323E, '3', '(資)'), + (0x323F, '3', '(協)'), + (0x3240, '3', '(祭)'), + (0x3241, '3', '(休)'), + (0x3242, '3', '(自)'), + (0x3243, '3', '(至)'), + (0x3244, 'M', '問'), + (0x3245, 'M', '幼'), + (0x3246, 'M', '文'), ] -def _seg_31(): +def _seg_31() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: return [ - (0x3264, 'M', u'ᄆ'), - (0x3265, 'M', u'ᄇ'), - (0x3266, 'M', u'ᄉ'), - (0x3267, 'M', u'ᄋ'), - (0x3268, 'M', u'ᄌ'), - (0x3269, 'M', u'ᄎ'), - (0x326A, 'M', u'ᄏ'), - (0x326B, 'M', u'ᄐ'), - (0x326C, 'M', u'ᄑ'), - (0x326D, 'M', u'ᄒ'), - (0x326E, 'M', u'가'), - (0x326F, 'M', u'나'), - (0x3270, 'M', u'다'), - (0x3271, 'M', u'라'), - (0x3272, 'M', u'마'), - (0x3273, 'M', u'바'), - (0x3274, 'M', u'사'), - (0x3275, 'M', u'아'), - (0x3276, 'M', u'자'), - (0x3277, 'M', u'차'), - (0x3278, 'M', u'카'), - (0x3279, 'M', u'타'), - (0x327A, 'M', u'파'), - (0x327B, 'M', u'하'), - (0x327C, 'M', u'참고'), - (0x327D, 'M', u'주의'), - (0x327E, 'M', u'우'), + (0x3247, 'M', '箏'), + (0x3248, 'V'), + (0x3250, 'M', 'pte'), + (0x3251, 'M', '21'), + (0x3252, 'M', '22'), + (0x3253, 'M', '23'), + (0x3254, 'M', '24'), + (0x3255, 'M', '25'), + (0x3256, 'M', '26'), + (0x3257, 'M', '27'), + (0x3258, 'M', '28'), + (0x3259, 'M', '29'), + (0x325A, 'M', '30'), + (0x325B, 'M', '31'), + (0x325C, 'M', '32'), + (0x325D, 'M', '33'), + (0x325E, 'M', '34'), + (0x325F, 'M', '35'), + (0x3260, 'M', 'ᄀ'), + (0x3261, 'M', 'ᄂ'), + (0x3262, 'M', 'ᄃ'), + (0x3263, 'M', 'ᄅ'), + (0x3264, 'M', 'ᄆ'), + (0x3265, 'M', 'ᄇ'), + (0x3266, 'M', 'ᄉ'), + (0x3267, 'M', 'ᄋ'), + (0x3268, 'M', 'ᄌ'), + (0x3269, 'M', 'ᄎ'), + (0x326A, 'M', 'ᄏ'), + (0x326B, 'M', 'ᄐ'), + (0x326C, 'M', 'ᄑ'), + (0x326D, 'M', 'ᄒ'), + (0x326E, 'M', '가'), + (0x326F, 'M', '나'), + (0x3270, 'M', '다'), + (0x3271, 'M', '라'), + (0x3272, 'M', '마'), + (0x3273, 'M', '바'), + (0x3274, 'M', '사'), + (0x3275, 'M', '아'), + (0x3276, 'M', '자'), + (0x3277, 'M', '차'), + (0x3278, 'M', '카'), + (0x3279, 'M', '타'), + (0x327A, 'M', '파'), + (0x327B, 'M', '하'), + (0x327C, 'M', '참고'), + (0x327D, 'M', '주의'), + (0x327E, 'M', '우'), (0x327F, 'V'), - (0x3280, 'M', u'一'), - (0x3281, 'M', u'二'), - (0x3282, 'M', u'三'), - (0x3283, 'M', u'四'), - (0x3284, 'M', u'五'), - (0x3285, 'M', u'六'), - (0x3286, 'M', u'七'), - (0x3287, 'M', u'八'), - (0x3288, 'M', u'九'), - (0x3289, 'M', u'十'), - (0x328A, 'M', u'月'), - (0x328B, 'M', u'火'), - (0x328C, 'M', u'水'), - (0x328D, 'M', u'木'), - (0x328E, 'M', u'金'), - (0x328F, 'M', u'土'), - (0x3290, 'M', u'日'), - (0x3291, 'M', u'株'), - (0x3292, 'M', 
u'有'), - (0x3293, 'M', u'社'), - (0x3294, 'M', u'名'), - (0x3295, 'M', u'特'), - (0x3296, 'M', u'財'), - (0x3297, 'M', u'祝'), - (0x3298, 'M', u'労'), - (0x3299, 'M', u'秘'), - (0x329A, 'M', u'男'), - (0x329B, 'M', u'女'), - (0x329C, 'M', u'適'), - (0x329D, 'M', u'優'), - (0x329E, 'M', u'印'), - (0x329F, 'M', u'注'), - (0x32A0, 'M', u'項'), - (0x32A1, 'M', u'休'), - (0x32A2, 'M', u'写'), - (0x32A3, 'M', u'正'), - (0x32A4, 'M', u'上'), - (0x32A5, 'M', u'中'), - (0x32A6, 'M', u'下'), - (0x32A7, 'M', u'左'), - (0x32A8, 'M', u'右'), - (0x32A9, 'M', u'医'), - (0x32AA, 'M', u'宗'), - (0x32AB, 'M', u'学'), - (0x32AC, 'M', u'監'), - (0x32AD, 'M', u'企'), - (0x32AE, 'M', u'資'), - (0x32AF, 'M', u'協'), - (0x32B0, 'M', u'夜'), - (0x32B1, 'M', u'36'), - (0x32B2, 'M', u'37'), - (0x32B3, 'M', u'38'), - (0x32B4, 'M', u'39'), - (0x32B5, 'M', u'40'), - (0x32B6, 'M', u'41'), - (0x32B7, 'M', u'42'), - (0x32B8, 'M', u'43'), - (0x32B9, 'M', u'44'), - (0x32BA, 'M', u'45'), - (0x32BB, 'M', u'46'), - (0x32BC, 'M', u'47'), - (0x32BD, 'M', u'48'), - (0x32BE, 'M', u'49'), - (0x32BF, 'M', u'50'), - (0x32C0, 'M', u'1月'), - (0x32C1, 'M', u'2月'), - (0x32C2, 'M', u'3月'), - (0x32C3, 'M', u'4月'), - (0x32C4, 'M', u'5月'), - (0x32C5, 'M', u'6月'), - (0x32C6, 'M', u'7月'), - (0x32C7, 'M', u'8月'), + (0x3280, 'M', '一'), + (0x3281, 'M', '二'), + (0x3282, 'M', '三'), + (0x3283, 'M', '四'), + (0x3284, 'M', '五'), + (0x3285, 'M', '六'), + (0x3286, 'M', '七'), + (0x3287, 'M', '八'), + (0x3288, 'M', '九'), + (0x3289, 'M', '十'), + (0x328A, 'M', '月'), + (0x328B, 'M', '火'), + (0x328C, 'M', '水'), + (0x328D, 'M', '木'), + (0x328E, 'M', '金'), + (0x328F, 'M', '土'), + (0x3290, 'M', '日'), + (0x3291, 'M', '株'), + (0x3292, 'M', '有'), + (0x3293, 'M', '社'), + (0x3294, 'M', '名'), + (0x3295, 'M', '特'), + (0x3296, 'M', '財'), + (0x3297, 'M', '祝'), + (0x3298, 'M', '労'), + (0x3299, 'M', '秘'), + (0x329A, 'M', '男'), + (0x329B, 'M', '女'), + (0x329C, 'M', '適'), + (0x329D, 'M', '優'), + (0x329E, 'M', '印'), + (0x329F, 'M', '注'), + (0x32A0, 'M', '項'), + (0x32A1, 'M', '休'), + (0x32A2, 'M', '写'), + (0x32A3, 'M', '正'), + (0x32A4, 'M', '上'), + (0x32A5, 'M', '中'), + (0x32A6, 'M', '下'), + (0x32A7, 'M', '左'), + (0x32A8, 'M', '右'), + (0x32A9, 'M', '医'), + (0x32AA, 'M', '宗'), + (0x32AB, 'M', '学'), + (0x32AC, 'M', '監'), + (0x32AD, 'M', '企'), + (0x32AE, 'M', '資'), + (0x32AF, 'M', '協'), + (0x32B0, 'M', '夜'), + (0x32B1, 'M', '36'), ] -def _seg_32(): +def _seg_32() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: return [ - (0x32C8, 'M', u'9月'), - (0x32C9, 'M', u'10月'), - (0x32CA, 'M', u'11月'), - (0x32CB, 'M', u'12月'), - (0x32CC, 'M', u'hg'), - (0x32CD, 'M', u'erg'), - (0x32CE, 'M', u'ev'), - (0x32CF, 'M', u'ltd'), - (0x32D0, 'M', u'ア'), - (0x32D1, 'M', u'イ'), - (0x32D2, 'M', u'ウ'), - (0x32D3, 'M', u'エ'), - (0x32D4, 'M', u'オ'), - (0x32D5, 'M', u'カ'), - (0x32D6, 'M', u'キ'), - (0x32D7, 'M', u'ク'), - (0x32D8, 'M', u'ケ'), - (0x32D9, 'M', u'コ'), - (0x32DA, 'M', u'サ'), - (0x32DB, 'M', u'シ'), - (0x32DC, 'M', u'ス'), - (0x32DD, 'M', u'セ'), - (0x32DE, 'M', u'ソ'), - (0x32DF, 'M', u'タ'), - (0x32E0, 'M', u'チ'), - (0x32E1, 'M', u'ツ'), - (0x32E2, 'M', u'テ'), - (0x32E3, 'M', u'ト'), - (0x32E4, 'M', u'ナ'), - (0x32E5, 'M', u'ニ'), - (0x32E6, 'M', u'ヌ'), - (0x32E7, 'M', u'ネ'), - (0x32E8, 'M', u'ノ'), - (0x32E9, 'M', u'ハ'), - (0x32EA, 'M', u'ヒ'), - (0x32EB, 'M', u'フ'), - (0x32EC, 'M', u'ヘ'), - (0x32ED, 'M', u'ホ'), - (0x32EE, 'M', u'マ'), - (0x32EF, 'M', u'ミ'), - (0x32F0, 'M', u'ム'), - (0x32F1, 'M', u'メ'), - (0x32F2, 'M', u'モ'), - (0x32F3, 'M', u'ヤ'), - (0x32F4, 'M', u'ユ'), - (0x32F5, 'M', u'ヨ'), - (0x32F6, 'M', u'ラ'), - (0x32F7, 
'M', u'リ'), - (0x32F8, 'M', u'ル'), - (0x32F9, 'M', u'レ'), - (0x32FA, 'M', u'ロ'), - (0x32FB, 'M', u'ワ'), - (0x32FC, 'M', u'ヰ'), - (0x32FD, 'M', u'ヱ'), - (0x32FE, 'M', u'ヲ'), - (0x32FF, 'X'), - (0x3300, 'M', u'アパート'), - (0x3301, 'M', u'アルファ'), - (0x3302, 'M', u'アンペア'), - (0x3303, 'M', u'アール'), - (0x3304, 'M', u'イニング'), - (0x3305, 'M', u'インチ'), - (0x3306, 'M', u'ウォン'), - (0x3307, 'M', u'エスクード'), - (0x3308, 'M', u'エーカー'), - (0x3309, 'M', u'オンス'), - (0x330A, 'M', u'オーム'), - (0x330B, 'M', u'カイリ'), - (0x330C, 'M', u'カラット'), - (0x330D, 'M', u'カロリー'), - (0x330E, 'M', u'ガロン'), - (0x330F, 'M', u'ガンマ'), - (0x3310, 'M', u'ギガ'), - (0x3311, 'M', u'ギニー'), - (0x3312, 'M', u'キュリー'), - (0x3313, 'M', u'ギルダー'), - (0x3314, 'M', u'キロ'), - (0x3315, 'M', u'キログラム'), - (0x3316, 'M', u'キロメートル'), - (0x3317, 'M', u'キロワット'), - (0x3318, 'M', u'グラム'), - (0x3319, 'M', u'グラムトン'), - (0x331A, 'M', u'クルゼイロ'), - (0x331B, 'M', u'クローネ'), - (0x331C, 'M', u'ケース'), - (0x331D, 'M', u'コルナ'), - (0x331E, 'M', u'コーポ'), - (0x331F, 'M', u'サイクル'), - (0x3320, 'M', u'サンチーム'), - (0x3321, 'M', u'シリング'), - (0x3322, 'M', u'センチ'), - (0x3323, 'M', u'セント'), - (0x3324, 'M', u'ダース'), - (0x3325, 'M', u'デシ'), - (0x3326, 'M', u'ドル'), - (0x3327, 'M', u'トン'), - (0x3328, 'M', u'ナノ'), - (0x3329, 'M', u'ノット'), - (0x332A, 'M', u'ハイツ'), - (0x332B, 'M', u'パーセント'), + (0x32B2, 'M', '37'), + (0x32B3, 'M', '38'), + (0x32B4, 'M', '39'), + (0x32B5, 'M', '40'), + (0x32B6, 'M', '41'), + (0x32B7, 'M', '42'), + (0x32B8, 'M', '43'), + (0x32B9, 'M', '44'), + (0x32BA, 'M', '45'), + (0x32BB, 'M', '46'), + (0x32BC, 'M', '47'), + (0x32BD, 'M', '48'), + (0x32BE, 'M', '49'), + (0x32BF, 'M', '50'), + (0x32C0, 'M', '1月'), + (0x32C1, 'M', '2月'), + (0x32C2, 'M', '3月'), + (0x32C3, 'M', '4月'), + (0x32C4, 'M', '5月'), + (0x32C5, 'M', '6月'), + (0x32C6, 'M', '7月'), + (0x32C7, 'M', '8月'), + (0x32C8, 'M', '9月'), + (0x32C9, 'M', '10月'), + (0x32CA, 'M', '11月'), + (0x32CB, 'M', '12月'), + (0x32CC, 'M', 'hg'), + (0x32CD, 'M', 'erg'), + (0x32CE, 'M', 'ev'), + (0x32CF, 'M', 'ltd'), + (0x32D0, 'M', 'ア'), + (0x32D1, 'M', 'イ'), + (0x32D2, 'M', 'ウ'), + (0x32D3, 'M', 'エ'), + (0x32D4, 'M', 'オ'), + (0x32D5, 'M', 'カ'), + (0x32D6, 'M', 'キ'), + (0x32D7, 'M', 'ク'), + (0x32D8, 'M', 'ケ'), + (0x32D9, 'M', 'コ'), + (0x32DA, 'M', 'サ'), + (0x32DB, 'M', 'シ'), + (0x32DC, 'M', 'ス'), + (0x32DD, 'M', 'セ'), + (0x32DE, 'M', 'ソ'), + (0x32DF, 'M', 'タ'), + (0x32E0, 'M', 'チ'), + (0x32E1, 'M', 'ツ'), + (0x32E2, 'M', 'テ'), + (0x32E3, 'M', 'ト'), + (0x32E4, 'M', 'ナ'), + (0x32E5, 'M', 'ニ'), + (0x32E6, 'M', 'ヌ'), + (0x32E7, 'M', 'ネ'), + (0x32E8, 'M', 'ノ'), + (0x32E9, 'M', 'ハ'), + (0x32EA, 'M', 'ヒ'), + (0x32EB, 'M', 'フ'), + (0x32EC, 'M', 'ヘ'), + (0x32ED, 'M', 'ホ'), + (0x32EE, 'M', 'マ'), + (0x32EF, 'M', 'ミ'), + (0x32F0, 'M', 'ム'), + (0x32F1, 'M', 'メ'), + (0x32F2, 'M', 'モ'), + (0x32F3, 'M', 'ヤ'), + (0x32F4, 'M', 'ユ'), + (0x32F5, 'M', 'ヨ'), + (0x32F6, 'M', 'ラ'), + (0x32F7, 'M', 'リ'), + (0x32F8, 'M', 'ル'), + (0x32F9, 'M', 'レ'), + (0x32FA, 'M', 'ロ'), + (0x32FB, 'M', 'ワ'), + (0x32FC, 'M', 'ヰ'), + (0x32FD, 'M', 'ヱ'), + (0x32FE, 'M', 'ヲ'), + (0x32FF, 'M', '令和'), + (0x3300, 'M', 'アパート'), + (0x3301, 'M', 'アルファ'), + (0x3302, 'M', 'アンペア'), + (0x3303, 'M', 'アール'), + (0x3304, 'M', 'イニング'), + (0x3305, 'M', 'インチ'), + (0x3306, 'M', 'ウォン'), + (0x3307, 'M', 'エスクード'), + (0x3308, 'M', 'エーカー'), + (0x3309, 'M', 'オンス'), + (0x330A, 'M', 'オーム'), + (0x330B, 'M', 'カイリ'), + (0x330C, 'M', 'カラット'), + (0x330D, 'M', 'カロリー'), + (0x330E, 'M', 'ガロン'), + (0x330F, 'M', 'ガンマ'), + (0x3310, 'M', 'ギガ'), + (0x3311, 'M', 'ギニー'), + (0x3312, 'M', 'キュリー'), + (0x3313, 'M', 'ギルダー'), 
+ (0x3314, 'M', 'キロ'), + (0x3315, 'M', 'キログラム'), ] -def _seg_33(): +def _seg_33() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: return [ - (0x332C, 'M', u'パーツ'), - (0x332D, 'M', u'バーレル'), - (0x332E, 'M', u'ピアストル'), - (0x332F, 'M', u'ピクル'), - (0x3330, 'M', u'ピコ'), - (0x3331, 'M', u'ビル'), - (0x3332, 'M', u'ファラッド'), - (0x3333, 'M', u'フィート'), - (0x3334, 'M', u'ブッシェル'), - (0x3335, 'M', u'フラン'), - (0x3336, 'M', u'ヘクタール'), - (0x3337, 'M', u'ペソ'), - (0x3338, 'M', u'ペニヒ'), - (0x3339, 'M', u'ヘルツ'), - (0x333A, 'M', u'ペンス'), - (0x333B, 'M', u'ページ'), - (0x333C, 'M', u'ベータ'), - (0x333D, 'M', u'ポイント'), - (0x333E, 'M', u'ボルト'), - (0x333F, 'M', u'ホン'), - (0x3340, 'M', u'ポンド'), - (0x3341, 'M', u'ホール'), - (0x3342, 'M', u'ホーン'), - (0x3343, 'M', u'マイクロ'), - (0x3344, 'M', u'マイル'), - (0x3345, 'M', u'マッハ'), - (0x3346, 'M', u'マルク'), - (0x3347, 'M', u'マンション'), - (0x3348, 'M', u'ミクロン'), - (0x3349, 'M', u'ミリ'), - (0x334A, 'M', u'ミリバール'), - (0x334B, 'M', u'メガ'), - (0x334C, 'M', u'メガトン'), - (0x334D, 'M', u'メートル'), - (0x334E, 'M', u'ヤード'), - (0x334F, 'M', u'ヤール'), - (0x3350, 'M', u'ユアン'), - (0x3351, 'M', u'リットル'), - (0x3352, 'M', u'リラ'), - (0x3353, 'M', u'ルピー'), - (0x3354, 'M', u'ルーブル'), - (0x3355, 'M', u'レム'), - (0x3356, 'M', u'レントゲン'), - (0x3357, 'M', u'ワット'), - (0x3358, 'M', u'0点'), - (0x3359, 'M', u'1点'), - (0x335A, 'M', u'2点'), - (0x335B, 'M', u'3点'), - (0x335C, 'M', u'4点'), - (0x335D, 'M', u'5点'), - (0x335E, 'M', u'6点'), - (0x335F, 'M', u'7点'), - (0x3360, 'M', u'8点'), - (0x3361, 'M', u'9点'), - (0x3362, 'M', u'10点'), - (0x3363, 'M', u'11点'), - (0x3364, 'M', u'12点'), - (0x3365, 'M', u'13点'), - (0x3366, 'M', u'14点'), - (0x3367, 'M', u'15点'), - (0x3368, 'M', u'16点'), - (0x3369, 'M', u'17点'), - (0x336A, 'M', u'18点'), - (0x336B, 'M', u'19点'), - (0x336C, 'M', u'20点'), - (0x336D, 'M', u'21点'), - (0x336E, 'M', u'22点'), - (0x336F, 'M', u'23点'), - (0x3370, 'M', u'24点'), - (0x3371, 'M', u'hpa'), - (0x3372, 'M', u'da'), - (0x3373, 'M', u'au'), - (0x3374, 'M', u'bar'), - (0x3375, 'M', u'ov'), - (0x3376, 'M', u'pc'), - (0x3377, 'M', u'dm'), - (0x3378, 'M', u'dm2'), - (0x3379, 'M', u'dm3'), - (0x337A, 'M', u'iu'), - (0x337B, 'M', u'平成'), - (0x337C, 'M', u'昭和'), - (0x337D, 'M', u'大正'), - (0x337E, 'M', u'明治'), - (0x337F, 'M', u'株式会社'), - (0x3380, 'M', u'pa'), - (0x3381, 'M', u'na'), - (0x3382, 'M', u'μa'), - (0x3383, 'M', u'ma'), - (0x3384, 'M', u'ka'), - (0x3385, 'M', u'kb'), - (0x3386, 'M', u'mb'), - (0x3387, 'M', u'gb'), - (0x3388, 'M', u'cal'), - (0x3389, 'M', u'kcal'), - (0x338A, 'M', u'pf'), - (0x338B, 'M', u'nf'), - (0x338C, 'M', u'μf'), - (0x338D, 'M', u'μg'), - (0x338E, 'M', u'mg'), - (0x338F, 'M', u'kg'), + (0x3316, 'M', 'キロメートル'), + (0x3317, 'M', 'キロワット'), + (0x3318, 'M', 'グラム'), + (0x3319, 'M', 'グラムトン'), + (0x331A, 'M', 'クルゼイロ'), + (0x331B, 'M', 'クローネ'), + (0x331C, 'M', 'ケース'), + (0x331D, 'M', 'コルナ'), + (0x331E, 'M', 'コーポ'), + (0x331F, 'M', 'サイクル'), + (0x3320, 'M', 'サンチーム'), + (0x3321, 'M', 'シリング'), + (0x3322, 'M', 'センチ'), + (0x3323, 'M', 'セント'), + (0x3324, 'M', 'ダース'), + (0x3325, 'M', 'デシ'), + (0x3326, 'M', 'ドル'), + (0x3327, 'M', 'トン'), + (0x3328, 'M', 'ナノ'), + (0x3329, 'M', 'ノット'), + (0x332A, 'M', 'ハイツ'), + (0x332B, 'M', 'パーセント'), + (0x332C, 'M', 'パーツ'), + (0x332D, 'M', 'バーレル'), + (0x332E, 'M', 'ピアストル'), + (0x332F, 'M', 'ピクル'), + (0x3330, 'M', 'ピコ'), + (0x3331, 'M', 'ビル'), + (0x3332, 'M', 'ファラッド'), + (0x3333, 'M', 'フィート'), + (0x3334, 'M', 'ブッシェル'), + (0x3335, 'M', 'フラン'), + (0x3336, 'M', 'ヘクタール'), + (0x3337, 'M', 'ペソ'), + (0x3338, 'M', 'ペニヒ'), + (0x3339, 'M', 'ヘルツ'), + (0x333A, 'M', 'ペンス'), + (0x333B, 'M', 
'ページ'), + (0x333C, 'M', 'ベータ'), + (0x333D, 'M', 'ポイント'), + (0x333E, 'M', 'ボルト'), + (0x333F, 'M', 'ホン'), + (0x3340, 'M', 'ポンド'), + (0x3341, 'M', 'ホール'), + (0x3342, 'M', 'ホーン'), + (0x3343, 'M', 'マイクロ'), + (0x3344, 'M', 'マイル'), + (0x3345, 'M', 'マッハ'), + (0x3346, 'M', 'マルク'), + (0x3347, 'M', 'マンション'), + (0x3348, 'M', 'ミクロン'), + (0x3349, 'M', 'ミリ'), + (0x334A, 'M', 'ミリバール'), + (0x334B, 'M', 'メガ'), + (0x334C, 'M', 'メガトン'), + (0x334D, 'M', 'メートル'), + (0x334E, 'M', 'ヤード'), + (0x334F, 'M', 'ヤール'), + (0x3350, 'M', 'ユアン'), + (0x3351, 'M', 'リットル'), + (0x3352, 'M', 'リラ'), + (0x3353, 'M', 'ルピー'), + (0x3354, 'M', 'ルーブル'), + (0x3355, 'M', 'レム'), + (0x3356, 'M', 'レントゲン'), + (0x3357, 'M', 'ワット'), + (0x3358, 'M', '0点'), + (0x3359, 'M', '1点'), + (0x335A, 'M', '2点'), + (0x335B, 'M', '3点'), + (0x335C, 'M', '4点'), + (0x335D, 'M', '5点'), + (0x335E, 'M', '6点'), + (0x335F, 'M', '7点'), + (0x3360, 'M', '8点'), + (0x3361, 'M', '9点'), + (0x3362, 'M', '10点'), + (0x3363, 'M', '11点'), + (0x3364, 'M', '12点'), + (0x3365, 'M', '13点'), + (0x3366, 'M', '14点'), + (0x3367, 'M', '15点'), + (0x3368, 'M', '16点'), + (0x3369, 'M', '17点'), + (0x336A, 'M', '18点'), + (0x336B, 'M', '19点'), + (0x336C, 'M', '20点'), + (0x336D, 'M', '21点'), + (0x336E, 'M', '22点'), + (0x336F, 'M', '23点'), + (0x3370, 'M', '24点'), + (0x3371, 'M', 'hpa'), + (0x3372, 'M', 'da'), + (0x3373, 'M', 'au'), + (0x3374, 'M', 'bar'), + (0x3375, 'M', 'ov'), + (0x3376, 'M', 'pc'), + (0x3377, 'M', 'dm'), + (0x3378, 'M', 'dm2'), + (0x3379, 'M', 'dm3'), ] -def _seg_34(): +def _seg_34() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: return [ - (0x3390, 'M', u'hz'), - (0x3391, 'M', u'khz'), - (0x3392, 'M', u'mhz'), - (0x3393, 'M', u'ghz'), - (0x3394, 'M', u'thz'), - (0x3395, 'M', u'μl'), - (0x3396, 'M', u'ml'), - (0x3397, 'M', u'dl'), - (0x3398, 'M', u'kl'), - (0x3399, 'M', u'fm'), - (0x339A, 'M', u'nm'), - (0x339B, 'M', u'μm'), - (0x339C, 'M', u'mm'), - (0x339D, 'M', u'cm'), - (0x339E, 'M', u'km'), - (0x339F, 'M', u'mm2'), - (0x33A0, 'M', u'cm2'), - (0x33A1, 'M', u'm2'), - (0x33A2, 'M', u'km2'), - (0x33A3, 'M', u'mm3'), - (0x33A4, 'M', u'cm3'), - (0x33A5, 'M', u'm3'), - (0x33A6, 'M', u'km3'), - (0x33A7, 'M', u'm∕s'), - (0x33A8, 'M', u'm∕s2'), - (0x33A9, 'M', u'pa'), - (0x33AA, 'M', u'kpa'), - (0x33AB, 'M', u'mpa'), - (0x33AC, 'M', u'gpa'), - (0x33AD, 'M', u'rad'), - (0x33AE, 'M', u'rad∕s'), - (0x33AF, 'M', u'rad∕s2'), - (0x33B0, 'M', u'ps'), - (0x33B1, 'M', u'ns'), - (0x33B2, 'M', u'μs'), - (0x33B3, 'M', u'ms'), - (0x33B4, 'M', u'pv'), - (0x33B5, 'M', u'nv'), - (0x33B6, 'M', u'μv'), - (0x33B7, 'M', u'mv'), - (0x33B8, 'M', u'kv'), - (0x33B9, 'M', u'mv'), - (0x33BA, 'M', u'pw'), - (0x33BB, 'M', u'nw'), - (0x33BC, 'M', u'μw'), - (0x33BD, 'M', u'mw'), - (0x33BE, 'M', u'kw'), - (0x33BF, 'M', u'mw'), - (0x33C0, 'M', u'kω'), - (0x33C1, 'M', u'mω'), + (0x337A, 'M', 'iu'), + (0x337B, 'M', '平成'), + (0x337C, 'M', '昭和'), + (0x337D, 'M', '大正'), + (0x337E, 'M', '明治'), + (0x337F, 'M', '株式会社'), + (0x3380, 'M', 'pa'), + (0x3381, 'M', 'na'), + (0x3382, 'M', 'μa'), + (0x3383, 'M', 'ma'), + (0x3384, 'M', 'ka'), + (0x3385, 'M', 'kb'), + (0x3386, 'M', 'mb'), + (0x3387, 'M', 'gb'), + (0x3388, 'M', 'cal'), + (0x3389, 'M', 'kcal'), + (0x338A, 'M', 'pf'), + (0x338B, 'M', 'nf'), + (0x338C, 'M', 'μf'), + (0x338D, 'M', 'μg'), + (0x338E, 'M', 'mg'), + (0x338F, 'M', 'kg'), + (0x3390, 'M', 'hz'), + (0x3391, 'M', 'khz'), + (0x3392, 'M', 'mhz'), + (0x3393, 'M', 'ghz'), + (0x3394, 'M', 'thz'), + (0x3395, 'M', 'μl'), + (0x3396, 'M', 'ml'), + (0x3397, 'M', 'dl'), + (0x3398, 'M', 'kl'), + (0x3399, 'M', 'fm'), 
+ (0x339A, 'M', 'nm'), + (0x339B, 'M', 'μm'), + (0x339C, 'M', 'mm'), + (0x339D, 'M', 'cm'), + (0x339E, 'M', 'km'), + (0x339F, 'M', 'mm2'), + (0x33A0, 'M', 'cm2'), + (0x33A1, 'M', 'm2'), + (0x33A2, 'M', 'km2'), + (0x33A3, 'M', 'mm3'), + (0x33A4, 'M', 'cm3'), + (0x33A5, 'M', 'm3'), + (0x33A6, 'M', 'km3'), + (0x33A7, 'M', 'm∕s'), + (0x33A8, 'M', 'm∕s2'), + (0x33A9, 'M', 'pa'), + (0x33AA, 'M', 'kpa'), + (0x33AB, 'M', 'mpa'), + (0x33AC, 'M', 'gpa'), + (0x33AD, 'M', 'rad'), + (0x33AE, 'M', 'rad∕s'), + (0x33AF, 'M', 'rad∕s2'), + (0x33B0, 'M', 'ps'), + (0x33B1, 'M', 'ns'), + (0x33B2, 'M', 'μs'), + (0x33B3, 'M', 'ms'), + (0x33B4, 'M', 'pv'), + (0x33B5, 'M', 'nv'), + (0x33B6, 'M', 'μv'), + (0x33B7, 'M', 'mv'), + (0x33B8, 'M', 'kv'), + (0x33B9, 'M', 'mv'), + (0x33BA, 'M', 'pw'), + (0x33BB, 'M', 'nw'), + (0x33BC, 'M', 'μw'), + (0x33BD, 'M', 'mw'), + (0x33BE, 'M', 'kw'), + (0x33BF, 'M', 'mw'), + (0x33C0, 'M', 'kω'), + (0x33C1, 'M', 'mω'), (0x33C2, 'X'), - (0x33C3, 'M', u'bq'), - (0x33C4, 'M', u'cc'), - (0x33C5, 'M', u'cd'), - (0x33C6, 'M', u'c∕kg'), + (0x33C3, 'M', 'bq'), + (0x33C4, 'M', 'cc'), + (0x33C5, 'M', 'cd'), + (0x33C6, 'M', 'c∕kg'), (0x33C7, 'X'), - (0x33C8, 'M', u'db'), - (0x33C9, 'M', u'gy'), - (0x33CA, 'M', u'ha'), - (0x33CB, 'M', u'hp'), - (0x33CC, 'M', u'in'), - (0x33CD, 'M', u'kk'), - (0x33CE, 'M', u'km'), - (0x33CF, 'M', u'kt'), - (0x33D0, 'M', u'lm'), - (0x33D1, 'M', u'ln'), - (0x33D2, 'M', u'log'), - (0x33D3, 'M', u'lx'), - (0x33D4, 'M', u'mb'), - (0x33D5, 'M', u'mil'), - (0x33D6, 'M', u'mol'), - (0x33D7, 'M', u'ph'), + (0x33C8, 'M', 'db'), + (0x33C9, 'M', 'gy'), + (0x33CA, 'M', 'ha'), + (0x33CB, 'M', 'hp'), + (0x33CC, 'M', 'in'), + (0x33CD, 'M', 'kk'), + (0x33CE, 'M', 'km'), + (0x33CF, 'M', 'kt'), + (0x33D0, 'M', 'lm'), + (0x33D1, 'M', 'ln'), + (0x33D2, 'M', 'log'), + (0x33D3, 'M', 'lx'), + (0x33D4, 'M', 'mb'), + (0x33D5, 'M', 'mil'), + (0x33D6, 'M', 'mol'), + (0x33D7, 'M', 'ph'), (0x33D8, 'X'), - (0x33D9, 'M', u'ppm'), - (0x33DA, 'M', u'pr'), - (0x33DB, 'M', u'sr'), - (0x33DC, 'M', u'sv'), - (0x33DD, 'M', u'wb'), - (0x33DE, 'M', u'v∕m'), - (0x33DF, 'M', u'a∕m'), - (0x33E0, 'M', u'1日'), - (0x33E1, 'M', u'2日'), - (0x33E2, 'M', u'3日'), - (0x33E3, 'M', u'4日'), - (0x33E4, 'M', u'5日'), - (0x33E5, 'M', u'6日'), - (0x33E6, 'M', u'7日'), - (0x33E7, 'M', u'8日'), - (0x33E8, 'M', u'9日'), - (0x33E9, 'M', u'10日'), - (0x33EA, 'M', u'11日'), - (0x33EB, 'M', u'12日'), - (0x33EC, 'M', u'13日'), - (0x33ED, 'M', u'14日'), - (0x33EE, 'M', u'15日'), - (0x33EF, 'M', u'16日'), - (0x33F0, 'M', u'17日'), - (0x33F1, 'M', u'18日'), - (0x33F2, 'M', u'19日'), - (0x33F3, 'M', u'20日'), + (0x33D9, 'M', 'ppm'), + (0x33DA, 'M', 'pr'), + (0x33DB, 'M', 'sr'), + (0x33DC, 'M', 'sv'), + (0x33DD, 'M', 'wb'), ] -def _seg_35(): +def _seg_35() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: return [ - (0x33F4, 'M', u'21日'), - (0x33F5, 'M', u'22日'), - (0x33F6, 'M', u'23日'), - (0x33F7, 'M', u'24日'), - (0x33F8, 'M', u'25日'), - (0x33F9, 'M', u'26日'), - (0x33FA, 'M', u'27日'), - (0x33FB, 'M', u'28日'), - (0x33FC, 'M', u'29日'), - (0x33FD, 'M', u'30日'), - (0x33FE, 'M', u'31日'), - (0x33FF, 'M', u'gal'), + (0x33DE, 'M', 'v∕m'), + (0x33DF, 'M', 'a∕m'), + (0x33E0, 'M', '1日'), + (0x33E1, 'M', '2日'), + (0x33E2, 'M', '3日'), + (0x33E3, 'M', '4日'), + (0x33E4, 'M', '5日'), + (0x33E5, 'M', '6日'), + (0x33E6, 'M', '7日'), + (0x33E7, 'M', '8日'), + (0x33E8, 'M', '9日'), + (0x33E9, 'M', '10日'), + (0x33EA, 'M', '11日'), + (0x33EB, 'M', '12日'), + (0x33EC, 'M', '13日'), + (0x33ED, 'M', '14日'), + (0x33EE, 'M', '15日'), + (0x33EF, 'M', '16日'), + (0x33F0, 'M', 
'17日'), + (0x33F1, 'M', '18日'), + (0x33F2, 'M', '19日'), + (0x33F3, 'M', '20日'), + (0x33F4, 'M', '21日'), + (0x33F5, 'M', '22日'), + (0x33F6, 'M', '23日'), + (0x33F7, 'M', '24日'), + (0x33F8, 'M', '25日'), + (0x33F9, 'M', '26日'), + (0x33FA, 'M', '27日'), + (0x33FB, 'M', '28日'), + (0x33FC, 'M', '29日'), + (0x33FD, 'M', '30日'), + (0x33FE, 'M', '31日'), + (0x33FF, 'M', 'gal'), (0x3400, 'V'), - (0x4DB6, 'X'), - (0x4DC0, 'V'), - (0x9FF0, 'X'), - (0xA000, 'V'), (0xA48D, 'X'), (0xA490, 'V'), (0xA4C7, 'X'), (0xA4D0, 'V'), (0xA62C, 'X'), - (0xA640, 'M', u'ꙁ'), + (0xA640, 'M', 'ꙁ'), (0xA641, 'V'), - (0xA642, 'M', u'ꙃ'), + (0xA642, 'M', 'ꙃ'), (0xA643, 'V'), - (0xA644, 'M', u'ꙅ'), + (0xA644, 'M', 'ꙅ'), (0xA645, 'V'), - (0xA646, 'M', u'ꙇ'), + (0xA646, 'M', 'ꙇ'), (0xA647, 'V'), - (0xA648, 'M', u'ꙉ'), + (0xA648, 'M', 'ꙉ'), (0xA649, 'V'), - (0xA64A, 'M', u'ꙋ'), + (0xA64A, 'M', 'ꙋ'), (0xA64B, 'V'), - (0xA64C, 'M', u'ꙍ'), + (0xA64C, 'M', 'ꙍ'), (0xA64D, 'V'), - (0xA64E, 'M', u'ꙏ'), + (0xA64E, 'M', 'ꙏ'), (0xA64F, 'V'), - (0xA650, 'M', u'ꙑ'), + (0xA650, 'M', 'ꙑ'), (0xA651, 'V'), - (0xA652, 'M', u'ꙓ'), + (0xA652, 'M', 'ꙓ'), (0xA653, 'V'), - (0xA654, 'M', u'ꙕ'), + (0xA654, 'M', 'ꙕ'), (0xA655, 'V'), - (0xA656, 'M', u'ꙗ'), + (0xA656, 'M', 'ꙗ'), (0xA657, 'V'), - (0xA658, 'M', u'ꙙ'), + (0xA658, 'M', 'ꙙ'), (0xA659, 'V'), - (0xA65A, 'M', u'ꙛ'), + (0xA65A, 'M', 'ꙛ'), (0xA65B, 'V'), - (0xA65C, 'M', u'ꙝ'), + (0xA65C, 'M', 'ꙝ'), (0xA65D, 'V'), - (0xA65E, 'M', u'ꙟ'), + (0xA65E, 'M', 'ꙟ'), (0xA65F, 'V'), - (0xA660, 'M', u'ꙡ'), + (0xA660, 'M', 'ꙡ'), (0xA661, 'V'), - (0xA662, 'M', u'ꙣ'), + (0xA662, 'M', 'ꙣ'), (0xA663, 'V'), - (0xA664, 'M', u'ꙥ'), + (0xA664, 'M', 'ꙥ'), (0xA665, 'V'), - (0xA666, 'M', u'ꙧ'), + (0xA666, 'M', 'ꙧ'), (0xA667, 'V'), - (0xA668, 'M', u'ꙩ'), + (0xA668, 'M', 'ꙩ'), (0xA669, 'V'), - (0xA66A, 'M', u'ꙫ'), + (0xA66A, 'M', 'ꙫ'), (0xA66B, 'V'), - (0xA66C, 'M', u'ꙭ'), + (0xA66C, 'M', 'ꙭ'), (0xA66D, 'V'), - (0xA680, 'M', u'ꚁ'), + (0xA680, 'M', 'ꚁ'), (0xA681, 'V'), - (0xA682, 'M', u'ꚃ'), + (0xA682, 'M', 'ꚃ'), (0xA683, 'V'), - (0xA684, 'M', u'ꚅ'), + (0xA684, 'M', 'ꚅ'), (0xA685, 'V'), - (0xA686, 'M', u'ꚇ'), + (0xA686, 'M', 'ꚇ'), (0xA687, 'V'), - (0xA688, 'M', u'ꚉ'), + (0xA688, 'M', 'ꚉ'), (0xA689, 'V'), - (0xA68A, 'M', u'ꚋ'), + (0xA68A, 'M', 'ꚋ'), (0xA68B, 'V'), - (0xA68C, 'M', u'ꚍ'), + (0xA68C, 'M', 'ꚍ'), (0xA68D, 'V'), - (0xA68E, 'M', u'ꚏ'), + ] + +def _seg_36() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xA68E, 'M', 'ꚏ'), (0xA68F, 'V'), - (0xA690, 'M', u'ꚑ'), + (0xA690, 'M', 'ꚑ'), (0xA691, 'V'), - (0xA692, 'M', u'ꚓ'), + (0xA692, 'M', 'ꚓ'), (0xA693, 'V'), - (0xA694, 'M', u'ꚕ'), + (0xA694, 'M', 'ꚕ'), (0xA695, 'V'), - (0xA696, 'M', u'ꚗ'), + (0xA696, 'M', 'ꚗ'), (0xA697, 'V'), - (0xA698, 'M', u'ꚙ'), + (0xA698, 'M', 'ꚙ'), (0xA699, 'V'), - (0xA69A, 'M', u'ꚛ'), + (0xA69A, 'M', 'ꚛ'), (0xA69B, 'V'), - (0xA69C, 'M', u'ъ'), - (0xA69D, 'M', u'ь'), + (0xA69C, 'M', 'ъ'), + (0xA69D, 'M', 'ь'), (0xA69E, 'V'), (0xA6F8, 'X'), - ] - -def _seg_36(): - return [ (0xA700, 'V'), - (0xA722, 'M', u'ꜣ'), + (0xA722, 'M', 'ꜣ'), (0xA723, 'V'), - (0xA724, 'M', u'ꜥ'), + (0xA724, 'M', 'ꜥ'), (0xA725, 'V'), - (0xA726, 'M', u'ꜧ'), + (0xA726, 'M', 'ꜧ'), (0xA727, 'V'), - (0xA728, 'M', u'ꜩ'), + (0xA728, 'M', 'ꜩ'), (0xA729, 'V'), - (0xA72A, 'M', u'ꜫ'), + (0xA72A, 'M', 'ꜫ'), (0xA72B, 'V'), - (0xA72C, 'M', u'ꜭ'), + (0xA72C, 'M', 'ꜭ'), (0xA72D, 'V'), - (0xA72E, 'M', u'ꜯ'), + (0xA72E, 'M', 'ꜯ'), (0xA72F, 'V'), - (0xA732, 'M', u'ꜳ'), + (0xA732, 'M', 'ꜳ'), (0xA733, 'V'), - (0xA734, 'M', u'ꜵ'), + (0xA734, 'M', 'ꜵ'), (0xA735, 'V'), - (0xA736, 
'M', u'ꜷ'), + (0xA736, 'M', 'ꜷ'), (0xA737, 'V'), - (0xA738, 'M', u'ꜹ'), + (0xA738, 'M', 'ꜹ'), (0xA739, 'V'), - (0xA73A, 'M', u'ꜻ'), + (0xA73A, 'M', 'ꜻ'), (0xA73B, 'V'), - (0xA73C, 'M', u'ꜽ'), + (0xA73C, 'M', 'ꜽ'), (0xA73D, 'V'), - (0xA73E, 'M', u'ꜿ'), + (0xA73E, 'M', 'ꜿ'), (0xA73F, 'V'), - (0xA740, 'M', u'ꝁ'), + (0xA740, 'M', 'ꝁ'), (0xA741, 'V'), - (0xA742, 'M', u'ꝃ'), + (0xA742, 'M', 'ꝃ'), (0xA743, 'V'), - (0xA744, 'M', u'ꝅ'), + (0xA744, 'M', 'ꝅ'), (0xA745, 'V'), - (0xA746, 'M', u'ꝇ'), + (0xA746, 'M', 'ꝇ'), (0xA747, 'V'), - (0xA748, 'M', u'ꝉ'), + (0xA748, 'M', 'ꝉ'), (0xA749, 'V'), - (0xA74A, 'M', u'ꝋ'), + (0xA74A, 'M', 'ꝋ'), (0xA74B, 'V'), - (0xA74C, 'M', u'ꝍ'), + (0xA74C, 'M', 'ꝍ'), (0xA74D, 'V'), - (0xA74E, 'M', u'ꝏ'), + (0xA74E, 'M', 'ꝏ'), (0xA74F, 'V'), - (0xA750, 'M', u'ꝑ'), + (0xA750, 'M', 'ꝑ'), (0xA751, 'V'), - (0xA752, 'M', u'ꝓ'), + (0xA752, 'M', 'ꝓ'), (0xA753, 'V'), - (0xA754, 'M', u'ꝕ'), + (0xA754, 'M', 'ꝕ'), (0xA755, 'V'), - (0xA756, 'M', u'ꝗ'), + (0xA756, 'M', 'ꝗ'), (0xA757, 'V'), - (0xA758, 'M', u'ꝙ'), + (0xA758, 'M', 'ꝙ'), (0xA759, 'V'), - (0xA75A, 'M', u'ꝛ'), + (0xA75A, 'M', 'ꝛ'), (0xA75B, 'V'), - (0xA75C, 'M', u'ꝝ'), + (0xA75C, 'M', 'ꝝ'), (0xA75D, 'V'), - (0xA75E, 'M', u'ꝟ'), + (0xA75E, 'M', 'ꝟ'), (0xA75F, 'V'), - (0xA760, 'M', u'ꝡ'), + (0xA760, 'M', 'ꝡ'), (0xA761, 'V'), - (0xA762, 'M', u'ꝣ'), + (0xA762, 'M', 'ꝣ'), (0xA763, 'V'), - (0xA764, 'M', u'ꝥ'), + (0xA764, 'M', 'ꝥ'), (0xA765, 'V'), - (0xA766, 'M', u'ꝧ'), + (0xA766, 'M', 'ꝧ'), (0xA767, 'V'), - (0xA768, 'M', u'ꝩ'), + (0xA768, 'M', 'ꝩ'), (0xA769, 'V'), - (0xA76A, 'M', u'ꝫ'), + (0xA76A, 'M', 'ꝫ'), (0xA76B, 'V'), - (0xA76C, 'M', u'ꝭ'), + (0xA76C, 'M', 'ꝭ'), (0xA76D, 'V'), - (0xA76E, 'M', u'ꝯ'), + (0xA76E, 'M', 'ꝯ'), (0xA76F, 'V'), - (0xA770, 'M', u'ꝯ'), + (0xA770, 'M', 'ꝯ'), (0xA771, 'V'), - (0xA779, 'M', u'ꝺ'), + (0xA779, 'M', 'ꝺ'), (0xA77A, 'V'), - (0xA77B, 'M', u'ꝼ'), + (0xA77B, 'M', 'ꝼ'), + ] + +def _seg_37() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0xA77C, 'V'), - (0xA77D, 'M', u'ᵹ'), - (0xA77E, 'M', u'ꝿ'), + (0xA77D, 'M', 'ᵹ'), + (0xA77E, 'M', 'ꝿ'), (0xA77F, 'V'), - (0xA780, 'M', u'ꞁ'), + (0xA780, 'M', 'ꞁ'), (0xA781, 'V'), - (0xA782, 'M', u'ꞃ'), + (0xA782, 'M', 'ꞃ'), (0xA783, 'V'), - (0xA784, 'M', u'ꞅ'), + (0xA784, 'M', 'ꞅ'), (0xA785, 'V'), - (0xA786, 'M', u'ꞇ'), + (0xA786, 'M', 'ꞇ'), (0xA787, 'V'), - (0xA78B, 'M', u'ꞌ'), + (0xA78B, 'M', 'ꞌ'), (0xA78C, 'V'), - (0xA78D, 'M', u'ɥ'), + (0xA78D, 'M', 'ɥ'), (0xA78E, 'V'), - (0xA790, 'M', u'ꞑ'), + (0xA790, 'M', 'ꞑ'), (0xA791, 'V'), - ] - -def _seg_37(): - return [ - (0xA792, 'M', u'ꞓ'), + (0xA792, 'M', 'ꞓ'), (0xA793, 'V'), - (0xA796, 'M', u'ꞗ'), + (0xA796, 'M', 'ꞗ'), (0xA797, 'V'), - (0xA798, 'M', u'ꞙ'), + (0xA798, 'M', 'ꞙ'), (0xA799, 'V'), - (0xA79A, 'M', u'ꞛ'), + (0xA79A, 'M', 'ꞛ'), (0xA79B, 'V'), - (0xA79C, 'M', u'ꞝ'), + (0xA79C, 'M', 'ꞝ'), (0xA79D, 'V'), - (0xA79E, 'M', u'ꞟ'), + (0xA79E, 'M', 'ꞟ'), (0xA79F, 'V'), - (0xA7A0, 'M', u'ꞡ'), + (0xA7A0, 'M', 'ꞡ'), (0xA7A1, 'V'), - (0xA7A2, 'M', u'ꞣ'), + (0xA7A2, 'M', 'ꞣ'), (0xA7A3, 'V'), - (0xA7A4, 'M', u'ꞥ'), + (0xA7A4, 'M', 'ꞥ'), (0xA7A5, 'V'), - (0xA7A6, 'M', u'ꞧ'), + (0xA7A6, 'M', 'ꞧ'), (0xA7A7, 'V'), - (0xA7A8, 'M', u'ꞩ'), + (0xA7A8, 'M', 'ꞩ'), (0xA7A9, 'V'), - (0xA7AA, 'M', u'ɦ'), - (0xA7AB, 'M', u'ɜ'), - (0xA7AC, 'M', u'ɡ'), - (0xA7AD, 'M', u'ɬ'), - (0xA7AE, 'M', u'ɪ'), + (0xA7AA, 'M', 'ɦ'), + (0xA7AB, 'M', 'ɜ'), + (0xA7AC, 'M', 'ɡ'), + (0xA7AD, 'M', 'ɬ'), + (0xA7AE, 'M', 'ɪ'), (0xA7AF, 'V'), - (0xA7B0, 'M', u'ʞ'), - (0xA7B1, 'M', u'ʇ'), - (0xA7B2, 'M', u'ʝ'), - (0xA7B3, 'M', u'ꭓ'), - 
(0xA7B4, 'M', u'ꞵ'), + (0xA7B0, 'M', 'ʞ'), + (0xA7B1, 'M', 'ʇ'), + (0xA7B2, 'M', 'ʝ'), + (0xA7B3, 'M', 'ꭓ'), + (0xA7B4, 'M', 'ꞵ'), (0xA7B5, 'V'), - (0xA7B6, 'M', u'ꞷ'), + (0xA7B6, 'M', 'ꞷ'), (0xA7B7, 'V'), - (0xA7B8, 'X'), + (0xA7B8, 'M', 'ꞹ'), (0xA7B9, 'V'), - (0xA7BA, 'X'), - (0xA7F7, 'V'), - (0xA7F8, 'M', u'ħ'), - (0xA7F9, 'M', u'œ'), + (0xA7BA, 'M', 'ꞻ'), + (0xA7BB, 'V'), + (0xA7BC, 'M', 'ꞽ'), + (0xA7BD, 'V'), + (0xA7BE, 'M', 'ꞿ'), + (0xA7BF, 'V'), + (0xA7C0, 'M', 'ꟁ'), + (0xA7C1, 'V'), + (0xA7C2, 'M', 'ꟃ'), + (0xA7C3, 'V'), + (0xA7C4, 'M', 'ꞔ'), + (0xA7C5, 'M', 'ʂ'), + (0xA7C6, 'M', 'ᶎ'), + (0xA7C7, 'M', 'ꟈ'), + (0xA7C8, 'V'), + (0xA7C9, 'M', 'ꟊ'), + (0xA7CA, 'V'), + (0xA7CB, 'X'), + (0xA7D0, 'M', 'ꟑ'), + (0xA7D1, 'V'), + (0xA7D2, 'X'), + (0xA7D3, 'V'), + (0xA7D4, 'X'), + (0xA7D5, 'V'), + (0xA7D6, 'M', 'ꟗ'), + (0xA7D7, 'V'), + (0xA7D8, 'M', 'ꟙ'), + (0xA7D9, 'V'), + (0xA7DA, 'X'), + (0xA7F2, 'M', 'c'), + (0xA7F3, 'M', 'f'), + (0xA7F4, 'M', 'q'), + (0xA7F5, 'M', 'ꟶ'), + (0xA7F6, 'V'), + (0xA7F8, 'M', 'ħ'), + (0xA7F9, 'M', 'œ'), (0xA7FA, 'V'), - (0xA82C, 'X'), + (0xA82D, 'X'), (0xA830, 'V'), (0xA83A, 'X'), (0xA840, 'V'), (0xA878, 'X'), (0xA880, 'V'), (0xA8C6, 'X'), + ] + +def _seg_38() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0xA8CE, 'V'), (0xA8DA, 'X'), (0xA8E0, 'V'), @@ -3938,96 +3995,98 @@ def _seg_37(): (0xAB28, 'V'), (0xAB2F, 'X'), (0xAB30, 'V'), - (0xAB5C, 'M', u'ꜧ'), - (0xAB5D, 'M', u'ꬷ'), - (0xAB5E, 'M', u'ɫ'), - (0xAB5F, 'M', u'ꭒ'), + (0xAB5C, 'M', 'ꜧ'), + (0xAB5D, 'M', 'ꬷ'), + (0xAB5E, 'M', 'ɫ'), + (0xAB5F, 'M', 'ꭒ'), (0xAB60, 'V'), - (0xAB66, 'X'), - (0xAB70, 'M', u'Ꭰ'), - (0xAB71, 'M', u'Ꭱ'), - (0xAB72, 'M', u'Ꭲ'), - (0xAB73, 'M', u'Ꭳ'), - (0xAB74, 'M', u'Ꭴ'), - (0xAB75, 'M', u'Ꭵ'), - (0xAB76, 'M', u'Ꭶ'), - (0xAB77, 'M', u'Ꭷ'), - (0xAB78, 'M', u'Ꭸ'), - (0xAB79, 'M', u'Ꭹ'), - (0xAB7A, 'M', u'Ꭺ'), + (0xAB69, 'M', 'ʍ'), + (0xAB6A, 'V'), + (0xAB6C, 'X'), + (0xAB70, 'M', 'Ꭰ'), + (0xAB71, 'M', 'Ꭱ'), + (0xAB72, 'M', 'Ꭲ'), + (0xAB73, 'M', 'Ꭳ'), + (0xAB74, 'M', 'Ꭴ'), + (0xAB75, 'M', 'Ꭵ'), + (0xAB76, 'M', 'Ꭶ'), + (0xAB77, 'M', 'Ꭷ'), + (0xAB78, 'M', 'Ꭸ'), + (0xAB79, 'M', 'Ꭹ'), + (0xAB7A, 'M', 'Ꭺ'), + (0xAB7B, 'M', 'Ꭻ'), + (0xAB7C, 'M', 'Ꭼ'), + (0xAB7D, 'M', 'Ꭽ'), + (0xAB7E, 'M', 'Ꭾ'), + (0xAB7F, 'M', 'Ꭿ'), + (0xAB80, 'M', 'Ꮀ'), + (0xAB81, 'M', 'Ꮁ'), + (0xAB82, 'M', 'Ꮂ'), + (0xAB83, 'M', 'Ꮃ'), + (0xAB84, 'M', 'Ꮄ'), + (0xAB85, 'M', 'Ꮅ'), + (0xAB86, 'M', 'Ꮆ'), + (0xAB87, 'M', 'Ꮇ'), + (0xAB88, 'M', 'Ꮈ'), + (0xAB89, 'M', 'Ꮉ'), + (0xAB8A, 'M', 'Ꮊ'), + (0xAB8B, 'M', 'Ꮋ'), + (0xAB8C, 'M', 'Ꮌ'), + (0xAB8D, 'M', 'Ꮍ'), + (0xAB8E, 'M', 'Ꮎ'), + (0xAB8F, 'M', 'Ꮏ'), + (0xAB90, 'M', 'Ꮐ'), + (0xAB91, 'M', 'Ꮑ'), + (0xAB92, 'M', 'Ꮒ'), + (0xAB93, 'M', 'Ꮓ'), + (0xAB94, 'M', 'Ꮔ'), + (0xAB95, 'M', 'Ꮕ'), + (0xAB96, 'M', 'Ꮖ'), + (0xAB97, 'M', 'Ꮗ'), + (0xAB98, 'M', 'Ꮘ'), + (0xAB99, 'M', 'Ꮙ'), + (0xAB9A, 'M', 'Ꮚ'), + (0xAB9B, 'M', 'Ꮛ'), + (0xAB9C, 'M', 'Ꮜ'), + (0xAB9D, 'M', 'Ꮝ'), + (0xAB9E, 'M', 'Ꮞ'), + (0xAB9F, 'M', 'Ꮟ'), + (0xABA0, 'M', 'Ꮠ'), + (0xABA1, 'M', 'Ꮡ'), + (0xABA2, 'M', 'Ꮢ'), + (0xABA3, 'M', 'Ꮣ'), + (0xABA4, 'M', 'Ꮤ'), + (0xABA5, 'M', 'Ꮥ'), + (0xABA6, 'M', 'Ꮦ'), + (0xABA7, 'M', 'Ꮧ'), + (0xABA8, 'M', 'Ꮨ'), + (0xABA9, 'M', 'Ꮩ'), + (0xABAA, 'M', 'Ꮪ'), ] -def _seg_38(): +def _seg_39() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: return [ - (0xAB7B, 'M', u'Ꭻ'), - (0xAB7C, 'M', u'Ꭼ'), - (0xAB7D, 'M', u'Ꭽ'), - (0xAB7E, 'M', u'Ꭾ'), - (0xAB7F, 'M', u'Ꭿ'), - (0xAB80, 'M', u'Ꮀ'), - (0xAB81, 'M', u'Ꮁ'), - (0xAB82, 'M', u'Ꮂ'), - (0xAB83, 'M', u'Ꮃ'), - (0xAB84, 'M', u'Ꮄ'), - 
(0xAB85, 'M', u'Ꮅ'), - (0xAB86, 'M', u'Ꮆ'), - (0xAB87, 'M', u'Ꮇ'), - (0xAB88, 'M', u'Ꮈ'), - (0xAB89, 'M', u'Ꮉ'), - (0xAB8A, 'M', u'Ꮊ'), - (0xAB8B, 'M', u'Ꮋ'), - (0xAB8C, 'M', u'Ꮌ'), - (0xAB8D, 'M', u'Ꮍ'), - (0xAB8E, 'M', u'Ꮎ'), - (0xAB8F, 'M', u'Ꮏ'), - (0xAB90, 'M', u'Ꮐ'), - (0xAB91, 'M', u'Ꮑ'), - (0xAB92, 'M', u'Ꮒ'), - (0xAB93, 'M', u'Ꮓ'), - (0xAB94, 'M', u'Ꮔ'), - (0xAB95, 'M', u'Ꮕ'), - (0xAB96, 'M', u'Ꮖ'), - (0xAB97, 'M', u'Ꮗ'), - (0xAB98, 'M', u'Ꮘ'), - (0xAB99, 'M', u'Ꮙ'), - (0xAB9A, 'M', u'Ꮚ'), - (0xAB9B, 'M', u'Ꮛ'), - (0xAB9C, 'M', u'Ꮜ'), - (0xAB9D, 'M', u'Ꮝ'), - (0xAB9E, 'M', u'Ꮞ'), - (0xAB9F, 'M', u'Ꮟ'), - (0xABA0, 'M', u'Ꮠ'), - (0xABA1, 'M', u'Ꮡ'), - (0xABA2, 'M', u'Ꮢ'), - (0xABA3, 'M', u'Ꮣ'), - (0xABA4, 'M', u'Ꮤ'), - (0xABA5, 'M', u'Ꮥ'), - (0xABA6, 'M', u'Ꮦ'), - (0xABA7, 'M', u'Ꮧ'), - (0xABA8, 'M', u'Ꮨ'), - (0xABA9, 'M', u'Ꮩ'), - (0xABAA, 'M', u'Ꮪ'), - (0xABAB, 'M', u'Ꮫ'), - (0xABAC, 'M', u'Ꮬ'), - (0xABAD, 'M', u'Ꮭ'), - (0xABAE, 'M', u'Ꮮ'), - (0xABAF, 'M', u'Ꮯ'), - (0xABB0, 'M', u'Ꮰ'), - (0xABB1, 'M', u'Ꮱ'), - (0xABB2, 'M', u'Ꮲ'), - (0xABB3, 'M', u'Ꮳ'), - (0xABB4, 'M', u'Ꮴ'), - (0xABB5, 'M', u'Ꮵ'), - (0xABB6, 'M', u'Ꮶ'), - (0xABB7, 'M', u'Ꮷ'), - (0xABB8, 'M', u'Ꮸ'), - (0xABB9, 'M', u'Ꮹ'), - (0xABBA, 'M', u'Ꮺ'), - (0xABBB, 'M', u'Ꮻ'), - (0xABBC, 'M', u'Ꮼ'), - (0xABBD, 'M', u'Ꮽ'), - (0xABBE, 'M', u'Ꮾ'), - (0xABBF, 'M', u'Ꮿ'), + (0xABAB, 'M', 'Ꮫ'), + (0xABAC, 'M', 'Ꮬ'), + (0xABAD, 'M', 'Ꮭ'), + (0xABAE, 'M', 'Ꮮ'), + (0xABAF, 'M', 'Ꮯ'), + (0xABB0, 'M', 'Ꮰ'), + (0xABB1, 'M', 'Ꮱ'), + (0xABB2, 'M', 'Ꮲ'), + (0xABB3, 'M', 'Ꮳ'), + (0xABB4, 'M', 'Ꮴ'), + (0xABB5, 'M', 'Ꮵ'), + (0xABB6, 'M', 'Ꮶ'), + (0xABB7, 'M', 'Ꮷ'), + (0xABB8, 'M', 'Ꮸ'), + (0xABB9, 'M', 'Ꮹ'), + (0xABBA, 'M', 'Ꮺ'), + (0xABBB, 'M', 'Ꮻ'), + (0xABBC, 'M', 'Ꮼ'), + (0xABBD, 'M', 'Ꮽ'), + (0xABBE, 'M', 'Ꮾ'), + (0xABBF, 'M', 'Ꮿ'), (0xABC0, 'V'), (0xABEE, 'X'), (0xABF0, 'V'), @@ -4038,1436 +4097,1436 @@ def _seg_38(): (0xD7C7, 'X'), (0xD7CB, 'V'), (0xD7FC, 'X'), - (0xF900, 'M', u'豈'), - (0xF901, 'M', u'更'), - (0xF902, 'M', u'車'), - (0xF903, 'M', u'賈'), - (0xF904, 'M', u'滑'), - (0xF905, 'M', u'串'), - (0xF906, 'M', u'句'), - (0xF907, 'M', u'龜'), - (0xF909, 'M', u'契'), - (0xF90A, 'M', u'金'), - (0xF90B, 'M', u'喇'), - (0xF90C, 'M', u'奈'), - (0xF90D, 'M', u'懶'), - (0xF90E, 'M', u'癩'), - (0xF90F, 'M', u'羅'), - (0xF910, 'M', u'蘿'), - (0xF911, 'M', u'螺'), - (0xF912, 'M', u'裸'), - (0xF913, 'M', u'邏'), - (0xF914, 'M', u'樂'), - (0xF915, 'M', u'洛'), + (0xF900, 'M', '豈'), + (0xF901, 'M', '更'), + (0xF902, 'M', '車'), + (0xF903, 'M', '賈'), + (0xF904, 'M', '滑'), + (0xF905, 'M', '串'), + (0xF906, 'M', '句'), + (0xF907, 'M', '龜'), + (0xF909, 'M', '契'), + (0xF90A, 'M', '金'), + (0xF90B, 'M', '喇'), + (0xF90C, 'M', '奈'), + (0xF90D, 'M', '懶'), + (0xF90E, 'M', '癩'), + (0xF90F, 'M', '羅'), + (0xF910, 'M', '蘿'), + (0xF911, 'M', '螺'), + (0xF912, 'M', '裸'), + (0xF913, 'M', '邏'), + (0xF914, 'M', '樂'), + (0xF915, 'M', '洛'), + (0xF916, 'M', '烙'), + (0xF917, 'M', '珞'), + (0xF918, 'M', '落'), + (0xF919, 'M', '酪'), + (0xF91A, 'M', '駱'), + (0xF91B, 'M', '亂'), + (0xF91C, 'M', '卵'), + (0xF91D, 'M', '欄'), + (0xF91E, 'M', '爛'), + (0xF91F, 'M', '蘭'), + (0xF920, 'M', '鸞'), + (0xF921, 'M', '嵐'), + (0xF922, 'M', '濫'), + (0xF923, 'M', '藍'), + (0xF924, 'M', '襤'), + (0xF925, 'M', '拉'), + (0xF926, 'M', '臘'), + (0xF927, 'M', '蠟'), + (0xF928, 'M', '廊'), + (0xF929, 'M', '朗'), + (0xF92A, 'M', '浪'), + (0xF92B, 'M', '狼'), + (0xF92C, 'M', '郎'), + (0xF92D, 'M', '來'), + (0xF92E, 'M', '冷'), + (0xF92F, 'M', '勞'), + (0xF930, 'M', '擄'), + (0xF931, 'M', '櫓'), + (0xF932, 'M', '爐'), + (0xF933, 'M', '盧'), 
+ (0xF934, 'M', '老'), + (0xF935, 'M', '蘆'), + (0xF936, 'M', '虜'), + (0xF937, 'M', '路'), + (0xF938, 'M', '露'), + (0xF939, 'M', '魯'), + (0xF93A, 'M', '鷺'), + (0xF93B, 'M', '碌'), + (0xF93C, 'M', '祿'), + (0xF93D, 'M', '綠'), + (0xF93E, 'M', '菉'), + (0xF93F, 'M', '錄'), + (0xF940, 'M', '鹿'), + (0xF941, 'M', '論'), + (0xF942, 'M', '壟'), + (0xF943, 'M', '弄'), + (0xF944, 'M', '籠'), + (0xF945, 'M', '聾'), ] -def _seg_39(): +def _seg_40() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: return [ - (0xF916, 'M', u'烙'), - (0xF917, 'M', u'珞'), - (0xF918, 'M', u'落'), - (0xF919, 'M', u'酪'), - (0xF91A, 'M', u'駱'), - (0xF91B, 'M', u'亂'), - (0xF91C, 'M', u'卵'), - (0xF91D, 'M', u'欄'), - (0xF91E, 'M', u'爛'), - (0xF91F, 'M', u'蘭'), - (0xF920, 'M', u'鸞'), - (0xF921, 'M', u'嵐'), - (0xF922, 'M', u'濫'), - (0xF923, 'M', u'藍'), - (0xF924, 'M', u'襤'), - (0xF925, 'M', u'拉'), - (0xF926, 'M', u'臘'), - (0xF927, 'M', u'蠟'), - (0xF928, 'M', u'廊'), - (0xF929, 'M', u'朗'), - (0xF92A, 'M', u'浪'), - (0xF92B, 'M', u'狼'), - (0xF92C, 'M', u'郎'), - (0xF92D, 'M', u'來'), - (0xF92E, 'M', u'冷'), - (0xF92F, 'M', u'勞'), - (0xF930, 'M', u'擄'), - (0xF931, 'M', u'櫓'), - (0xF932, 'M', u'爐'), - (0xF933, 'M', u'盧'), - (0xF934, 'M', u'老'), - (0xF935, 'M', u'蘆'), - (0xF936, 'M', u'虜'), - (0xF937, 'M', u'路'), - (0xF938, 'M', u'露'), - (0xF939, 'M', u'魯'), - (0xF93A, 'M', u'鷺'), - (0xF93B, 'M', u'碌'), - (0xF93C, 'M', u'祿'), - (0xF93D, 'M', u'綠'), - (0xF93E, 'M', u'菉'), - (0xF93F, 'M', u'錄'), - (0xF940, 'M', u'鹿'), - (0xF941, 'M', u'論'), - (0xF942, 'M', u'壟'), - (0xF943, 'M', u'弄'), - (0xF944, 'M', u'籠'), - (0xF945, 'M', u'聾'), - (0xF946, 'M', u'牢'), - (0xF947, 'M', u'磊'), - (0xF948, 'M', u'賂'), - (0xF949, 'M', u'雷'), - (0xF94A, 'M', u'壘'), - (0xF94B, 'M', u'屢'), - (0xF94C, 'M', u'樓'), - (0xF94D, 'M', u'淚'), - (0xF94E, 'M', u'漏'), - (0xF94F, 'M', u'累'), - (0xF950, 'M', u'縷'), - (0xF951, 'M', u'陋'), - (0xF952, 'M', u'勒'), - (0xF953, 'M', u'肋'), - (0xF954, 'M', u'凜'), - (0xF955, 'M', u'凌'), - (0xF956, 'M', u'稜'), - (0xF957, 'M', u'綾'), - (0xF958, 'M', u'菱'), - (0xF959, 'M', u'陵'), - (0xF95A, 'M', u'讀'), - (0xF95B, 'M', u'拏'), - (0xF95C, 'M', u'樂'), - (0xF95D, 'M', u'諾'), - (0xF95E, 'M', u'丹'), - (0xF95F, 'M', u'寧'), - (0xF960, 'M', u'怒'), - (0xF961, 'M', u'率'), - (0xF962, 'M', u'異'), - (0xF963, 'M', u'北'), - (0xF964, 'M', u'磻'), - (0xF965, 'M', u'便'), - (0xF966, 'M', u'復'), - (0xF967, 'M', u'不'), - (0xF968, 'M', u'泌'), - (0xF969, 'M', u'數'), - (0xF96A, 'M', u'索'), - (0xF96B, 'M', u'參'), - (0xF96C, 'M', u'塞'), - (0xF96D, 'M', u'省'), - (0xF96E, 'M', u'葉'), - (0xF96F, 'M', u'說'), - (0xF970, 'M', u'殺'), - (0xF971, 'M', u'辰'), - (0xF972, 'M', u'沈'), - (0xF973, 'M', u'拾'), - (0xF974, 'M', u'若'), - (0xF975, 'M', u'掠'), - (0xF976, 'M', u'略'), - (0xF977, 'M', u'亮'), - (0xF978, 'M', u'兩'), - (0xF979, 'M', u'凉'), + (0xF946, 'M', '牢'), + (0xF947, 'M', '磊'), + (0xF948, 'M', '賂'), + (0xF949, 'M', '雷'), + (0xF94A, 'M', '壘'), + (0xF94B, 'M', '屢'), + (0xF94C, 'M', '樓'), + (0xF94D, 'M', '淚'), + (0xF94E, 'M', '漏'), + (0xF94F, 'M', '累'), + (0xF950, 'M', '縷'), + (0xF951, 'M', '陋'), + (0xF952, 'M', '勒'), + (0xF953, 'M', '肋'), + (0xF954, 'M', '凜'), + (0xF955, 'M', '凌'), + (0xF956, 'M', '稜'), + (0xF957, 'M', '綾'), + (0xF958, 'M', '菱'), + (0xF959, 'M', '陵'), + (0xF95A, 'M', '讀'), + (0xF95B, 'M', '拏'), + (0xF95C, 'M', '樂'), + (0xF95D, 'M', '諾'), + (0xF95E, 'M', '丹'), + (0xF95F, 'M', '寧'), + (0xF960, 'M', '怒'), + (0xF961, 'M', '率'), + (0xF962, 'M', '異'), + (0xF963, 'M', '北'), + (0xF964, 'M', '磻'), + (0xF965, 'M', '便'), + (0xF966, 'M', '復'), + (0xF967, 'M', '不'), + (0xF968, 
'M', '泌'), + (0xF969, 'M', '數'), + (0xF96A, 'M', '索'), + (0xF96B, 'M', '參'), + (0xF96C, 'M', '塞'), + (0xF96D, 'M', '省'), + (0xF96E, 'M', '葉'), + (0xF96F, 'M', '說'), + (0xF970, 'M', '殺'), + (0xF971, 'M', '辰'), + (0xF972, 'M', '沈'), + (0xF973, 'M', '拾'), + (0xF974, 'M', '若'), + (0xF975, 'M', '掠'), + (0xF976, 'M', '略'), + (0xF977, 'M', '亮'), + (0xF978, 'M', '兩'), + (0xF979, 'M', '凉'), + (0xF97A, 'M', '梁'), + (0xF97B, 'M', '糧'), + (0xF97C, 'M', '良'), + (0xF97D, 'M', '諒'), + (0xF97E, 'M', '量'), + (0xF97F, 'M', '勵'), + (0xF980, 'M', '呂'), + (0xF981, 'M', '女'), + (0xF982, 'M', '廬'), + (0xF983, 'M', '旅'), + (0xF984, 'M', '濾'), + (0xF985, 'M', '礪'), + (0xF986, 'M', '閭'), + (0xF987, 'M', '驪'), + (0xF988, 'M', '麗'), + (0xF989, 'M', '黎'), + (0xF98A, 'M', '力'), + (0xF98B, 'M', '曆'), + (0xF98C, 'M', '歷'), + (0xF98D, 'M', '轢'), + (0xF98E, 'M', '年'), + (0xF98F, 'M', '憐'), + (0xF990, 'M', '戀'), + (0xF991, 'M', '撚'), + (0xF992, 'M', '漣'), + (0xF993, 'M', '煉'), + (0xF994, 'M', '璉'), + (0xF995, 'M', '秊'), + (0xF996, 'M', '練'), + (0xF997, 'M', '聯'), + (0xF998, 'M', '輦'), + (0xF999, 'M', '蓮'), + (0xF99A, 'M', '連'), + (0xF99B, 'M', '鍊'), + (0xF99C, 'M', '列'), + (0xF99D, 'M', '劣'), + (0xF99E, 'M', '咽'), + (0xF99F, 'M', '烈'), + (0xF9A0, 'M', '裂'), + (0xF9A1, 'M', '說'), + (0xF9A2, 'M', '廉'), + (0xF9A3, 'M', '念'), + (0xF9A4, 'M', '捻'), + (0xF9A5, 'M', '殮'), + (0xF9A6, 'M', '簾'), + (0xF9A7, 'M', '獵'), + (0xF9A8, 'M', '令'), + (0xF9A9, 'M', '囹'), ] -def _seg_40(): +def _seg_41() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: return [ - (0xF97A, 'M', u'梁'), - (0xF97B, 'M', u'糧'), - (0xF97C, 'M', u'良'), - (0xF97D, 'M', u'諒'), - (0xF97E, 'M', u'量'), - (0xF97F, 'M', u'勵'), - (0xF980, 'M', u'呂'), - (0xF981, 'M', u'女'), - (0xF982, 'M', u'廬'), - (0xF983, 'M', u'旅'), - (0xF984, 'M', u'濾'), - (0xF985, 'M', u'礪'), - (0xF986, 'M', u'閭'), - (0xF987, 'M', u'驪'), - (0xF988, 'M', u'麗'), - (0xF989, 'M', u'黎'), - (0xF98A, 'M', u'力'), - (0xF98B, 'M', u'曆'), - (0xF98C, 'M', u'歷'), - (0xF98D, 'M', u'轢'), - (0xF98E, 'M', u'年'), - (0xF98F, 'M', u'憐'), - (0xF990, 'M', u'戀'), - (0xF991, 'M', u'撚'), - (0xF992, 'M', u'漣'), - (0xF993, 'M', u'煉'), - (0xF994, 'M', u'璉'), - (0xF995, 'M', u'秊'), - (0xF996, 'M', u'練'), - (0xF997, 'M', u'聯'), - (0xF998, 'M', u'輦'), - (0xF999, 'M', u'蓮'), - (0xF99A, 'M', u'連'), - (0xF99B, 'M', u'鍊'), - (0xF99C, 'M', u'列'), - (0xF99D, 'M', u'劣'), - (0xF99E, 'M', u'咽'), - (0xF99F, 'M', u'烈'), - (0xF9A0, 'M', u'裂'), - (0xF9A1, 'M', u'說'), - (0xF9A2, 'M', u'廉'), - (0xF9A3, 'M', u'念'), - (0xF9A4, 'M', u'捻'), - (0xF9A5, 'M', u'殮'), - (0xF9A6, 'M', u'簾'), - (0xF9A7, 'M', u'獵'), - (0xF9A8, 'M', u'令'), - (0xF9A9, 'M', u'囹'), - (0xF9AA, 'M', u'寧'), - (0xF9AB, 'M', u'嶺'), - (0xF9AC, 'M', u'怜'), - (0xF9AD, 'M', u'玲'), - (0xF9AE, 'M', u'瑩'), - (0xF9AF, 'M', u'羚'), - (0xF9B0, 'M', u'聆'), - (0xF9B1, 'M', u'鈴'), - (0xF9B2, 'M', u'零'), - (0xF9B3, 'M', u'靈'), - (0xF9B4, 'M', u'領'), - (0xF9B5, 'M', u'例'), - (0xF9B6, 'M', u'禮'), - (0xF9B7, 'M', u'醴'), - (0xF9B8, 'M', u'隸'), - (0xF9B9, 'M', u'惡'), - (0xF9BA, 'M', u'了'), - (0xF9BB, 'M', u'僚'), - (0xF9BC, 'M', u'寮'), - (0xF9BD, 'M', u'尿'), - (0xF9BE, 'M', u'料'), - (0xF9BF, 'M', u'樂'), - (0xF9C0, 'M', u'燎'), - (0xF9C1, 'M', u'療'), - (0xF9C2, 'M', u'蓼'), - (0xF9C3, 'M', u'遼'), - (0xF9C4, 'M', u'龍'), - (0xF9C5, 'M', u'暈'), - (0xF9C6, 'M', u'阮'), - (0xF9C7, 'M', u'劉'), - (0xF9C8, 'M', u'杻'), - (0xF9C9, 'M', u'柳'), - (0xF9CA, 'M', u'流'), - (0xF9CB, 'M', u'溜'), - (0xF9CC, 'M', u'琉'), - (0xF9CD, 'M', u'留'), - (0xF9CE, 'M', u'硫'), - (0xF9CF, 'M', u'紐'), - (0xF9D0, 'M', u'類'), - (0xF9D1, 'M', 
u'六'), - (0xF9D2, 'M', u'戮'), - (0xF9D3, 'M', u'陸'), - (0xF9D4, 'M', u'倫'), - (0xF9D5, 'M', u'崙'), - (0xF9D6, 'M', u'淪'), - (0xF9D7, 'M', u'輪'), - (0xF9D8, 'M', u'律'), - (0xF9D9, 'M', u'慄'), - (0xF9DA, 'M', u'栗'), - (0xF9DB, 'M', u'率'), - (0xF9DC, 'M', u'隆'), - (0xF9DD, 'M', u'利'), + (0xF9AA, 'M', '寧'), + (0xF9AB, 'M', '嶺'), + (0xF9AC, 'M', '怜'), + (0xF9AD, 'M', '玲'), + (0xF9AE, 'M', '瑩'), + (0xF9AF, 'M', '羚'), + (0xF9B0, 'M', '聆'), + (0xF9B1, 'M', '鈴'), + (0xF9B2, 'M', '零'), + (0xF9B3, 'M', '靈'), + (0xF9B4, 'M', '領'), + (0xF9B5, 'M', '例'), + (0xF9B6, 'M', '禮'), + (0xF9B7, 'M', '醴'), + (0xF9B8, 'M', '隸'), + (0xF9B9, 'M', '惡'), + (0xF9BA, 'M', '了'), + (0xF9BB, 'M', '僚'), + (0xF9BC, 'M', '寮'), + (0xF9BD, 'M', '尿'), + (0xF9BE, 'M', '料'), + (0xF9BF, 'M', '樂'), + (0xF9C0, 'M', '燎'), + (0xF9C1, 'M', '療'), + (0xF9C2, 'M', '蓼'), + (0xF9C3, 'M', '遼'), + (0xF9C4, 'M', '龍'), + (0xF9C5, 'M', '暈'), + (0xF9C6, 'M', '阮'), + (0xF9C7, 'M', '劉'), + (0xF9C8, 'M', '杻'), + (0xF9C9, 'M', '柳'), + (0xF9CA, 'M', '流'), + (0xF9CB, 'M', '溜'), + (0xF9CC, 'M', '琉'), + (0xF9CD, 'M', '留'), + (0xF9CE, 'M', '硫'), + (0xF9CF, 'M', '紐'), + (0xF9D0, 'M', '類'), + (0xF9D1, 'M', '六'), + (0xF9D2, 'M', '戮'), + (0xF9D3, 'M', '陸'), + (0xF9D4, 'M', '倫'), + (0xF9D5, 'M', '崙'), + (0xF9D6, 'M', '淪'), + (0xF9D7, 'M', '輪'), + (0xF9D8, 'M', '律'), + (0xF9D9, 'M', '慄'), + (0xF9DA, 'M', '栗'), + (0xF9DB, 'M', '率'), + (0xF9DC, 'M', '隆'), + (0xF9DD, 'M', '利'), + (0xF9DE, 'M', '吏'), + (0xF9DF, 'M', '履'), + (0xF9E0, 'M', '易'), + (0xF9E1, 'M', '李'), + (0xF9E2, 'M', '梨'), + (0xF9E3, 'M', '泥'), + (0xF9E4, 'M', '理'), + (0xF9E5, 'M', '痢'), + (0xF9E6, 'M', '罹'), + (0xF9E7, 'M', '裏'), + (0xF9E8, 'M', '裡'), + (0xF9E9, 'M', '里'), + (0xF9EA, 'M', '離'), + (0xF9EB, 'M', '匿'), + (0xF9EC, 'M', '溺'), + (0xF9ED, 'M', '吝'), + (0xF9EE, 'M', '燐'), + (0xF9EF, 'M', '璘'), + (0xF9F0, 'M', '藺'), + (0xF9F1, 'M', '隣'), + (0xF9F2, 'M', '鱗'), + (0xF9F3, 'M', '麟'), + (0xF9F4, 'M', '林'), + (0xF9F5, 'M', '淋'), + (0xF9F6, 'M', '臨'), + (0xF9F7, 'M', '立'), + (0xF9F8, 'M', '笠'), + (0xF9F9, 'M', '粒'), + (0xF9FA, 'M', '狀'), + (0xF9FB, 'M', '炙'), + (0xF9FC, 'M', '識'), + (0xF9FD, 'M', '什'), + (0xF9FE, 'M', '茶'), + (0xF9FF, 'M', '刺'), + (0xFA00, 'M', '切'), + (0xFA01, 'M', '度'), + (0xFA02, 'M', '拓'), + (0xFA03, 'M', '糖'), + (0xFA04, 'M', '宅'), + (0xFA05, 'M', '洞'), + (0xFA06, 'M', '暴'), + (0xFA07, 'M', '輻'), + (0xFA08, 'M', '行'), + (0xFA09, 'M', '降'), + (0xFA0A, 'M', '見'), + (0xFA0B, 'M', '廓'), + (0xFA0C, 'M', '兀'), + (0xFA0D, 'M', '嗀'), ] -def _seg_41(): +def _seg_42() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: return [ - (0xF9DE, 'M', u'吏'), - (0xF9DF, 'M', u'履'), - (0xF9E0, 'M', u'易'), - (0xF9E1, 'M', u'李'), - (0xF9E2, 'M', u'梨'), - (0xF9E3, 'M', u'泥'), - (0xF9E4, 'M', u'理'), - (0xF9E5, 'M', u'痢'), - (0xF9E6, 'M', u'罹'), - (0xF9E7, 'M', u'裏'), - (0xF9E8, 'M', u'裡'), - (0xF9E9, 'M', u'里'), - (0xF9EA, 'M', u'離'), - (0xF9EB, 'M', u'匿'), - (0xF9EC, 'M', u'溺'), - (0xF9ED, 'M', u'吝'), - (0xF9EE, 'M', u'燐'), - (0xF9EF, 'M', u'璘'), - (0xF9F0, 'M', u'藺'), - (0xF9F1, 'M', u'隣'), - (0xF9F2, 'M', u'鱗'), - (0xF9F3, 'M', u'麟'), - (0xF9F4, 'M', u'林'), - (0xF9F5, 'M', u'淋'), - (0xF9F6, 'M', u'臨'), - (0xF9F7, 'M', u'立'), - (0xF9F8, 'M', u'笠'), - (0xF9F9, 'M', u'粒'), - (0xF9FA, 'M', u'狀'), - (0xF9FB, 'M', u'炙'), - (0xF9FC, 'M', u'識'), - (0xF9FD, 'M', u'什'), - (0xF9FE, 'M', u'茶'), - (0xF9FF, 'M', u'刺'), - (0xFA00, 'M', u'切'), - (0xFA01, 'M', u'度'), - (0xFA02, 'M', u'拓'), - (0xFA03, 'M', u'糖'), - (0xFA04, 'M', u'宅'), - (0xFA05, 'M', u'洞'), - (0xFA06, 'M', u'暴'), - (0xFA07, 'M', u'輻'), - 
(0xFA08, 'M', u'行'), - (0xFA09, 'M', u'降'), - (0xFA0A, 'M', u'見'), - (0xFA0B, 'M', u'廓'), - (0xFA0C, 'M', u'兀'), - (0xFA0D, 'M', u'嗀'), (0xFA0E, 'V'), - (0xFA10, 'M', u'塚'), + (0xFA10, 'M', '塚'), (0xFA11, 'V'), - (0xFA12, 'M', u'晴'), + (0xFA12, 'M', '晴'), (0xFA13, 'V'), - (0xFA15, 'M', u'凞'), - (0xFA16, 'M', u'猪'), - (0xFA17, 'M', u'益'), - (0xFA18, 'M', u'礼'), - (0xFA19, 'M', u'神'), - (0xFA1A, 'M', u'祥'), - (0xFA1B, 'M', u'福'), - (0xFA1C, 'M', u'靖'), - (0xFA1D, 'M', u'精'), - (0xFA1E, 'M', u'羽'), + (0xFA15, 'M', '凞'), + (0xFA16, 'M', '猪'), + (0xFA17, 'M', '益'), + (0xFA18, 'M', '礼'), + (0xFA19, 'M', '神'), + (0xFA1A, 'M', '祥'), + (0xFA1B, 'M', '福'), + (0xFA1C, 'M', '靖'), + (0xFA1D, 'M', '精'), + (0xFA1E, 'M', '羽'), (0xFA1F, 'V'), - (0xFA20, 'M', u'蘒'), + (0xFA20, 'M', '蘒'), (0xFA21, 'V'), - (0xFA22, 'M', u'諸'), + (0xFA22, 'M', '諸'), (0xFA23, 'V'), - (0xFA25, 'M', u'逸'), - (0xFA26, 'M', u'都'), + (0xFA25, 'M', '逸'), + (0xFA26, 'M', '都'), (0xFA27, 'V'), - (0xFA2A, 'M', u'飯'), - (0xFA2B, 'M', u'飼'), - (0xFA2C, 'M', u'館'), - (0xFA2D, 'M', u'鶴'), - (0xFA2E, 'M', u'郞'), - (0xFA2F, 'M', u'隷'), - (0xFA30, 'M', u'侮'), - (0xFA31, 'M', u'僧'), - (0xFA32, 'M', u'免'), - (0xFA33, 'M', u'勉'), - (0xFA34, 'M', u'勤'), - (0xFA35, 'M', u'卑'), - (0xFA36, 'M', u'喝'), - (0xFA37, 'M', u'嘆'), - (0xFA38, 'M', u'器'), - (0xFA39, 'M', u'塀'), - (0xFA3A, 'M', u'墨'), - (0xFA3B, 'M', u'層'), - (0xFA3C, 'M', u'屮'), - (0xFA3D, 'M', u'悔'), - (0xFA3E, 'M', u'慨'), - (0xFA3F, 'M', u'憎'), - (0xFA40, 'M', u'懲'), - (0xFA41, 'M', u'敏'), - (0xFA42, 'M', u'既'), - (0xFA43, 'M', u'暑'), - (0xFA44, 'M', u'梅'), - (0xFA45, 'M', u'海'), - (0xFA46, 'M', u'渚'), + (0xFA2A, 'M', '飯'), + (0xFA2B, 'M', '飼'), + (0xFA2C, 'M', '館'), + (0xFA2D, 'M', '鶴'), + (0xFA2E, 'M', '郞'), + (0xFA2F, 'M', '隷'), + (0xFA30, 'M', '侮'), + (0xFA31, 'M', '僧'), + (0xFA32, 'M', '免'), + (0xFA33, 'M', '勉'), + (0xFA34, 'M', '勤'), + (0xFA35, 'M', '卑'), + (0xFA36, 'M', '喝'), + (0xFA37, 'M', '嘆'), + (0xFA38, 'M', '器'), + (0xFA39, 'M', '塀'), + (0xFA3A, 'M', '墨'), + (0xFA3B, 'M', '層'), + (0xFA3C, 'M', '屮'), + (0xFA3D, 'M', '悔'), + (0xFA3E, 'M', '慨'), + (0xFA3F, 'M', '憎'), + (0xFA40, 'M', '懲'), + (0xFA41, 'M', '敏'), + (0xFA42, 'M', '既'), + (0xFA43, 'M', '暑'), + (0xFA44, 'M', '梅'), + (0xFA45, 'M', '海'), + (0xFA46, 'M', '渚'), + (0xFA47, 'M', '漢'), + (0xFA48, 'M', '煮'), + (0xFA49, 'M', '爫'), + (0xFA4A, 'M', '琢'), + (0xFA4B, 'M', '碑'), + (0xFA4C, 'M', '社'), + (0xFA4D, 'M', '祉'), + (0xFA4E, 'M', '祈'), + (0xFA4F, 'M', '祐'), + (0xFA50, 'M', '祖'), + (0xFA51, 'M', '祝'), + (0xFA52, 'M', '禍'), + (0xFA53, 'M', '禎'), + (0xFA54, 'M', '穀'), + (0xFA55, 'M', '突'), + (0xFA56, 'M', '節'), + (0xFA57, 'M', '練'), + (0xFA58, 'M', '縉'), + (0xFA59, 'M', '繁'), + (0xFA5A, 'M', '署'), + (0xFA5B, 'M', '者'), + (0xFA5C, 'M', '臭'), + (0xFA5D, 'M', '艹'), + (0xFA5F, 'M', '著'), + (0xFA60, 'M', '褐'), + (0xFA61, 'M', '視'), + (0xFA62, 'M', '謁'), + (0xFA63, 'M', '謹'), + (0xFA64, 'M', '賓'), + (0xFA65, 'M', '贈'), + (0xFA66, 'M', '辶'), + (0xFA67, 'M', '逸'), + (0xFA68, 'M', '難'), + (0xFA69, 'M', '響'), + (0xFA6A, 'M', '頻'), + (0xFA6B, 'M', '恵'), + (0xFA6C, 'M', '𤋮'), + (0xFA6D, 'M', '舘'), + (0xFA6E, 'X'), + (0xFA70, 'M', '並'), + (0xFA71, 'M', '况'), + (0xFA72, 'M', '全'), + (0xFA73, 'M', '侀'), + (0xFA74, 'M', '充'), + (0xFA75, 'M', '冀'), + (0xFA76, 'M', '勇'), + (0xFA77, 'M', '勺'), + (0xFA78, 'M', '喝'), ] -def _seg_42(): +def _seg_43() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: return [ - (0xFA47, 'M', u'漢'), - (0xFA48, 'M', u'煮'), - (0xFA49, 'M', u'爫'), - (0xFA4A, 'M', u'琢'), - (0xFA4B, 'M', u'碑'), - (0xFA4C, 'M', u'社'), - 
(0xFA4D, 'M', u'祉'), - (0xFA4E, 'M', u'祈'), - (0xFA4F, 'M', u'祐'), - (0xFA50, 'M', u'祖'), - (0xFA51, 'M', u'祝'), - (0xFA52, 'M', u'禍'), - (0xFA53, 'M', u'禎'), - (0xFA54, 'M', u'穀'), - (0xFA55, 'M', u'突'), - (0xFA56, 'M', u'節'), - (0xFA57, 'M', u'練'), - (0xFA58, 'M', u'縉'), - (0xFA59, 'M', u'繁'), - (0xFA5A, 'M', u'署'), - (0xFA5B, 'M', u'者'), - (0xFA5C, 'M', u'臭'), - (0xFA5D, 'M', u'艹'), - (0xFA5F, 'M', u'著'), - (0xFA60, 'M', u'褐'), - (0xFA61, 'M', u'視'), - (0xFA62, 'M', u'謁'), - (0xFA63, 'M', u'謹'), - (0xFA64, 'M', u'賓'), - (0xFA65, 'M', u'贈'), - (0xFA66, 'M', u'辶'), - (0xFA67, 'M', u'逸'), - (0xFA68, 'M', u'難'), - (0xFA69, 'M', u'響'), - (0xFA6A, 'M', u'頻'), - (0xFA6B, 'M', u'恵'), - (0xFA6C, 'M', u'𤋮'), - (0xFA6D, 'M', u'舘'), - (0xFA6E, 'X'), - (0xFA70, 'M', u'並'), - (0xFA71, 'M', u'况'), - (0xFA72, 'M', u'全'), - (0xFA73, 'M', u'侀'), - (0xFA74, 'M', u'充'), - (0xFA75, 'M', u'冀'), - (0xFA76, 'M', u'勇'), - (0xFA77, 'M', u'勺'), - (0xFA78, 'M', u'喝'), - (0xFA79, 'M', u'啕'), - (0xFA7A, 'M', u'喙'), - (0xFA7B, 'M', u'嗢'), - (0xFA7C, 'M', u'塚'), - (0xFA7D, 'M', u'墳'), - (0xFA7E, 'M', u'奄'), - (0xFA7F, 'M', u'奔'), - (0xFA80, 'M', u'婢'), - (0xFA81, 'M', u'嬨'), - (0xFA82, 'M', u'廒'), - (0xFA83, 'M', u'廙'), - (0xFA84, 'M', u'彩'), - (0xFA85, 'M', u'徭'), - (0xFA86, 'M', u'惘'), - (0xFA87, 'M', u'慎'), - (0xFA88, 'M', u'愈'), - (0xFA89, 'M', u'憎'), - (0xFA8A, 'M', u'慠'), - (0xFA8B, 'M', u'懲'), - (0xFA8C, 'M', u'戴'), - (0xFA8D, 'M', u'揄'), - (0xFA8E, 'M', u'搜'), - (0xFA8F, 'M', u'摒'), - (0xFA90, 'M', u'敖'), - (0xFA91, 'M', u'晴'), - (0xFA92, 'M', u'朗'), - (0xFA93, 'M', u'望'), - (0xFA94, 'M', u'杖'), - (0xFA95, 'M', u'歹'), - (0xFA96, 'M', u'殺'), - (0xFA97, 'M', u'流'), - (0xFA98, 'M', u'滛'), - (0xFA99, 'M', u'滋'), - (0xFA9A, 'M', u'漢'), - (0xFA9B, 'M', u'瀞'), - (0xFA9C, 'M', u'煮'), - (0xFA9D, 'M', u'瞧'), - (0xFA9E, 'M', u'爵'), - (0xFA9F, 'M', u'犯'), - (0xFAA0, 'M', u'猪'), - (0xFAA1, 'M', u'瑱'), - (0xFAA2, 'M', u'甆'), - (0xFAA3, 'M', u'画'), - (0xFAA4, 'M', u'瘝'), - (0xFAA5, 'M', u'瘟'), - (0xFAA6, 'M', u'益'), - (0xFAA7, 'M', u'盛'), - (0xFAA8, 'M', u'直'), - (0xFAA9, 'M', u'睊'), - (0xFAAA, 'M', u'着'), - (0xFAAB, 'M', u'磌'), - (0xFAAC, 'M', u'窱'), + (0xFA79, 'M', '啕'), + (0xFA7A, 'M', '喙'), + (0xFA7B, 'M', '嗢'), + (0xFA7C, 'M', '塚'), + (0xFA7D, 'M', '墳'), + (0xFA7E, 'M', '奄'), + (0xFA7F, 'M', '奔'), + (0xFA80, 'M', '婢'), + (0xFA81, 'M', '嬨'), + (0xFA82, 'M', '廒'), + (0xFA83, 'M', '廙'), + (0xFA84, 'M', '彩'), + (0xFA85, 'M', '徭'), + (0xFA86, 'M', '惘'), + (0xFA87, 'M', '慎'), + (0xFA88, 'M', '愈'), + (0xFA89, 'M', '憎'), + (0xFA8A, 'M', '慠'), + (0xFA8B, 'M', '懲'), + (0xFA8C, 'M', '戴'), + (0xFA8D, 'M', '揄'), + (0xFA8E, 'M', '搜'), + (0xFA8F, 'M', '摒'), + (0xFA90, 'M', '敖'), + (0xFA91, 'M', '晴'), + (0xFA92, 'M', '朗'), + (0xFA93, 'M', '望'), + (0xFA94, 'M', '杖'), + (0xFA95, 'M', '歹'), + (0xFA96, 'M', '殺'), + (0xFA97, 'M', '流'), + (0xFA98, 'M', '滛'), + (0xFA99, 'M', '滋'), + (0xFA9A, 'M', '漢'), + (0xFA9B, 'M', '瀞'), + (0xFA9C, 'M', '煮'), + (0xFA9D, 'M', '瞧'), + (0xFA9E, 'M', '爵'), + (0xFA9F, 'M', '犯'), + (0xFAA0, 'M', '猪'), + (0xFAA1, 'M', '瑱'), + (0xFAA2, 'M', '甆'), + (0xFAA3, 'M', '画'), + (0xFAA4, 'M', '瘝'), + (0xFAA5, 'M', '瘟'), + (0xFAA6, 'M', '益'), + (0xFAA7, 'M', '盛'), + (0xFAA8, 'M', '直'), + (0xFAA9, 'M', '睊'), + (0xFAAA, 'M', '着'), + (0xFAAB, 'M', '磌'), + (0xFAAC, 'M', '窱'), + (0xFAAD, 'M', '節'), + (0xFAAE, 'M', '类'), + (0xFAAF, 'M', '絛'), + (0xFAB0, 'M', '練'), + (0xFAB1, 'M', '缾'), + (0xFAB2, 'M', '者'), + (0xFAB3, 'M', '荒'), + (0xFAB4, 'M', '華'), + (0xFAB5, 'M', '蝹'), + (0xFAB6, 'M', '襁'), + (0xFAB7, 'M', '覆'), + (0xFAB8, 
'M', '視'), + (0xFAB9, 'M', '調'), + (0xFABA, 'M', '諸'), + (0xFABB, 'M', '請'), + (0xFABC, 'M', '謁'), + (0xFABD, 'M', '諾'), + (0xFABE, 'M', '諭'), + (0xFABF, 'M', '謹'), + (0xFAC0, 'M', '變'), + (0xFAC1, 'M', '贈'), + (0xFAC2, 'M', '輸'), + (0xFAC3, 'M', '遲'), + (0xFAC4, 'M', '醙'), + (0xFAC5, 'M', '鉶'), + (0xFAC6, 'M', '陼'), + (0xFAC7, 'M', '難'), + (0xFAC8, 'M', '靖'), + (0xFAC9, 'M', '韛'), + (0xFACA, 'M', '響'), + (0xFACB, 'M', '頋'), + (0xFACC, 'M', '頻'), + (0xFACD, 'M', '鬒'), + (0xFACE, 'M', '龜'), + (0xFACF, 'M', '𢡊'), + (0xFAD0, 'M', '𢡄'), + (0xFAD1, 'M', '𣏕'), + (0xFAD2, 'M', '㮝'), + (0xFAD3, 'M', '䀘'), + (0xFAD4, 'M', '䀹'), + (0xFAD5, 'M', '𥉉'), + (0xFAD6, 'M', '𥳐'), + (0xFAD7, 'M', '𧻓'), + (0xFAD8, 'M', '齃'), + (0xFAD9, 'M', '龎'), + (0xFADA, 'X'), + (0xFB00, 'M', 'ff'), + (0xFB01, 'M', 'fi'), ] -def _seg_43(): +def _seg_44() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: return [ - (0xFAAD, 'M', u'節'), - (0xFAAE, 'M', u'类'), - (0xFAAF, 'M', u'絛'), - (0xFAB0, 'M', u'練'), - (0xFAB1, 'M', u'缾'), - (0xFAB2, 'M', u'者'), - (0xFAB3, 'M', u'荒'), - (0xFAB4, 'M', u'華'), - (0xFAB5, 'M', u'蝹'), - (0xFAB6, 'M', u'襁'), - (0xFAB7, 'M', u'覆'), - (0xFAB8, 'M', u'視'), - (0xFAB9, 'M', u'調'), - (0xFABA, 'M', u'諸'), - (0xFABB, 'M', u'請'), - (0xFABC, 'M', u'謁'), - (0xFABD, 'M', u'諾'), - (0xFABE, 'M', u'諭'), - (0xFABF, 'M', u'謹'), - (0xFAC0, 'M', u'變'), - (0xFAC1, 'M', u'贈'), - (0xFAC2, 'M', u'輸'), - (0xFAC3, 'M', u'遲'), - (0xFAC4, 'M', u'醙'), - (0xFAC5, 'M', u'鉶'), - (0xFAC6, 'M', u'陼'), - (0xFAC7, 'M', u'難'), - (0xFAC8, 'M', u'靖'), - (0xFAC9, 'M', u'韛'), - (0xFACA, 'M', u'響'), - (0xFACB, 'M', u'頋'), - (0xFACC, 'M', u'頻'), - (0xFACD, 'M', u'鬒'), - (0xFACE, 'M', u'龜'), - (0xFACF, 'M', u'𢡊'), - (0xFAD0, 'M', u'𢡄'), - (0xFAD1, 'M', u'𣏕'), - (0xFAD2, 'M', u'㮝'), - (0xFAD3, 'M', u'䀘'), - (0xFAD4, 'M', u'䀹'), - (0xFAD5, 'M', u'𥉉'), - (0xFAD6, 'M', u'𥳐'), - (0xFAD7, 'M', u'𧻓'), - (0xFAD8, 'M', u'齃'), - (0xFAD9, 'M', u'龎'), - (0xFADA, 'X'), - (0xFB00, 'M', u'ff'), - (0xFB01, 'M', u'fi'), - (0xFB02, 'M', u'fl'), - (0xFB03, 'M', u'ffi'), - (0xFB04, 'M', u'ffl'), - (0xFB05, 'M', u'st'), + (0xFB02, 'M', 'fl'), + (0xFB03, 'M', 'ffi'), + (0xFB04, 'M', 'ffl'), + (0xFB05, 'M', 'st'), (0xFB07, 'X'), - (0xFB13, 'M', u'մն'), - (0xFB14, 'M', u'մե'), - (0xFB15, 'M', u'մի'), - (0xFB16, 'M', u'վն'), - (0xFB17, 'M', u'մխ'), + (0xFB13, 'M', 'մն'), + (0xFB14, 'M', 'մե'), + (0xFB15, 'M', 'մի'), + (0xFB16, 'M', 'վն'), + (0xFB17, 'M', 'մխ'), (0xFB18, 'X'), - (0xFB1D, 'M', u'יִ'), + (0xFB1D, 'M', 'יִ'), (0xFB1E, 'V'), - (0xFB1F, 'M', u'ײַ'), - (0xFB20, 'M', u'ע'), - (0xFB21, 'M', u'א'), - (0xFB22, 'M', u'ד'), - (0xFB23, 'M', u'ה'), - (0xFB24, 'M', u'כ'), - (0xFB25, 'M', u'ל'), - (0xFB26, 'M', u'ם'), - (0xFB27, 'M', u'ר'), - (0xFB28, 'M', u'ת'), - (0xFB29, '3', u'+'), - (0xFB2A, 'M', u'שׁ'), - (0xFB2B, 'M', u'שׂ'), - (0xFB2C, 'M', u'שּׁ'), - (0xFB2D, 'M', u'שּׂ'), - (0xFB2E, 'M', u'אַ'), - (0xFB2F, 'M', u'אָ'), - (0xFB30, 'M', u'אּ'), - (0xFB31, 'M', u'בּ'), - (0xFB32, 'M', u'גּ'), - (0xFB33, 'M', u'דּ'), - (0xFB34, 'M', u'הּ'), - (0xFB35, 'M', u'וּ'), - (0xFB36, 'M', u'זּ'), + (0xFB1F, 'M', 'ײַ'), + (0xFB20, 'M', 'ע'), + (0xFB21, 'M', 'א'), + (0xFB22, 'M', 'ד'), + (0xFB23, 'M', 'ה'), + (0xFB24, 'M', 'כ'), + (0xFB25, 'M', 'ל'), + (0xFB26, 'M', 'ם'), + (0xFB27, 'M', 'ר'), + (0xFB28, 'M', 'ת'), + (0xFB29, '3', '+'), + (0xFB2A, 'M', 'שׁ'), + (0xFB2B, 'M', 'שׂ'), + (0xFB2C, 'M', 'שּׁ'), + (0xFB2D, 'M', 'שּׂ'), + (0xFB2E, 'M', 'אַ'), + (0xFB2F, 'M', 'אָ'), + (0xFB30, 'M', 'אּ'), + (0xFB31, 'M', 'בּ'), + (0xFB32, 'M', 'גּ'), + (0xFB33, 'M', 
'דּ'), + (0xFB34, 'M', 'הּ'), + (0xFB35, 'M', 'וּ'), + (0xFB36, 'M', 'זּ'), (0xFB37, 'X'), - (0xFB38, 'M', u'טּ'), - (0xFB39, 'M', u'יּ'), - (0xFB3A, 'M', u'ךּ'), - (0xFB3B, 'M', u'כּ'), - (0xFB3C, 'M', u'לּ'), + (0xFB38, 'M', 'טּ'), + (0xFB39, 'M', 'יּ'), + (0xFB3A, 'M', 'ךּ'), + (0xFB3B, 'M', 'כּ'), + (0xFB3C, 'M', 'לּ'), (0xFB3D, 'X'), - (0xFB3E, 'M', u'מּ'), + (0xFB3E, 'M', 'מּ'), (0xFB3F, 'X'), - (0xFB40, 'M', u'נּ'), - (0xFB41, 'M', u'סּ'), + (0xFB40, 'M', 'נּ'), + (0xFB41, 'M', 'סּ'), (0xFB42, 'X'), - (0xFB43, 'M', u'ףּ'), - (0xFB44, 'M', u'פּ'), + (0xFB43, 'M', 'ףּ'), + (0xFB44, 'M', 'פּ'), (0xFB45, 'X'), + (0xFB46, 'M', 'צּ'), + (0xFB47, 'M', 'קּ'), + (0xFB48, 'M', 'רּ'), + (0xFB49, 'M', 'שּ'), + (0xFB4A, 'M', 'תּ'), + (0xFB4B, 'M', 'וֹ'), + (0xFB4C, 'M', 'בֿ'), + (0xFB4D, 'M', 'כֿ'), + (0xFB4E, 'M', 'פֿ'), + (0xFB4F, 'M', 'אל'), + (0xFB50, 'M', 'ٱ'), + (0xFB52, 'M', 'ٻ'), + (0xFB56, 'M', 'پ'), + (0xFB5A, 'M', 'ڀ'), + (0xFB5E, 'M', 'ٺ'), + (0xFB62, 'M', 'ٿ'), + (0xFB66, 'M', 'ٹ'), + (0xFB6A, 'M', 'ڤ'), + (0xFB6E, 'M', 'ڦ'), + (0xFB72, 'M', 'ڄ'), + (0xFB76, 'M', 'ڃ'), + (0xFB7A, 'M', 'چ'), + (0xFB7E, 'M', 'ڇ'), + (0xFB82, 'M', 'ڍ'), + (0xFB84, 'M', 'ڌ'), + (0xFB86, 'M', 'ڎ'), + (0xFB88, 'M', 'ڈ'), + (0xFB8A, 'M', 'ژ'), + (0xFB8C, 'M', 'ڑ'), + (0xFB8E, 'M', 'ک'), + (0xFB92, 'M', 'گ'), + (0xFB96, 'M', 'ڳ'), + (0xFB9A, 'M', 'ڱ'), + (0xFB9E, 'M', 'ں'), + (0xFBA0, 'M', 'ڻ'), + (0xFBA4, 'M', 'ۀ'), + (0xFBA6, 'M', 'ہ'), + (0xFBAA, 'M', 'ھ'), + (0xFBAE, 'M', 'ے'), + (0xFBB0, 'M', 'ۓ'), + (0xFBB2, 'V'), + (0xFBC3, 'X'), + (0xFBD3, 'M', 'ڭ'), + (0xFBD7, 'M', 'ۇ'), + (0xFBD9, 'M', 'ۆ'), + (0xFBDB, 'M', 'ۈ'), + (0xFBDD, 'M', 'ۇٴ'), + (0xFBDE, 'M', 'ۋ'), ] -def _seg_44(): +def _seg_45() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: return [ - (0xFB46, 'M', u'צּ'), - (0xFB47, 'M', u'קּ'), - (0xFB48, 'M', u'רּ'), - (0xFB49, 'M', u'שּ'), - (0xFB4A, 'M', u'תּ'), - (0xFB4B, 'M', u'וֹ'), - (0xFB4C, 'M', u'בֿ'), - (0xFB4D, 'M', u'כֿ'), - (0xFB4E, 'M', u'פֿ'), - (0xFB4F, 'M', u'אל'), - (0xFB50, 'M', u'ٱ'), - (0xFB52, 'M', u'ٻ'), - (0xFB56, 'M', u'پ'), - (0xFB5A, 'M', u'ڀ'), - (0xFB5E, 'M', u'ٺ'), - (0xFB62, 'M', u'ٿ'), - (0xFB66, 'M', u'ٹ'), - (0xFB6A, 'M', u'ڤ'), - (0xFB6E, 'M', u'ڦ'), - (0xFB72, 'M', u'ڄ'), - (0xFB76, 'M', u'ڃ'), - (0xFB7A, 'M', u'چ'), - (0xFB7E, 'M', u'ڇ'), - (0xFB82, 'M', u'ڍ'), - (0xFB84, 'M', u'ڌ'), - (0xFB86, 'M', u'ڎ'), - (0xFB88, 'M', u'ڈ'), - (0xFB8A, 'M', u'ژ'), - (0xFB8C, 'M', u'ڑ'), - (0xFB8E, 'M', u'ک'), - (0xFB92, 'M', u'گ'), - (0xFB96, 'M', u'ڳ'), - (0xFB9A, 'M', u'ڱ'), - (0xFB9E, 'M', u'ں'), - (0xFBA0, 'M', u'ڻ'), - (0xFBA4, 'M', u'ۀ'), - (0xFBA6, 'M', u'ہ'), - (0xFBAA, 'M', u'ھ'), - (0xFBAE, 'M', u'ے'), - (0xFBB0, 'M', u'ۓ'), - (0xFBB2, 'V'), - (0xFBC2, 'X'), - (0xFBD3, 'M', u'ڭ'), - (0xFBD7, 'M', u'ۇ'), - (0xFBD9, 'M', u'ۆ'), - (0xFBDB, 'M', u'ۈ'), - (0xFBDD, 'M', u'ۇٴ'), - (0xFBDE, 'M', u'ۋ'), - (0xFBE0, 'M', u'ۅ'), - (0xFBE2, 'M', u'ۉ'), - (0xFBE4, 'M', u'ې'), - (0xFBE8, 'M', u'ى'), - (0xFBEA, 'M', u'ئا'), - (0xFBEC, 'M', u'ئە'), - (0xFBEE, 'M', u'ئو'), - (0xFBF0, 'M', u'ئۇ'), - (0xFBF2, 'M', u'ئۆ'), - (0xFBF4, 'M', u'ئۈ'), - (0xFBF6, 'M', u'ئې'), - (0xFBF9, 'M', u'ئى'), - (0xFBFC, 'M', u'ی'), - (0xFC00, 'M', u'ئج'), - (0xFC01, 'M', u'ئح'), - (0xFC02, 'M', u'ئم'), - (0xFC03, 'M', u'ئى'), - (0xFC04, 'M', u'ئي'), - (0xFC05, 'M', u'بج'), - (0xFC06, 'M', u'بح'), - (0xFC07, 'M', u'بخ'), - (0xFC08, 'M', u'بم'), - (0xFC09, 'M', u'بى'), - (0xFC0A, 'M', u'بي'), - (0xFC0B, 'M', u'تج'), - (0xFC0C, 'M', u'تح'), - (0xFC0D, 'M', u'تخ'), - (0xFC0E, 'M', u'تم'), - 
(0xFC0F, 'M', u'تى'), - (0xFC10, 'M', u'تي'), - (0xFC11, 'M', u'ثج'), - (0xFC12, 'M', u'ثم'), - (0xFC13, 'M', u'ثى'), - (0xFC14, 'M', u'ثي'), - (0xFC15, 'M', u'جح'), - (0xFC16, 'M', u'جم'), - (0xFC17, 'M', u'حج'), - (0xFC18, 'M', u'حم'), - (0xFC19, 'M', u'خج'), - (0xFC1A, 'M', u'خح'), - (0xFC1B, 'M', u'خم'), - (0xFC1C, 'M', u'سج'), - (0xFC1D, 'M', u'سح'), - (0xFC1E, 'M', u'سخ'), - (0xFC1F, 'M', u'سم'), - (0xFC20, 'M', u'صح'), - (0xFC21, 'M', u'صم'), - (0xFC22, 'M', u'ضج'), - (0xFC23, 'M', u'ضح'), - (0xFC24, 'M', u'ضخ'), - (0xFC25, 'M', u'ضم'), - (0xFC26, 'M', u'طح'), + (0xFBE0, 'M', 'ۅ'), + (0xFBE2, 'M', 'ۉ'), + (0xFBE4, 'M', 'ې'), + (0xFBE8, 'M', 'ى'), + (0xFBEA, 'M', 'ئا'), + (0xFBEC, 'M', 'ئە'), + (0xFBEE, 'M', 'ئو'), + (0xFBF0, 'M', 'ئۇ'), + (0xFBF2, 'M', 'ئۆ'), + (0xFBF4, 'M', 'ئۈ'), + (0xFBF6, 'M', 'ئې'), + (0xFBF9, 'M', 'ئى'), + (0xFBFC, 'M', 'ی'), + (0xFC00, 'M', 'ئج'), + (0xFC01, 'M', 'ئح'), + (0xFC02, 'M', 'ئم'), + (0xFC03, 'M', 'ئى'), + (0xFC04, 'M', 'ئي'), + (0xFC05, 'M', 'بج'), + (0xFC06, 'M', 'بح'), + (0xFC07, 'M', 'بخ'), + (0xFC08, 'M', 'بم'), + (0xFC09, 'M', 'بى'), + (0xFC0A, 'M', 'بي'), + (0xFC0B, 'M', 'تج'), + (0xFC0C, 'M', 'تح'), + (0xFC0D, 'M', 'تخ'), + (0xFC0E, 'M', 'تم'), + (0xFC0F, 'M', 'تى'), + (0xFC10, 'M', 'تي'), + (0xFC11, 'M', 'ثج'), + (0xFC12, 'M', 'ثم'), + (0xFC13, 'M', 'ثى'), + (0xFC14, 'M', 'ثي'), + (0xFC15, 'M', 'جح'), + (0xFC16, 'M', 'جم'), + (0xFC17, 'M', 'حج'), + (0xFC18, 'M', 'حم'), + (0xFC19, 'M', 'خج'), + (0xFC1A, 'M', 'خح'), + (0xFC1B, 'M', 'خم'), + (0xFC1C, 'M', 'سج'), + (0xFC1D, 'M', 'سح'), + (0xFC1E, 'M', 'سخ'), + (0xFC1F, 'M', 'سم'), + (0xFC20, 'M', 'صح'), + (0xFC21, 'M', 'صم'), + (0xFC22, 'M', 'ضج'), + (0xFC23, 'M', 'ضح'), + (0xFC24, 'M', 'ضخ'), + (0xFC25, 'M', 'ضم'), + (0xFC26, 'M', 'طح'), + (0xFC27, 'M', 'طم'), + (0xFC28, 'M', 'ظم'), + (0xFC29, 'M', 'عج'), + (0xFC2A, 'M', 'عم'), + (0xFC2B, 'M', 'غج'), + (0xFC2C, 'M', 'غم'), + (0xFC2D, 'M', 'فج'), + (0xFC2E, 'M', 'فح'), + (0xFC2F, 'M', 'فخ'), + (0xFC30, 'M', 'فم'), + (0xFC31, 'M', 'فى'), + (0xFC32, 'M', 'في'), + (0xFC33, 'M', 'قح'), + (0xFC34, 'M', 'قم'), + (0xFC35, 'M', 'قى'), + (0xFC36, 'M', 'قي'), + (0xFC37, 'M', 'كا'), + (0xFC38, 'M', 'كج'), + (0xFC39, 'M', 'كح'), + (0xFC3A, 'M', 'كخ'), + (0xFC3B, 'M', 'كل'), + (0xFC3C, 'M', 'كم'), + (0xFC3D, 'M', 'كى'), + (0xFC3E, 'M', 'كي'), + (0xFC3F, 'M', 'لج'), + (0xFC40, 'M', 'لح'), + (0xFC41, 'M', 'لخ'), + (0xFC42, 'M', 'لم'), + (0xFC43, 'M', 'لى'), + (0xFC44, 'M', 'لي'), + (0xFC45, 'M', 'مج'), + (0xFC46, 'M', 'مح'), + (0xFC47, 'M', 'مخ'), + (0xFC48, 'M', 'مم'), + (0xFC49, 'M', 'مى'), + (0xFC4A, 'M', 'مي'), + (0xFC4B, 'M', 'نج'), + (0xFC4C, 'M', 'نح'), + (0xFC4D, 'M', 'نخ'), + (0xFC4E, 'M', 'نم'), + (0xFC4F, 'M', 'نى'), + (0xFC50, 'M', 'ني'), + (0xFC51, 'M', 'هج'), + (0xFC52, 'M', 'هم'), + (0xFC53, 'M', 'هى'), + (0xFC54, 'M', 'هي'), + (0xFC55, 'M', 'يج'), + (0xFC56, 'M', 'يح'), ] -def _seg_45(): +def _seg_46() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: return [ - (0xFC27, 'M', u'طم'), - (0xFC28, 'M', u'ظم'), - (0xFC29, 'M', u'عج'), - (0xFC2A, 'M', u'عم'), - (0xFC2B, 'M', u'غج'), - (0xFC2C, 'M', u'غم'), - (0xFC2D, 'M', u'فج'), - (0xFC2E, 'M', u'فح'), - (0xFC2F, 'M', u'فخ'), - (0xFC30, 'M', u'فم'), - (0xFC31, 'M', u'فى'), - (0xFC32, 'M', u'في'), - (0xFC33, 'M', u'قح'), - (0xFC34, 'M', u'قم'), - (0xFC35, 'M', u'قى'), - (0xFC36, 'M', u'قي'), - (0xFC37, 'M', u'كا'), - (0xFC38, 'M', u'كج'), - (0xFC39, 'M', u'كح'), - (0xFC3A, 'M', u'كخ'), - (0xFC3B, 'M', u'كل'), - (0xFC3C, 'M', u'كم'), - (0xFC3D, 'M', u'كى'), - (0xFC3E, 'M', u'كي'), - (0xFC3F, 
'M', u'لج'), - (0xFC40, 'M', u'لح'), - (0xFC41, 'M', u'لخ'), - (0xFC42, 'M', u'لم'), - (0xFC43, 'M', u'لى'), - (0xFC44, 'M', u'لي'), - (0xFC45, 'M', u'مج'), - (0xFC46, 'M', u'مح'), - (0xFC47, 'M', u'مخ'), - (0xFC48, 'M', u'مم'), - (0xFC49, 'M', u'مى'), - (0xFC4A, 'M', u'مي'), - (0xFC4B, 'M', u'نج'), - (0xFC4C, 'M', u'نح'), - (0xFC4D, 'M', u'نخ'), - (0xFC4E, 'M', u'نم'), - (0xFC4F, 'M', u'نى'), - (0xFC50, 'M', u'ني'), - (0xFC51, 'M', u'هج'), - (0xFC52, 'M', u'هم'), - (0xFC53, 'M', u'هى'), - (0xFC54, 'M', u'هي'), - (0xFC55, 'M', u'يج'), - (0xFC56, 'M', u'يح'), - (0xFC57, 'M', u'يخ'), - (0xFC58, 'M', u'يم'), - (0xFC59, 'M', u'يى'), - (0xFC5A, 'M', u'يي'), - (0xFC5B, 'M', u'ذٰ'), - (0xFC5C, 'M', u'رٰ'), - (0xFC5D, 'M', u'ىٰ'), - (0xFC5E, '3', u' ٌّ'), - (0xFC5F, '3', u' ٍّ'), - (0xFC60, '3', u' َّ'), - (0xFC61, '3', u' ُّ'), - (0xFC62, '3', u' ِّ'), - (0xFC63, '3', u' ّٰ'), - (0xFC64, 'M', u'ئر'), - (0xFC65, 'M', u'ئز'), - (0xFC66, 'M', u'ئم'), - (0xFC67, 'M', u'ئن'), - (0xFC68, 'M', u'ئى'), - (0xFC69, 'M', u'ئي'), - (0xFC6A, 'M', u'بر'), - (0xFC6B, 'M', u'بز'), - (0xFC6C, 'M', u'بم'), - (0xFC6D, 'M', u'بن'), - (0xFC6E, 'M', u'بى'), - (0xFC6F, 'M', u'بي'), - (0xFC70, 'M', u'تر'), - (0xFC71, 'M', u'تز'), - (0xFC72, 'M', u'تم'), - (0xFC73, 'M', u'تن'), - (0xFC74, 'M', u'تى'), - (0xFC75, 'M', u'تي'), - (0xFC76, 'M', u'ثر'), - (0xFC77, 'M', u'ثز'), - (0xFC78, 'M', u'ثم'), - (0xFC79, 'M', u'ثن'), - (0xFC7A, 'M', u'ثى'), - (0xFC7B, 'M', u'ثي'), - (0xFC7C, 'M', u'فى'), - (0xFC7D, 'M', u'في'), - (0xFC7E, 'M', u'قى'), - (0xFC7F, 'M', u'قي'), - (0xFC80, 'M', u'كا'), - (0xFC81, 'M', u'كل'), - (0xFC82, 'M', u'كم'), - (0xFC83, 'M', u'كى'), - (0xFC84, 'M', u'كي'), - (0xFC85, 'M', u'لم'), - (0xFC86, 'M', u'لى'), - (0xFC87, 'M', u'لي'), - (0xFC88, 'M', u'ما'), - (0xFC89, 'M', u'مم'), - (0xFC8A, 'M', u'نر'), + (0xFC57, 'M', 'يخ'), + (0xFC58, 'M', 'يم'), + (0xFC59, 'M', 'يى'), + (0xFC5A, 'M', 'يي'), + (0xFC5B, 'M', 'ذٰ'), + (0xFC5C, 'M', 'رٰ'), + (0xFC5D, 'M', 'ىٰ'), + (0xFC5E, '3', ' ٌّ'), + (0xFC5F, '3', ' ٍّ'), + (0xFC60, '3', ' َّ'), + (0xFC61, '3', ' ُّ'), + (0xFC62, '3', ' ِّ'), + (0xFC63, '3', ' ّٰ'), + (0xFC64, 'M', 'ئر'), + (0xFC65, 'M', 'ئز'), + (0xFC66, 'M', 'ئم'), + (0xFC67, 'M', 'ئن'), + (0xFC68, 'M', 'ئى'), + (0xFC69, 'M', 'ئي'), + (0xFC6A, 'M', 'بر'), + (0xFC6B, 'M', 'بز'), + (0xFC6C, 'M', 'بم'), + (0xFC6D, 'M', 'بن'), + (0xFC6E, 'M', 'بى'), + (0xFC6F, 'M', 'بي'), + (0xFC70, 'M', 'تر'), + (0xFC71, 'M', 'تز'), + (0xFC72, 'M', 'تم'), + (0xFC73, 'M', 'تن'), + (0xFC74, 'M', 'تى'), + (0xFC75, 'M', 'تي'), + (0xFC76, 'M', 'ثر'), + (0xFC77, 'M', 'ثز'), + (0xFC78, 'M', 'ثم'), + (0xFC79, 'M', 'ثن'), + (0xFC7A, 'M', 'ثى'), + (0xFC7B, 'M', 'ثي'), + (0xFC7C, 'M', 'فى'), + (0xFC7D, 'M', 'في'), + (0xFC7E, 'M', 'قى'), + (0xFC7F, 'M', 'قي'), + (0xFC80, 'M', 'كا'), + (0xFC81, 'M', 'كل'), + (0xFC82, 'M', 'كم'), + (0xFC83, 'M', 'كى'), + (0xFC84, 'M', 'كي'), + (0xFC85, 'M', 'لم'), + (0xFC86, 'M', 'لى'), + (0xFC87, 'M', 'لي'), + (0xFC88, 'M', 'ما'), + (0xFC89, 'M', 'مم'), + (0xFC8A, 'M', 'نر'), + (0xFC8B, 'M', 'نز'), + (0xFC8C, 'M', 'نم'), + (0xFC8D, 'M', 'نن'), + (0xFC8E, 'M', 'نى'), + (0xFC8F, 'M', 'ني'), + (0xFC90, 'M', 'ىٰ'), + (0xFC91, 'M', 'ير'), + (0xFC92, 'M', 'يز'), + (0xFC93, 'M', 'يم'), + (0xFC94, 'M', 'ين'), + (0xFC95, 'M', 'يى'), + (0xFC96, 'M', 'يي'), + (0xFC97, 'M', 'ئج'), + (0xFC98, 'M', 'ئح'), + (0xFC99, 'M', 'ئخ'), + (0xFC9A, 'M', 'ئم'), + (0xFC9B, 'M', 'ئه'), + (0xFC9C, 'M', 'بج'), + (0xFC9D, 'M', 'بح'), + (0xFC9E, 'M', 'بخ'), + (0xFC9F, 'M', 'بم'), + (0xFCA0, 'M', 'به'), + (0xFCA1, 'M', 'تج'), + 
(0xFCA2, 'M', 'تح'), + (0xFCA3, 'M', 'تخ'), + (0xFCA4, 'M', 'تم'), + (0xFCA5, 'M', 'ته'), + (0xFCA6, 'M', 'ثم'), + (0xFCA7, 'M', 'جح'), + (0xFCA8, 'M', 'جم'), + (0xFCA9, 'M', 'حج'), + (0xFCAA, 'M', 'حم'), + (0xFCAB, 'M', 'خج'), + (0xFCAC, 'M', 'خم'), + (0xFCAD, 'M', 'سج'), + (0xFCAE, 'M', 'سح'), + (0xFCAF, 'M', 'سخ'), + (0xFCB0, 'M', 'سم'), + (0xFCB1, 'M', 'صح'), + (0xFCB2, 'M', 'صخ'), + (0xFCB3, 'M', 'صم'), + (0xFCB4, 'M', 'ضج'), + (0xFCB5, 'M', 'ضح'), + (0xFCB6, 'M', 'ضخ'), + (0xFCB7, 'M', 'ضم'), + (0xFCB8, 'M', 'طح'), + (0xFCB9, 'M', 'ظم'), + (0xFCBA, 'M', 'عج'), ] -def _seg_46(): +def _seg_47() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: return [ - (0xFC8B, 'M', u'نز'), - (0xFC8C, 'M', u'نم'), - (0xFC8D, 'M', u'نن'), - (0xFC8E, 'M', u'نى'), - (0xFC8F, 'M', u'ني'), - (0xFC90, 'M', u'ىٰ'), - (0xFC91, 'M', u'ير'), - (0xFC92, 'M', u'يز'), - (0xFC93, 'M', u'يم'), - (0xFC94, 'M', u'ين'), - (0xFC95, 'M', u'يى'), - (0xFC96, 'M', u'يي'), - (0xFC97, 'M', u'ئج'), - (0xFC98, 'M', u'ئح'), - (0xFC99, 'M', u'ئخ'), - (0xFC9A, 'M', u'ئم'), - (0xFC9B, 'M', u'ئه'), - (0xFC9C, 'M', u'بج'), - (0xFC9D, 'M', u'بح'), - (0xFC9E, 'M', u'بخ'), - (0xFC9F, 'M', u'بم'), - (0xFCA0, 'M', u'به'), - (0xFCA1, 'M', u'تج'), - (0xFCA2, 'M', u'تح'), - (0xFCA3, 'M', u'تخ'), - (0xFCA4, 'M', u'تم'), - (0xFCA5, 'M', u'ته'), - (0xFCA6, 'M', u'ثم'), - (0xFCA7, 'M', u'جح'), - (0xFCA8, 'M', u'جم'), - (0xFCA9, 'M', u'حج'), - (0xFCAA, 'M', u'حم'), - (0xFCAB, 'M', u'خج'), - (0xFCAC, 'M', u'خم'), - (0xFCAD, 'M', u'سج'), - (0xFCAE, 'M', u'سح'), - (0xFCAF, 'M', u'سخ'), - (0xFCB0, 'M', u'سم'), - (0xFCB1, 'M', u'صح'), - (0xFCB2, 'M', u'صخ'), - (0xFCB3, 'M', u'صم'), - (0xFCB4, 'M', u'ضج'), - (0xFCB5, 'M', u'ضح'), - (0xFCB6, 'M', u'ضخ'), - (0xFCB7, 'M', u'ضم'), - (0xFCB8, 'M', u'طح'), - (0xFCB9, 'M', u'ظم'), - (0xFCBA, 'M', u'عج'), - (0xFCBB, 'M', u'عم'), - (0xFCBC, 'M', u'غج'), - (0xFCBD, 'M', u'غم'), - (0xFCBE, 'M', u'فج'), - (0xFCBF, 'M', u'فح'), - (0xFCC0, 'M', u'فخ'), - (0xFCC1, 'M', u'فم'), - (0xFCC2, 'M', u'قح'), - (0xFCC3, 'M', u'قم'), - (0xFCC4, 'M', u'كج'), - (0xFCC5, 'M', u'كح'), - (0xFCC6, 'M', u'كخ'), - (0xFCC7, 'M', u'كل'), - (0xFCC8, 'M', u'كم'), - (0xFCC9, 'M', u'لج'), - (0xFCCA, 'M', u'لح'), - (0xFCCB, 'M', u'لخ'), - (0xFCCC, 'M', u'لم'), - (0xFCCD, 'M', u'له'), - (0xFCCE, 'M', u'مج'), - (0xFCCF, 'M', u'مح'), - (0xFCD0, 'M', u'مخ'), - (0xFCD1, 'M', u'مم'), - (0xFCD2, 'M', u'نج'), - (0xFCD3, 'M', u'نح'), - (0xFCD4, 'M', u'نخ'), - (0xFCD5, 'M', u'نم'), - (0xFCD6, 'M', u'نه'), - (0xFCD7, 'M', u'هج'), - (0xFCD8, 'M', u'هم'), - (0xFCD9, 'M', u'هٰ'), - (0xFCDA, 'M', u'يج'), - (0xFCDB, 'M', u'يح'), - (0xFCDC, 'M', u'يخ'), - (0xFCDD, 'M', u'يم'), - (0xFCDE, 'M', u'يه'), - (0xFCDF, 'M', u'ئم'), - (0xFCE0, 'M', u'ئه'), - (0xFCE1, 'M', u'بم'), - (0xFCE2, 'M', u'به'), - (0xFCE3, 'M', u'تم'), - (0xFCE4, 'M', u'ته'), - (0xFCE5, 'M', u'ثم'), - (0xFCE6, 'M', u'ثه'), - (0xFCE7, 'M', u'سم'), - (0xFCE8, 'M', u'سه'), - (0xFCE9, 'M', u'شم'), - (0xFCEA, 'M', u'شه'), - (0xFCEB, 'M', u'كل'), - (0xFCEC, 'M', u'كم'), - (0xFCED, 'M', u'لم'), - (0xFCEE, 'M', u'نم'), + (0xFCBB, 'M', 'عم'), + (0xFCBC, 'M', 'غج'), + (0xFCBD, 'M', 'غم'), + (0xFCBE, 'M', 'فج'), + (0xFCBF, 'M', 'فح'), + (0xFCC0, 'M', 'فخ'), + (0xFCC1, 'M', 'فم'), + (0xFCC2, 'M', 'قح'), + (0xFCC3, 'M', 'قم'), + (0xFCC4, 'M', 'كج'), + (0xFCC5, 'M', 'كح'), + (0xFCC6, 'M', 'كخ'), + (0xFCC7, 'M', 'كل'), + (0xFCC8, 'M', 'كم'), + (0xFCC9, 'M', 'لج'), + (0xFCCA, 'M', 'لح'), + (0xFCCB, 'M', 'لخ'), + (0xFCCC, 'M', 'لم'), + (0xFCCD, 'M', 'له'), + (0xFCCE, 'M', 'مج'), + (0xFCCF, 'M', 'مح'), + 
(0xFCD0, 'M', 'مخ'), + (0xFCD1, 'M', 'مم'), + (0xFCD2, 'M', 'نج'), + (0xFCD3, 'M', 'نح'), + (0xFCD4, 'M', 'نخ'), + (0xFCD5, 'M', 'نم'), + (0xFCD6, 'M', 'نه'), + (0xFCD7, 'M', 'هج'), + (0xFCD8, 'M', 'هم'), + (0xFCD9, 'M', 'هٰ'), + (0xFCDA, 'M', 'يج'), + (0xFCDB, 'M', 'يح'), + (0xFCDC, 'M', 'يخ'), + (0xFCDD, 'M', 'يم'), + (0xFCDE, 'M', 'يه'), + (0xFCDF, 'M', 'ئم'), + (0xFCE0, 'M', 'ئه'), + (0xFCE1, 'M', 'بم'), + (0xFCE2, 'M', 'به'), + (0xFCE3, 'M', 'تم'), + (0xFCE4, 'M', 'ته'), + (0xFCE5, 'M', 'ثم'), + (0xFCE6, 'M', 'ثه'), + (0xFCE7, 'M', 'سم'), + (0xFCE8, 'M', 'سه'), + (0xFCE9, 'M', 'شم'), + (0xFCEA, 'M', 'شه'), + (0xFCEB, 'M', 'كل'), + (0xFCEC, 'M', 'كم'), + (0xFCED, 'M', 'لم'), + (0xFCEE, 'M', 'نم'), + (0xFCEF, 'M', 'نه'), + (0xFCF0, 'M', 'يم'), + (0xFCF1, 'M', 'يه'), + (0xFCF2, 'M', 'ـَّ'), + (0xFCF3, 'M', 'ـُّ'), + (0xFCF4, 'M', 'ـِّ'), + (0xFCF5, 'M', 'طى'), + (0xFCF6, 'M', 'طي'), + (0xFCF7, 'M', 'عى'), + (0xFCF8, 'M', 'عي'), + (0xFCF9, 'M', 'غى'), + (0xFCFA, 'M', 'غي'), + (0xFCFB, 'M', 'سى'), + (0xFCFC, 'M', 'سي'), + (0xFCFD, 'M', 'شى'), + (0xFCFE, 'M', 'شي'), + (0xFCFF, 'M', 'حى'), + (0xFD00, 'M', 'حي'), + (0xFD01, 'M', 'جى'), + (0xFD02, 'M', 'جي'), + (0xFD03, 'M', 'خى'), + (0xFD04, 'M', 'خي'), + (0xFD05, 'M', 'صى'), + (0xFD06, 'M', 'صي'), + (0xFD07, 'M', 'ضى'), + (0xFD08, 'M', 'ضي'), + (0xFD09, 'M', 'شج'), + (0xFD0A, 'M', 'شح'), + (0xFD0B, 'M', 'شخ'), + (0xFD0C, 'M', 'شم'), + (0xFD0D, 'M', 'شر'), + (0xFD0E, 'M', 'سر'), + (0xFD0F, 'M', 'صر'), + (0xFD10, 'M', 'ضر'), + (0xFD11, 'M', 'طى'), + (0xFD12, 'M', 'طي'), + (0xFD13, 'M', 'عى'), + (0xFD14, 'M', 'عي'), + (0xFD15, 'M', 'غى'), + (0xFD16, 'M', 'غي'), + (0xFD17, 'M', 'سى'), + (0xFD18, 'M', 'سي'), + (0xFD19, 'M', 'شى'), + (0xFD1A, 'M', 'شي'), + (0xFD1B, 'M', 'حى'), + (0xFD1C, 'M', 'حي'), + (0xFD1D, 'M', 'جى'), + (0xFD1E, 'M', 'جي'), ] -def _seg_47(): +def _seg_48() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: return [ - (0xFCEF, 'M', u'نه'), - (0xFCF0, 'M', u'يم'), - (0xFCF1, 'M', u'يه'), - (0xFCF2, 'M', u'ـَّ'), - (0xFCF3, 'M', u'ـُّ'), - (0xFCF4, 'M', u'ـِّ'), - (0xFCF5, 'M', u'طى'), - (0xFCF6, 'M', u'طي'), - (0xFCF7, 'M', u'عى'), - (0xFCF8, 'M', u'عي'), - (0xFCF9, 'M', u'غى'), - (0xFCFA, 'M', u'غي'), - (0xFCFB, 'M', u'سى'), - (0xFCFC, 'M', u'سي'), - (0xFCFD, 'M', u'شى'), - (0xFCFE, 'M', u'شي'), - (0xFCFF, 'M', u'حى'), - (0xFD00, 'M', u'حي'), - (0xFD01, 'M', u'جى'), - (0xFD02, 'M', u'جي'), - (0xFD03, 'M', u'خى'), - (0xFD04, 'M', u'خي'), - (0xFD05, 'M', u'صى'), - (0xFD06, 'M', u'صي'), - (0xFD07, 'M', u'ضى'), - (0xFD08, 'M', u'ضي'), - (0xFD09, 'M', u'شج'), - (0xFD0A, 'M', u'شح'), - (0xFD0B, 'M', u'شخ'), - (0xFD0C, 'M', u'شم'), - (0xFD0D, 'M', u'شر'), - (0xFD0E, 'M', u'سر'), - (0xFD0F, 'M', u'صر'), - (0xFD10, 'M', u'ضر'), - (0xFD11, 'M', u'طى'), - (0xFD12, 'M', u'طي'), - (0xFD13, 'M', u'عى'), - (0xFD14, 'M', u'عي'), - (0xFD15, 'M', u'غى'), - (0xFD16, 'M', u'غي'), - (0xFD17, 'M', u'سى'), - (0xFD18, 'M', u'سي'), - (0xFD19, 'M', u'شى'), - (0xFD1A, 'M', u'شي'), - (0xFD1B, 'M', u'حى'), - (0xFD1C, 'M', u'حي'), - (0xFD1D, 'M', u'جى'), - (0xFD1E, 'M', u'جي'), - (0xFD1F, 'M', u'خى'), - (0xFD20, 'M', u'خي'), - (0xFD21, 'M', u'صى'), - (0xFD22, 'M', u'صي'), - (0xFD23, 'M', u'ضى'), - (0xFD24, 'M', u'ضي'), - (0xFD25, 'M', u'شج'), - (0xFD26, 'M', u'شح'), - (0xFD27, 'M', u'شخ'), - (0xFD28, 'M', u'شم'), - (0xFD29, 'M', u'شر'), - (0xFD2A, 'M', u'سر'), - (0xFD2B, 'M', u'صر'), - (0xFD2C, 'M', u'ضر'), - (0xFD2D, 'M', u'شج'), - (0xFD2E, 'M', u'شح'), - (0xFD2F, 'M', u'شخ'), - (0xFD30, 'M', u'شم'), - (0xFD31, 'M', u'سه'), - (0xFD32, 'M', u'شه'), - 
(0xFD33, 'M', u'طم'), - (0xFD34, 'M', u'سج'), - (0xFD35, 'M', u'سح'), - (0xFD36, 'M', u'سخ'), - (0xFD37, 'M', u'شج'), - (0xFD38, 'M', u'شح'), - (0xFD39, 'M', u'شخ'), - (0xFD3A, 'M', u'طم'), - (0xFD3B, 'M', u'ظم'), - (0xFD3C, 'M', u'اً'), + (0xFD1F, 'M', 'خى'), + (0xFD20, 'M', 'خي'), + (0xFD21, 'M', 'صى'), + (0xFD22, 'M', 'صي'), + (0xFD23, 'M', 'ضى'), + (0xFD24, 'M', 'ضي'), + (0xFD25, 'M', 'شج'), + (0xFD26, 'M', 'شح'), + (0xFD27, 'M', 'شخ'), + (0xFD28, 'M', 'شم'), + (0xFD29, 'M', 'شر'), + (0xFD2A, 'M', 'سر'), + (0xFD2B, 'M', 'صر'), + (0xFD2C, 'M', 'ضر'), + (0xFD2D, 'M', 'شج'), + (0xFD2E, 'M', 'شح'), + (0xFD2F, 'M', 'شخ'), + (0xFD30, 'M', 'شم'), + (0xFD31, 'M', 'سه'), + (0xFD32, 'M', 'شه'), + (0xFD33, 'M', 'طم'), + (0xFD34, 'M', 'سج'), + (0xFD35, 'M', 'سح'), + (0xFD36, 'M', 'سخ'), + (0xFD37, 'M', 'شج'), + (0xFD38, 'M', 'شح'), + (0xFD39, 'M', 'شخ'), + (0xFD3A, 'M', 'طم'), + (0xFD3B, 'M', 'ظم'), + (0xFD3C, 'M', 'اً'), (0xFD3E, 'V'), - (0xFD40, 'X'), - (0xFD50, 'M', u'تجم'), - (0xFD51, 'M', u'تحج'), - (0xFD53, 'M', u'تحم'), - (0xFD54, 'M', u'تخم'), - (0xFD55, 'M', u'تمج'), - (0xFD56, 'M', u'تمح'), - (0xFD57, 'M', u'تمخ'), - (0xFD58, 'M', u'جمح'), - (0xFD5A, 'M', u'حمي'), - (0xFD5B, 'M', u'حمى'), - (0xFD5C, 'M', u'سحج'), - (0xFD5D, 'M', u'سجح'), - (0xFD5E, 'M', u'سجى'), - (0xFD5F, 'M', u'سمح'), - (0xFD61, 'M', u'سمج'), - (0xFD62, 'M', u'سمم'), - (0xFD64, 'M', u'صحح'), - (0xFD66, 'M', u'صمم'), - (0xFD67, 'M', u'شحم'), - (0xFD69, 'M', u'شجي'), + (0xFD50, 'M', 'تجم'), + (0xFD51, 'M', 'تحج'), + (0xFD53, 'M', 'تحم'), + (0xFD54, 'M', 'تخم'), + (0xFD55, 'M', 'تمج'), + (0xFD56, 'M', 'تمح'), + (0xFD57, 'M', 'تمخ'), + (0xFD58, 'M', 'جمح'), + (0xFD5A, 'M', 'حمي'), + (0xFD5B, 'M', 'حمى'), + (0xFD5C, 'M', 'سحج'), + (0xFD5D, 'M', 'سجح'), + (0xFD5E, 'M', 'سجى'), + (0xFD5F, 'M', 'سمح'), + (0xFD61, 'M', 'سمج'), + (0xFD62, 'M', 'سمم'), + (0xFD64, 'M', 'صحح'), + (0xFD66, 'M', 'صمم'), + (0xFD67, 'M', 'شحم'), + (0xFD69, 'M', 'شجي'), + (0xFD6A, 'M', 'شمخ'), + (0xFD6C, 'M', 'شمم'), + (0xFD6E, 'M', 'ضحى'), + (0xFD6F, 'M', 'ضخم'), + (0xFD71, 'M', 'طمح'), + (0xFD73, 'M', 'طمم'), + (0xFD74, 'M', 'طمي'), + (0xFD75, 'M', 'عجم'), + (0xFD76, 'M', 'عمم'), + (0xFD78, 'M', 'عمى'), + (0xFD79, 'M', 'غمم'), + (0xFD7A, 'M', 'غمي'), + (0xFD7B, 'M', 'غمى'), + (0xFD7C, 'M', 'فخم'), + (0xFD7E, 'M', 'قمح'), + (0xFD7F, 'M', 'قمم'), + (0xFD80, 'M', 'لحم'), + (0xFD81, 'M', 'لحي'), + (0xFD82, 'M', 'لحى'), + (0xFD83, 'M', 'لجج'), + (0xFD85, 'M', 'لخم'), + (0xFD87, 'M', 'لمح'), + (0xFD89, 'M', 'محج'), + (0xFD8A, 'M', 'محم'), + (0xFD8B, 'M', 'محي'), + (0xFD8C, 'M', 'مجح'), + (0xFD8D, 'M', 'مجم'), + (0xFD8E, 'M', 'مخج'), + (0xFD8F, 'M', 'مخم'), + (0xFD90, 'X'), + (0xFD92, 'M', 'مجخ'), + (0xFD93, 'M', 'همج'), + (0xFD94, 'M', 'همم'), + (0xFD95, 'M', 'نحم'), + (0xFD96, 'M', 'نحى'), + (0xFD97, 'M', 'نجم'), + (0xFD99, 'M', 'نجى'), + (0xFD9A, 'M', 'نمي'), + (0xFD9B, 'M', 'نمى'), + (0xFD9C, 'M', 'يمم'), + (0xFD9E, 'M', 'بخي'), + (0xFD9F, 'M', 'تجي'), + (0xFDA0, 'M', 'تجى'), + (0xFDA1, 'M', 'تخي'), + (0xFDA2, 'M', 'تخى'), + (0xFDA3, 'M', 'تمي'), + (0xFDA4, 'M', 'تمى'), + (0xFDA5, 'M', 'جمي'), + (0xFDA6, 'M', 'جحى'), ] -def _seg_48(): +def _seg_49() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: return [ - (0xFD6A, 'M', u'شمخ'), - (0xFD6C, 'M', u'شمم'), - (0xFD6E, 'M', u'ضحى'), - (0xFD6F, 'M', u'ضخم'), - (0xFD71, 'M', u'طمح'), - (0xFD73, 'M', u'طمم'), - (0xFD74, 'M', u'طمي'), - (0xFD75, 'M', u'عجم'), - (0xFD76, 'M', u'عمم'), - (0xFD78, 'M', u'عمى'), - (0xFD79, 'M', u'غمم'), - (0xFD7A, 'M', u'غمي'), - (0xFD7B, 'M', u'غمى'), - (0xFD7C, 'M', 
u'فخم'), - (0xFD7E, 'M', u'قمح'), - (0xFD7F, 'M', u'قمم'), - (0xFD80, 'M', u'لحم'), - (0xFD81, 'M', u'لحي'), - (0xFD82, 'M', u'لحى'), - (0xFD83, 'M', u'لجج'), - (0xFD85, 'M', u'لخم'), - (0xFD87, 'M', u'لمح'), - (0xFD89, 'M', u'محج'), - (0xFD8A, 'M', u'محم'), - (0xFD8B, 'M', u'محي'), - (0xFD8C, 'M', u'مجح'), - (0xFD8D, 'M', u'مجم'), - (0xFD8E, 'M', u'مخج'), - (0xFD8F, 'M', u'مخم'), - (0xFD90, 'X'), - (0xFD92, 'M', u'مجخ'), - (0xFD93, 'M', u'همج'), - (0xFD94, 'M', u'همم'), - (0xFD95, 'M', u'نحم'), - (0xFD96, 'M', u'نحى'), - (0xFD97, 'M', u'نجم'), - (0xFD99, 'M', u'نجى'), - (0xFD9A, 'M', u'نمي'), - (0xFD9B, 'M', u'نمى'), - (0xFD9C, 'M', u'يمم'), - (0xFD9E, 'M', u'بخي'), - (0xFD9F, 'M', u'تجي'), - (0xFDA0, 'M', u'تجى'), - (0xFDA1, 'M', u'تخي'), - (0xFDA2, 'M', u'تخى'), - (0xFDA3, 'M', u'تمي'), - (0xFDA4, 'M', u'تمى'), - (0xFDA5, 'M', u'جمي'), - (0xFDA6, 'M', u'جحى'), - (0xFDA7, 'M', u'جمى'), - (0xFDA8, 'M', u'سخى'), - (0xFDA9, 'M', u'صحي'), - (0xFDAA, 'M', u'شحي'), - (0xFDAB, 'M', u'ضحي'), - (0xFDAC, 'M', u'لجي'), - (0xFDAD, 'M', u'لمي'), - (0xFDAE, 'M', u'يحي'), - (0xFDAF, 'M', u'يجي'), - (0xFDB0, 'M', u'يمي'), - (0xFDB1, 'M', u'ممي'), - (0xFDB2, 'M', u'قمي'), - (0xFDB3, 'M', u'نحي'), - (0xFDB4, 'M', u'قمح'), - (0xFDB5, 'M', u'لحم'), - (0xFDB6, 'M', u'عمي'), - (0xFDB7, 'M', u'كمي'), - (0xFDB8, 'M', u'نجح'), - (0xFDB9, 'M', u'مخي'), - (0xFDBA, 'M', u'لجم'), - (0xFDBB, 'M', u'كمم'), - (0xFDBC, 'M', u'لجم'), - (0xFDBD, 'M', u'نجح'), - (0xFDBE, 'M', u'جحي'), - (0xFDBF, 'M', u'حجي'), - (0xFDC0, 'M', u'مجي'), - (0xFDC1, 'M', u'فمي'), - (0xFDC2, 'M', u'بحي'), - (0xFDC3, 'M', u'كمم'), - (0xFDC4, 'M', u'عجم'), - (0xFDC5, 'M', u'صمم'), - (0xFDC6, 'M', u'سخي'), - (0xFDC7, 'M', u'نجي'), + (0xFDA7, 'M', 'جمى'), + (0xFDA8, 'M', 'سخى'), + (0xFDA9, 'M', 'صحي'), + (0xFDAA, 'M', 'شحي'), + (0xFDAB, 'M', 'ضحي'), + (0xFDAC, 'M', 'لجي'), + (0xFDAD, 'M', 'لمي'), + (0xFDAE, 'M', 'يحي'), + (0xFDAF, 'M', 'يجي'), + (0xFDB0, 'M', 'يمي'), + (0xFDB1, 'M', 'ممي'), + (0xFDB2, 'M', 'قمي'), + (0xFDB3, 'M', 'نحي'), + (0xFDB4, 'M', 'قمح'), + (0xFDB5, 'M', 'لحم'), + (0xFDB6, 'M', 'عمي'), + (0xFDB7, 'M', 'كمي'), + (0xFDB8, 'M', 'نجح'), + (0xFDB9, 'M', 'مخي'), + (0xFDBA, 'M', 'لجم'), + (0xFDBB, 'M', 'كمم'), + (0xFDBC, 'M', 'لجم'), + (0xFDBD, 'M', 'نجح'), + (0xFDBE, 'M', 'جحي'), + (0xFDBF, 'M', 'حجي'), + (0xFDC0, 'M', 'مجي'), + (0xFDC1, 'M', 'فمي'), + (0xFDC2, 'M', 'بحي'), + (0xFDC3, 'M', 'كمم'), + (0xFDC4, 'M', 'عجم'), + (0xFDC5, 'M', 'صمم'), + (0xFDC6, 'M', 'سخي'), + (0xFDC7, 'M', 'نجي'), (0xFDC8, 'X'), - (0xFDF0, 'M', u'صلے'), - (0xFDF1, 'M', u'قلے'), - (0xFDF2, 'M', u'الله'), - (0xFDF3, 'M', u'اكبر'), - (0xFDF4, 'M', u'محمد'), - (0xFDF5, 'M', u'صلعم'), - (0xFDF6, 'M', u'رسول'), - (0xFDF7, 'M', u'عليه'), - (0xFDF8, 'M', u'وسلم'), - (0xFDF9, 'M', u'صلى'), - (0xFDFA, '3', u'صلى الله عليه وسلم'), - (0xFDFB, '3', u'جل جلاله'), - (0xFDFC, 'M', u'ریال'), + (0xFDCF, 'V'), + (0xFDD0, 'X'), + (0xFDF0, 'M', 'صلے'), + (0xFDF1, 'M', 'قلے'), + (0xFDF2, 'M', 'الله'), + (0xFDF3, 'M', 'اكبر'), + (0xFDF4, 'M', 'محمد'), + (0xFDF5, 'M', 'صلعم'), + (0xFDF6, 'M', 'رسول'), + (0xFDF7, 'M', 'عليه'), + (0xFDF8, 'M', 'وسلم'), + (0xFDF9, 'M', 'صلى'), + (0xFDFA, '3', 'صلى الله عليه وسلم'), + (0xFDFB, '3', 'جل جلاله'), + (0xFDFC, 'M', 'ریال'), (0xFDFD, 'V'), - (0xFDFE, 'X'), (0xFE00, 'I'), - (0xFE10, '3', u','), - ] - -def _seg_49(): - return [ - (0xFE11, 'M', u'、'), + (0xFE10, '3', ','), + (0xFE11, 'M', '、'), (0xFE12, 'X'), - (0xFE13, '3', u':'), - (0xFE14, '3', u';'), - (0xFE15, '3', u'!'), - (0xFE16, '3', u'?'), - (0xFE17, 'M', u'〖'), - (0xFE18, 'M', 
u'〗'), + (0xFE13, '3', ':'), + (0xFE14, '3', ';'), + (0xFE15, '3', '!'), + (0xFE16, '3', '?'), + (0xFE17, 'M', '〖'), + (0xFE18, 'M', '〗'), (0xFE19, 'X'), (0xFE20, 'V'), (0xFE30, 'X'), - (0xFE31, 'M', u'—'), - (0xFE32, 'M', u'–'), - (0xFE33, '3', u'_'), - (0xFE35, '3', u'('), - (0xFE36, '3', u')'), - (0xFE37, '3', u'{'), - (0xFE38, '3', u'}'), - (0xFE39, 'M', u'〔'), - (0xFE3A, 'M', u'〕'), - (0xFE3B, 'M', u'【'), - (0xFE3C, 'M', u'】'), - (0xFE3D, 'M', u'《'), - (0xFE3E, 'M', u'》'), - (0xFE3F, 'M', u'〈'), - (0xFE40, 'M', u'〉'), - (0xFE41, 'M', u'「'), - (0xFE42, 'M', u'」'), - (0xFE43, 'M', u'『'), - (0xFE44, 'M', u'』'), + (0xFE31, 'M', '—'), + (0xFE32, 'M', '–'), + (0xFE33, '3', '_'), + (0xFE35, '3', '('), + (0xFE36, '3', ')'), + (0xFE37, '3', '{'), + (0xFE38, '3', '}'), + (0xFE39, 'M', '〔'), + (0xFE3A, 'M', '〕'), + (0xFE3B, 'M', '【'), + (0xFE3C, 'M', '】'), + (0xFE3D, 'M', '《'), + (0xFE3E, 'M', '》'), + (0xFE3F, 'M', '〈'), + (0xFE40, 'M', '〉'), + (0xFE41, 'M', '「'), + (0xFE42, 'M', '」'), + (0xFE43, 'M', '『'), + (0xFE44, 'M', '』'), (0xFE45, 'V'), - (0xFE47, '3', u'['), - (0xFE48, '3', u']'), - (0xFE49, '3', u' ̅'), - (0xFE4D, '3', u'_'), - (0xFE50, '3', u','), - (0xFE51, 'M', u'、'), + (0xFE47, '3', '['), + (0xFE48, '3', ']'), + (0xFE49, '3', ' ̅'), + (0xFE4D, '3', '_'), + (0xFE50, '3', ','), + (0xFE51, 'M', '、'), (0xFE52, 'X'), - (0xFE54, '3', u';'), - (0xFE55, '3', u':'), - (0xFE56, '3', u'?'), - (0xFE57, '3', u'!'), - (0xFE58, 'M', u'—'), - (0xFE59, '3', u'('), - (0xFE5A, '3', u')'), - (0xFE5B, '3', u'{'), - (0xFE5C, '3', u'}'), - (0xFE5D, 'M', u'〔'), - (0xFE5E, 'M', u'〕'), - (0xFE5F, '3', u'#'), - (0xFE60, '3', u'&'), - (0xFE61, '3', u'*'), - (0xFE62, '3', u'+'), - (0xFE63, 'M', u'-'), - (0xFE64, '3', u'<'), - (0xFE65, '3', u'>'), - (0xFE66, '3', u'='), + (0xFE54, '3', ';'), + (0xFE55, '3', ':'), + (0xFE56, '3', '?'), + (0xFE57, '3', '!'), + (0xFE58, 'M', '—'), + (0xFE59, '3', '('), + (0xFE5A, '3', ')'), + (0xFE5B, '3', '{'), + (0xFE5C, '3', '}'), + (0xFE5D, 'M', '〔'), + ] + +def _seg_50() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFE5E, 'M', '〕'), + (0xFE5F, '3', '#'), + (0xFE60, '3', '&'), + (0xFE61, '3', '*'), + (0xFE62, '3', '+'), + (0xFE63, 'M', '-'), + (0xFE64, '3', '<'), + (0xFE65, '3', '>'), + (0xFE66, '3', '='), (0xFE67, 'X'), - (0xFE68, '3', u'\\'), - (0xFE69, '3', u'$'), - (0xFE6A, '3', u'%'), - (0xFE6B, '3', u'@'), + (0xFE68, '3', '\\'), + (0xFE69, '3', '$'), + (0xFE6A, '3', '%'), + (0xFE6B, '3', '@'), (0xFE6C, 'X'), - (0xFE70, '3', u' ً'), - (0xFE71, 'M', u'ـً'), - (0xFE72, '3', u' ٌ'), + (0xFE70, '3', ' ً'), + (0xFE71, 'M', 'ـً'), + (0xFE72, '3', ' ٌ'), (0xFE73, 'V'), - (0xFE74, '3', u' ٍ'), + (0xFE74, '3', ' ٍ'), (0xFE75, 'X'), - (0xFE76, '3', u' َ'), - (0xFE77, 'M', u'ـَ'), - (0xFE78, '3', u' ُ'), - (0xFE79, 'M', u'ـُ'), - (0xFE7A, '3', u' ِ'), - (0xFE7B, 'M', u'ـِ'), - (0xFE7C, '3', u' ّ'), - (0xFE7D, 'M', u'ـّ'), - (0xFE7E, '3', u' ْ'), - (0xFE7F, 'M', u'ـْ'), - (0xFE80, 'M', u'ء'), - (0xFE81, 'M', u'آ'), - (0xFE83, 'M', u'أ'), - (0xFE85, 'M', u'ؤ'), - (0xFE87, 'M', u'إ'), - (0xFE89, 'M', u'ئ'), - (0xFE8D, 'M', u'ا'), - (0xFE8F, 'M', u'ب'), - (0xFE93, 'M', u'ة'), - (0xFE95, 'M', u'ت'), - (0xFE99, 'M', u'ث'), - (0xFE9D, 'M', u'ج'), - (0xFEA1, 'M', u'ح'), - (0xFEA5, 'M', u'خ'), - (0xFEA9, 'M', u'د'), - (0xFEAB, 'M', u'ذ'), - (0xFEAD, 'M', u'ر'), - (0xFEAF, 'M', u'ز'), - (0xFEB1, 'M', u'س'), - (0xFEB5, 'M', u'ش'), - (0xFEB9, 'M', u'ص'), - ] - -def _seg_50(): - return [ - (0xFEBD, 'M', u'ض'), - (0xFEC1, 'M', u'ط'), - (0xFEC5, 'M', u'ظ'), - (0xFEC9, 'M', 
u'ع'), - (0xFECD, 'M', u'غ'), - (0xFED1, 'M', u'ف'), - (0xFED5, 'M', u'ق'), - (0xFED9, 'M', u'ك'), - (0xFEDD, 'M', u'ل'), - (0xFEE1, 'M', u'م'), - (0xFEE5, 'M', u'ن'), - (0xFEE9, 'M', u'ه'), - (0xFEED, 'M', u'و'), - (0xFEEF, 'M', u'ى'), - (0xFEF1, 'M', u'ي'), - (0xFEF5, 'M', u'لآ'), - (0xFEF7, 'M', u'لأ'), - (0xFEF9, 'M', u'لإ'), - (0xFEFB, 'M', u'لا'), + (0xFE76, '3', ' َ'), + (0xFE77, 'M', 'ـَ'), + (0xFE78, '3', ' ُ'), + (0xFE79, 'M', 'ـُ'), + (0xFE7A, '3', ' ِ'), + (0xFE7B, 'M', 'ـِ'), + (0xFE7C, '3', ' ّ'), + (0xFE7D, 'M', 'ـّ'), + (0xFE7E, '3', ' ْ'), + (0xFE7F, 'M', 'ـْ'), + (0xFE80, 'M', 'ء'), + (0xFE81, 'M', 'آ'), + (0xFE83, 'M', 'أ'), + (0xFE85, 'M', 'ؤ'), + (0xFE87, 'M', 'إ'), + (0xFE89, 'M', 'ئ'), + (0xFE8D, 'M', 'ا'), + (0xFE8F, 'M', 'ب'), + (0xFE93, 'M', 'ة'), + (0xFE95, 'M', 'ت'), + (0xFE99, 'M', 'ث'), + (0xFE9D, 'M', 'ج'), + (0xFEA1, 'M', 'ح'), + (0xFEA5, 'M', 'خ'), + (0xFEA9, 'M', 'د'), + (0xFEAB, 'M', 'ذ'), + (0xFEAD, 'M', 'ر'), + (0xFEAF, 'M', 'ز'), + (0xFEB1, 'M', 'س'), + (0xFEB5, 'M', 'ش'), + (0xFEB9, 'M', 'ص'), + (0xFEBD, 'M', 'ض'), + (0xFEC1, 'M', 'ط'), + (0xFEC5, 'M', 'ظ'), + (0xFEC9, 'M', 'ع'), + (0xFECD, 'M', 'غ'), + (0xFED1, 'M', 'ف'), + (0xFED5, 'M', 'ق'), + (0xFED9, 'M', 'ك'), + (0xFEDD, 'M', 'ل'), + (0xFEE1, 'M', 'م'), + (0xFEE5, 'M', 'ن'), + (0xFEE9, 'M', 'ه'), + (0xFEED, 'M', 'و'), + (0xFEEF, 'M', 'ى'), + (0xFEF1, 'M', 'ي'), + (0xFEF5, 'M', 'لآ'), + (0xFEF7, 'M', 'لأ'), + (0xFEF9, 'M', 'لإ'), + (0xFEFB, 'M', 'لا'), (0xFEFD, 'X'), (0xFEFF, 'I'), (0xFF00, 'X'), - (0xFF01, '3', u'!'), - (0xFF02, '3', u'"'), - (0xFF03, '3', u'#'), - (0xFF04, '3', u'$'), - (0xFF05, '3', u'%'), - (0xFF06, '3', u'&'), - (0xFF07, '3', u'\''), - (0xFF08, '3', u'('), - (0xFF09, '3', u')'), - (0xFF0A, '3', u'*'), - (0xFF0B, '3', u'+'), - (0xFF0C, '3', u','), - (0xFF0D, 'M', u'-'), - (0xFF0E, 'M', u'.'), - (0xFF0F, '3', u'/'), - (0xFF10, 'M', u'0'), - (0xFF11, 'M', u'1'), - (0xFF12, 'M', u'2'), - (0xFF13, 'M', u'3'), - (0xFF14, 'M', u'4'), - (0xFF15, 'M', u'5'), - (0xFF16, 'M', u'6'), - (0xFF17, 'M', u'7'), - (0xFF18, 'M', u'8'), - (0xFF19, 'M', u'9'), - (0xFF1A, '3', u':'), - (0xFF1B, '3', u';'), - (0xFF1C, '3', u'<'), - (0xFF1D, '3', u'='), - (0xFF1E, '3', u'>'), - (0xFF1F, '3', u'?'), - (0xFF20, '3', u'@'), - (0xFF21, 'M', u'a'), - (0xFF22, 'M', u'b'), - (0xFF23, 'M', u'c'), - (0xFF24, 'M', u'd'), - (0xFF25, 'M', u'e'), - (0xFF26, 'M', u'f'), - (0xFF27, 'M', u'g'), - (0xFF28, 'M', u'h'), - (0xFF29, 'M', u'i'), - (0xFF2A, 'M', u'j'), - (0xFF2B, 'M', u'k'), - (0xFF2C, 'M', u'l'), - (0xFF2D, 'M', u'm'), - (0xFF2E, 'M', u'n'), - (0xFF2F, 'M', u'o'), - (0xFF30, 'M', u'p'), - (0xFF31, 'M', u'q'), - (0xFF32, 'M', u'r'), - (0xFF33, 'M', u's'), - (0xFF34, 'M', u't'), - (0xFF35, 'M', u'u'), - (0xFF36, 'M', u'v'), - (0xFF37, 'M', u'w'), - (0xFF38, 'M', u'x'), - (0xFF39, 'M', u'y'), - (0xFF3A, 'M', u'z'), - (0xFF3B, '3', u'['), - (0xFF3C, '3', u'\\'), - (0xFF3D, '3', u']'), - (0xFF3E, '3', u'^'), - (0xFF3F, '3', u'_'), - (0xFF40, '3', u'`'), - (0xFF41, 'M', u'a'), - (0xFF42, 'M', u'b'), - (0xFF43, 'M', u'c'), - (0xFF44, 'M', u'd'), - (0xFF45, 'M', u'e'), - (0xFF46, 'M', u'f'), - (0xFF47, 'M', u'g'), - (0xFF48, 'M', u'h'), - (0xFF49, 'M', u'i'), - (0xFF4A, 'M', u'j'), - (0xFF4B, 'M', u'k'), - (0xFF4C, 'M', u'l'), - (0xFF4D, 'M', u'm'), - (0xFF4E, 'M', u'n'), + (0xFF01, '3', '!'), + (0xFF02, '3', '"'), + (0xFF03, '3', '#'), + (0xFF04, '3', '$'), + (0xFF05, '3', '%'), + (0xFF06, '3', '&'), + (0xFF07, '3', '\''), + (0xFF08, '3', '('), + (0xFF09, '3', ')'), + (0xFF0A, '3', '*'), + (0xFF0B, '3', '+'), 
+ (0xFF0C, '3', ','), + (0xFF0D, 'M', '-'), + (0xFF0E, 'M', '.'), + (0xFF0F, '3', '/'), + (0xFF10, 'M', '0'), + (0xFF11, 'M', '1'), + (0xFF12, 'M', '2'), + (0xFF13, 'M', '3'), + (0xFF14, 'M', '4'), + (0xFF15, 'M', '5'), + (0xFF16, 'M', '6'), + (0xFF17, 'M', '7'), + (0xFF18, 'M', '8'), + (0xFF19, 'M', '9'), + (0xFF1A, '3', ':'), ] -def _seg_51(): +def _seg_51() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: return [ - (0xFF4F, 'M', u'o'), - (0xFF50, 'M', u'p'), - (0xFF51, 'M', u'q'), - (0xFF52, 'M', u'r'), - (0xFF53, 'M', u's'), - (0xFF54, 'M', u't'), - (0xFF55, 'M', u'u'), - (0xFF56, 'M', u'v'), - (0xFF57, 'M', u'w'), - (0xFF58, 'M', u'x'), - (0xFF59, 'M', u'y'), - (0xFF5A, 'M', u'z'), - (0xFF5B, '3', u'{'), - (0xFF5C, '3', u'|'), - (0xFF5D, '3', u'}'), - (0xFF5E, '3', u'~'), - (0xFF5F, 'M', u'⦅'), - (0xFF60, 'M', u'⦆'), - (0xFF61, 'M', u'.'), - (0xFF62, 'M', u'「'), - (0xFF63, 'M', u'」'), - (0xFF64, 'M', u'、'), - (0xFF65, 'M', u'・'), - (0xFF66, 'M', u'ヲ'), - (0xFF67, 'M', u'ァ'), - (0xFF68, 'M', u'ィ'), - (0xFF69, 'M', u'ゥ'), - (0xFF6A, 'M', u'ェ'), - (0xFF6B, 'M', u'ォ'), - (0xFF6C, 'M', u'ャ'), - (0xFF6D, 'M', u'ュ'), - (0xFF6E, 'M', u'ョ'), - (0xFF6F, 'M', u'ッ'), - (0xFF70, 'M', u'ー'), - (0xFF71, 'M', u'ア'), - (0xFF72, 'M', u'イ'), - (0xFF73, 'M', u'ウ'), - (0xFF74, 'M', u'エ'), - (0xFF75, 'M', u'オ'), - (0xFF76, 'M', u'カ'), - (0xFF77, 'M', u'キ'), - (0xFF78, 'M', u'ク'), - (0xFF79, 'M', u'ケ'), - (0xFF7A, 'M', u'コ'), - (0xFF7B, 'M', u'サ'), - (0xFF7C, 'M', u'シ'), - (0xFF7D, 'M', u'ス'), - (0xFF7E, 'M', u'セ'), - (0xFF7F, 'M', u'ソ'), - (0xFF80, 'M', u'タ'), - (0xFF81, 'M', u'チ'), - (0xFF82, 'M', u'ツ'), - (0xFF83, 'M', u'テ'), - (0xFF84, 'M', u'ト'), - (0xFF85, 'M', u'ナ'), - (0xFF86, 'M', u'ニ'), - (0xFF87, 'M', u'ヌ'), - (0xFF88, 'M', u'ネ'), - (0xFF89, 'M', u'ノ'), - (0xFF8A, 'M', u'ハ'), - (0xFF8B, 'M', u'ヒ'), - (0xFF8C, 'M', u'フ'), - (0xFF8D, 'M', u'ヘ'), - (0xFF8E, 'M', u'ホ'), - (0xFF8F, 'M', u'マ'), - (0xFF90, 'M', u'ミ'), - (0xFF91, 'M', u'ム'), - (0xFF92, 'M', u'メ'), - (0xFF93, 'M', u'モ'), - (0xFF94, 'M', u'ヤ'), - (0xFF95, 'M', u'ユ'), - (0xFF96, 'M', u'ヨ'), - (0xFF97, 'M', u'ラ'), - (0xFF98, 'M', u'リ'), - (0xFF99, 'M', u'ル'), - (0xFF9A, 'M', u'レ'), - (0xFF9B, 'M', u'ロ'), - (0xFF9C, 'M', u'ワ'), - (0xFF9D, 'M', u'ン'), - (0xFF9E, 'M', u'゙'), - (0xFF9F, 'M', u'゚'), - (0xFFA0, 'X'), - (0xFFA1, 'M', u'ᄀ'), - (0xFFA2, 'M', u'ᄁ'), - (0xFFA3, 'M', u'ᆪ'), - (0xFFA4, 'M', u'ᄂ'), - (0xFFA5, 'M', u'ᆬ'), - (0xFFA6, 'M', u'ᆭ'), - (0xFFA7, 'M', u'ᄃ'), - (0xFFA8, 'M', u'ᄄ'), - (0xFFA9, 'M', u'ᄅ'), - (0xFFAA, 'M', u'ᆰ'), - (0xFFAB, 'M', u'ᆱ'), - (0xFFAC, 'M', u'ᆲ'), - (0xFFAD, 'M', u'ᆳ'), - (0xFFAE, 'M', u'ᆴ'), - (0xFFAF, 'M', u'ᆵ'), - (0xFFB0, 'M', u'ᄚ'), - (0xFFB1, 'M', u'ᄆ'), - (0xFFB2, 'M', u'ᄇ'), + (0xFF1B, '3', ';'), + (0xFF1C, '3', '<'), + (0xFF1D, '3', '='), + (0xFF1E, '3', '>'), + (0xFF1F, '3', '?'), + (0xFF20, '3', '@'), + (0xFF21, 'M', 'a'), + (0xFF22, 'M', 'b'), + (0xFF23, 'M', 'c'), + (0xFF24, 'M', 'd'), + (0xFF25, 'M', 'e'), + (0xFF26, 'M', 'f'), + (0xFF27, 'M', 'g'), + (0xFF28, 'M', 'h'), + (0xFF29, 'M', 'i'), + (0xFF2A, 'M', 'j'), + (0xFF2B, 'M', 'k'), + (0xFF2C, 'M', 'l'), + (0xFF2D, 'M', 'm'), + (0xFF2E, 'M', 'n'), + (0xFF2F, 'M', 'o'), + (0xFF30, 'M', 'p'), + (0xFF31, 'M', 'q'), + (0xFF32, 'M', 'r'), + (0xFF33, 'M', 's'), + (0xFF34, 'M', 't'), + (0xFF35, 'M', 'u'), + (0xFF36, 'M', 'v'), + (0xFF37, 'M', 'w'), + (0xFF38, 'M', 'x'), + (0xFF39, 'M', 'y'), + (0xFF3A, 'M', 'z'), + (0xFF3B, '3', '['), + (0xFF3C, '3', '\\'), + (0xFF3D, '3', ']'), + (0xFF3E, '3', '^'), + (0xFF3F, '3', '_'), + (0xFF40, '3', 
'`'), + (0xFF41, 'M', 'a'), + (0xFF42, 'M', 'b'), + (0xFF43, 'M', 'c'), + (0xFF44, 'M', 'd'), + (0xFF45, 'M', 'e'), + (0xFF46, 'M', 'f'), + (0xFF47, 'M', 'g'), + (0xFF48, 'M', 'h'), + (0xFF49, 'M', 'i'), + (0xFF4A, 'M', 'j'), + (0xFF4B, 'M', 'k'), + (0xFF4C, 'M', 'l'), + (0xFF4D, 'M', 'm'), + (0xFF4E, 'M', 'n'), + (0xFF4F, 'M', 'o'), + (0xFF50, 'M', 'p'), + (0xFF51, 'M', 'q'), + (0xFF52, 'M', 'r'), + (0xFF53, 'M', 's'), + (0xFF54, 'M', 't'), + (0xFF55, 'M', 'u'), + (0xFF56, 'M', 'v'), + (0xFF57, 'M', 'w'), + (0xFF58, 'M', 'x'), + (0xFF59, 'M', 'y'), + (0xFF5A, 'M', 'z'), + (0xFF5B, '3', '{'), + (0xFF5C, '3', '|'), + (0xFF5D, '3', '}'), + (0xFF5E, '3', '~'), + (0xFF5F, 'M', '⦅'), + (0xFF60, 'M', '⦆'), + (0xFF61, 'M', '.'), + (0xFF62, 'M', '「'), + (0xFF63, 'M', '」'), + (0xFF64, 'M', '、'), + (0xFF65, 'M', '・'), + (0xFF66, 'M', 'ヲ'), + (0xFF67, 'M', 'ァ'), + (0xFF68, 'M', 'ィ'), + (0xFF69, 'M', 'ゥ'), + (0xFF6A, 'M', 'ェ'), + (0xFF6B, 'M', 'ォ'), + (0xFF6C, 'M', 'ャ'), + (0xFF6D, 'M', 'ュ'), + (0xFF6E, 'M', 'ョ'), + (0xFF6F, 'M', 'ッ'), + (0xFF70, 'M', 'ー'), + (0xFF71, 'M', 'ア'), + (0xFF72, 'M', 'イ'), + (0xFF73, 'M', 'ウ'), + (0xFF74, 'M', 'エ'), + (0xFF75, 'M', 'オ'), + (0xFF76, 'M', 'カ'), + (0xFF77, 'M', 'キ'), + (0xFF78, 'M', 'ク'), + (0xFF79, 'M', 'ケ'), + (0xFF7A, 'M', 'コ'), + (0xFF7B, 'M', 'サ'), + (0xFF7C, 'M', 'シ'), + (0xFF7D, 'M', 'ス'), + (0xFF7E, 'M', 'セ'), ] -def _seg_52(): +def _seg_52() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: return [ - (0xFFB3, 'M', u'ᄈ'), - (0xFFB4, 'M', u'ᄡ'), - (0xFFB5, 'M', u'ᄉ'), - (0xFFB6, 'M', u'ᄊ'), - (0xFFB7, 'M', u'ᄋ'), - (0xFFB8, 'M', u'ᄌ'), - (0xFFB9, 'M', u'ᄍ'), - (0xFFBA, 'M', u'ᄎ'), - (0xFFBB, 'M', u'ᄏ'), - (0xFFBC, 'M', u'ᄐ'), - (0xFFBD, 'M', u'ᄑ'), - (0xFFBE, 'M', u'ᄒ'), + (0xFF7F, 'M', 'ソ'), + (0xFF80, 'M', 'タ'), + (0xFF81, 'M', 'チ'), + (0xFF82, 'M', 'ツ'), + (0xFF83, 'M', 'テ'), + (0xFF84, 'M', 'ト'), + (0xFF85, 'M', 'ナ'), + (0xFF86, 'M', 'ニ'), + (0xFF87, 'M', 'ヌ'), + (0xFF88, 'M', 'ネ'), + (0xFF89, 'M', 'ノ'), + (0xFF8A, 'M', 'ハ'), + (0xFF8B, 'M', 'ヒ'), + (0xFF8C, 'M', 'フ'), + (0xFF8D, 'M', 'ヘ'), + (0xFF8E, 'M', 'ホ'), + (0xFF8F, 'M', 'マ'), + (0xFF90, 'M', 'ミ'), + (0xFF91, 'M', 'ム'), + (0xFF92, 'M', 'メ'), + (0xFF93, 'M', 'モ'), + (0xFF94, 'M', 'ヤ'), + (0xFF95, 'M', 'ユ'), + (0xFF96, 'M', 'ヨ'), + (0xFF97, 'M', 'ラ'), + (0xFF98, 'M', 'リ'), + (0xFF99, 'M', 'ル'), + (0xFF9A, 'M', 'レ'), + (0xFF9B, 'M', 'ロ'), + (0xFF9C, 'M', 'ワ'), + (0xFF9D, 'M', 'ン'), + (0xFF9E, 'M', '゙'), + (0xFF9F, 'M', '゚'), + (0xFFA0, 'X'), + (0xFFA1, 'M', 'ᄀ'), + (0xFFA2, 'M', 'ᄁ'), + (0xFFA3, 'M', 'ᆪ'), + (0xFFA4, 'M', 'ᄂ'), + (0xFFA5, 'M', 'ᆬ'), + (0xFFA6, 'M', 'ᆭ'), + (0xFFA7, 'M', 'ᄃ'), + (0xFFA8, 'M', 'ᄄ'), + (0xFFA9, 'M', 'ᄅ'), + (0xFFAA, 'M', 'ᆰ'), + (0xFFAB, 'M', 'ᆱ'), + (0xFFAC, 'M', 'ᆲ'), + (0xFFAD, 'M', 'ᆳ'), + (0xFFAE, 'M', 'ᆴ'), + (0xFFAF, 'M', 'ᆵ'), + (0xFFB0, 'M', 'ᄚ'), + (0xFFB1, 'M', 'ᄆ'), + (0xFFB2, 'M', 'ᄇ'), + (0xFFB3, 'M', 'ᄈ'), + (0xFFB4, 'M', 'ᄡ'), + (0xFFB5, 'M', 'ᄉ'), + (0xFFB6, 'M', 'ᄊ'), + (0xFFB7, 'M', 'ᄋ'), + (0xFFB8, 'M', 'ᄌ'), + (0xFFB9, 'M', 'ᄍ'), + (0xFFBA, 'M', 'ᄎ'), + (0xFFBB, 'M', 'ᄏ'), + (0xFFBC, 'M', 'ᄐ'), + (0xFFBD, 'M', 'ᄑ'), + (0xFFBE, 'M', 'ᄒ'), (0xFFBF, 'X'), - (0xFFC2, 'M', u'ᅡ'), - (0xFFC3, 'M', u'ᅢ'), - (0xFFC4, 'M', u'ᅣ'), - (0xFFC5, 'M', u'ᅤ'), - (0xFFC6, 'M', u'ᅥ'), - (0xFFC7, 'M', u'ᅦ'), + (0xFFC2, 'M', 'ᅡ'), + (0xFFC3, 'M', 'ᅢ'), + (0xFFC4, 'M', 'ᅣ'), + (0xFFC5, 'M', 'ᅤ'), + (0xFFC6, 'M', 'ᅥ'), + (0xFFC7, 'M', 'ᅦ'), (0xFFC8, 'X'), - (0xFFCA, 'M', u'ᅧ'), - (0xFFCB, 'M', u'ᅨ'), - (0xFFCC, 'M', u'ᅩ'), - (0xFFCD, 'M', u'ᅪ'), - (0xFFCE, 'M', 
u'ᅫ'), - (0xFFCF, 'M', u'ᅬ'), + (0xFFCA, 'M', 'ᅧ'), + (0xFFCB, 'M', 'ᅨ'), + (0xFFCC, 'M', 'ᅩ'), + (0xFFCD, 'M', 'ᅪ'), + (0xFFCE, 'M', 'ᅫ'), + (0xFFCF, 'M', 'ᅬ'), (0xFFD0, 'X'), - (0xFFD2, 'M', u'ᅭ'), - (0xFFD3, 'M', u'ᅮ'), - (0xFFD4, 'M', u'ᅯ'), - (0xFFD5, 'M', u'ᅰ'), - (0xFFD6, 'M', u'ᅱ'), - (0xFFD7, 'M', u'ᅲ'), + (0xFFD2, 'M', 'ᅭ'), + (0xFFD3, 'M', 'ᅮ'), + (0xFFD4, 'M', 'ᅯ'), + (0xFFD5, 'M', 'ᅰ'), + (0xFFD6, 'M', 'ᅱ'), + (0xFFD7, 'M', 'ᅲ'), (0xFFD8, 'X'), - (0xFFDA, 'M', u'ᅳ'), - (0xFFDB, 'M', u'ᅴ'), - (0xFFDC, 'M', u'ᅵ'), + (0xFFDA, 'M', 'ᅳ'), + (0xFFDB, 'M', 'ᅴ'), + (0xFFDC, 'M', 'ᅵ'), (0xFFDD, 'X'), - (0xFFE0, 'M', u'¢'), - (0xFFE1, 'M', u'£'), - (0xFFE2, 'M', u'¬'), - (0xFFE3, '3', u' ̄'), - (0xFFE4, 'M', u'¦'), - (0xFFE5, 'M', u'¥'), - (0xFFE6, 'M', u'₩'), + (0xFFE0, 'M', '¢'), + (0xFFE1, 'M', '£'), + (0xFFE2, 'M', '¬'), + (0xFFE3, '3', ' ̄'), + (0xFFE4, 'M', '¦'), + (0xFFE5, 'M', '¥'), + (0xFFE6, 'M', '₩'), (0xFFE7, 'X'), - (0xFFE8, 'M', u'│'), - (0xFFE9, 'M', u'←'), - (0xFFEA, 'M', u'↑'), - (0xFFEB, 'M', u'→'), - (0xFFEC, 'M', u'↓'), - (0xFFED, 'M', u'■'), - (0xFFEE, 'M', u'○'), + (0xFFE8, 'M', '│'), + (0xFFE9, 'M', '←'), + ] + +def _seg_53() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0xFFEA, 'M', '↑'), + (0xFFEB, 'M', '→'), + (0xFFEC, 'M', '↓'), + (0xFFED, 'M', '■'), + (0xFFEE, 'M', '○'), (0xFFEF, 'X'), (0x10000, 'V'), (0x1000C, 'X'), @@ -5490,7 +5549,7 @@ def _seg_52(): (0x10137, 'V'), (0x1018F, 'X'), (0x10190, 'V'), - (0x1019C, 'X'), + (0x1019D, 'X'), (0x101A0, 'V'), (0x101A1, 'X'), (0x101D0, 'V'), @@ -5513,90 +5572,90 @@ def _seg_52(): (0x103C4, 'X'), (0x103C8, 'V'), (0x103D6, 'X'), - (0x10400, 'M', u'𐐨'), - (0x10401, 'M', u'𐐩'), - ] - -def _seg_53(): - return [ - (0x10402, 'M', u'𐐪'), - (0x10403, 'M', u'𐐫'), - (0x10404, 'M', u'𐐬'), - (0x10405, 'M', u'𐐭'), - (0x10406, 'M', u'𐐮'), - (0x10407, 'M', u'𐐯'), - (0x10408, 'M', u'𐐰'), - (0x10409, 'M', u'𐐱'), - (0x1040A, 'M', u'𐐲'), - (0x1040B, 'M', u'𐐳'), - (0x1040C, 'M', u'𐐴'), - (0x1040D, 'M', u'𐐵'), - (0x1040E, 'M', u'𐐶'), - (0x1040F, 'M', u'𐐷'), - (0x10410, 'M', u'𐐸'), - (0x10411, 'M', u'𐐹'), - (0x10412, 'M', u'𐐺'), - (0x10413, 'M', u'𐐻'), - (0x10414, 'M', u'𐐼'), - (0x10415, 'M', u'𐐽'), - (0x10416, 'M', u'𐐾'), - (0x10417, 'M', u'𐐿'), - (0x10418, 'M', u'𐑀'), - (0x10419, 'M', u'𐑁'), - (0x1041A, 'M', u'𐑂'), - (0x1041B, 'M', u'𐑃'), - (0x1041C, 'M', u'𐑄'), - (0x1041D, 'M', u'𐑅'), - (0x1041E, 'M', u'𐑆'), - (0x1041F, 'M', u'𐑇'), - (0x10420, 'M', u'𐑈'), - (0x10421, 'M', u'𐑉'), - (0x10422, 'M', u'𐑊'), - (0x10423, 'M', u'𐑋'), - (0x10424, 'M', u'𐑌'), - (0x10425, 'M', u'𐑍'), - (0x10426, 'M', u'𐑎'), - (0x10427, 'M', u'𐑏'), + (0x10400, 'M', '𐐨'), + (0x10401, 'M', '𐐩'), + (0x10402, 'M', '𐐪'), + (0x10403, 'M', '𐐫'), + (0x10404, 'M', '𐐬'), + (0x10405, 'M', '𐐭'), + (0x10406, 'M', '𐐮'), + (0x10407, 'M', '𐐯'), + (0x10408, 'M', '𐐰'), + (0x10409, 'M', '𐐱'), + (0x1040A, 'M', '𐐲'), + (0x1040B, 'M', '𐐳'), + (0x1040C, 'M', '𐐴'), + (0x1040D, 'M', '𐐵'), + (0x1040E, 'M', '𐐶'), + (0x1040F, 'M', '𐐷'), + (0x10410, 'M', '𐐸'), + (0x10411, 'M', '𐐹'), + (0x10412, 'M', '𐐺'), + (0x10413, 'M', '𐐻'), + (0x10414, 'M', '𐐼'), + (0x10415, 'M', '𐐽'), + (0x10416, 'M', '𐐾'), + (0x10417, 'M', '𐐿'), + (0x10418, 'M', '𐑀'), + (0x10419, 'M', '𐑁'), + (0x1041A, 'M', '𐑂'), + (0x1041B, 'M', '𐑃'), + (0x1041C, 'M', '𐑄'), + (0x1041D, 'M', '𐑅'), + (0x1041E, 'M', '𐑆'), + (0x1041F, 'M', '𐑇'), + (0x10420, 'M', '𐑈'), + (0x10421, 'M', '𐑉'), + (0x10422, 'M', '𐑊'), + (0x10423, 'M', '𐑋'), + (0x10424, 'M', '𐑌'), + (0x10425, 'M', '𐑍'), + (0x10426, 'M', '𐑎'), + (0x10427, 
'M', '𐑏'), (0x10428, 'V'), (0x1049E, 'X'), (0x104A0, 'V'), (0x104AA, 'X'), - (0x104B0, 'M', u'𐓘'), - (0x104B1, 'M', u'𐓙'), - (0x104B2, 'M', u'𐓚'), - (0x104B3, 'M', u'𐓛'), - (0x104B4, 'M', u'𐓜'), - (0x104B5, 'M', u'𐓝'), - (0x104B6, 'M', u'𐓞'), - (0x104B7, 'M', u'𐓟'), - (0x104B8, 'M', u'𐓠'), - (0x104B9, 'M', u'𐓡'), - (0x104BA, 'M', u'𐓢'), - (0x104BB, 'M', u'𐓣'), - (0x104BC, 'M', u'𐓤'), - (0x104BD, 'M', u'𐓥'), - (0x104BE, 'M', u'𐓦'), - (0x104BF, 'M', u'𐓧'), - (0x104C0, 'M', u'𐓨'), - (0x104C1, 'M', u'𐓩'), - (0x104C2, 'M', u'𐓪'), - (0x104C3, 'M', u'𐓫'), - (0x104C4, 'M', u'𐓬'), - (0x104C5, 'M', u'𐓭'), - (0x104C6, 'M', u'𐓮'), - (0x104C7, 'M', u'𐓯'), - (0x104C8, 'M', u'𐓰'), - (0x104C9, 'M', u'𐓱'), - (0x104CA, 'M', u'𐓲'), - (0x104CB, 'M', u'𐓳'), - (0x104CC, 'M', u'𐓴'), - (0x104CD, 'M', u'𐓵'), - (0x104CE, 'M', u'𐓶'), - (0x104CF, 'M', u'𐓷'), - (0x104D0, 'M', u'𐓸'), - (0x104D1, 'M', u'𐓹'), - (0x104D2, 'M', u'𐓺'), - (0x104D3, 'M', u'𐓻'), + (0x104B0, 'M', '𐓘'), + (0x104B1, 'M', '𐓙'), + (0x104B2, 'M', '𐓚'), + (0x104B3, 'M', '𐓛'), + (0x104B4, 'M', '𐓜'), + (0x104B5, 'M', '𐓝'), + ] + +def _seg_54() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x104B6, 'M', '𐓞'), + (0x104B7, 'M', '𐓟'), + (0x104B8, 'M', '𐓠'), + (0x104B9, 'M', '𐓡'), + (0x104BA, 'M', '𐓢'), + (0x104BB, 'M', '𐓣'), + (0x104BC, 'M', '𐓤'), + (0x104BD, 'M', '𐓥'), + (0x104BE, 'M', '𐓦'), + (0x104BF, 'M', '𐓧'), + (0x104C0, 'M', '𐓨'), + (0x104C1, 'M', '𐓩'), + (0x104C2, 'M', '𐓪'), + (0x104C3, 'M', '𐓫'), + (0x104C4, 'M', '𐓬'), + (0x104C5, 'M', '𐓭'), + (0x104C6, 'M', '𐓮'), + (0x104C7, 'M', '𐓯'), + (0x104C8, 'M', '𐓰'), + (0x104C9, 'M', '𐓱'), + (0x104CA, 'M', '𐓲'), + (0x104CB, 'M', '𐓳'), + (0x104CC, 'M', '𐓴'), + (0x104CD, 'M', '𐓵'), + (0x104CE, 'M', '𐓶'), + (0x104CF, 'M', '𐓷'), + (0x104D0, 'M', '𐓸'), + (0x104D1, 'M', '𐓹'), + (0x104D2, 'M', '𐓺'), + (0x104D3, 'M', '𐓻'), (0x104D4, 'X'), (0x104D8, 'V'), (0x104FC, 'X'), @@ -5605,13 +5664,123 @@ def _seg_53(): (0x10530, 'V'), (0x10564, 'X'), (0x1056F, 'V'), - (0x10570, 'X'), + (0x10570, 'M', '𐖗'), + (0x10571, 'M', '𐖘'), + (0x10572, 'M', '𐖙'), + (0x10573, 'M', '𐖚'), + (0x10574, 'M', '𐖛'), + (0x10575, 'M', '𐖜'), + (0x10576, 'M', '𐖝'), + (0x10577, 'M', '𐖞'), + (0x10578, 'M', '𐖟'), + (0x10579, 'M', '𐖠'), + (0x1057A, 'M', '𐖡'), + (0x1057B, 'X'), + (0x1057C, 'M', '𐖣'), + (0x1057D, 'M', '𐖤'), + (0x1057E, 'M', '𐖥'), + (0x1057F, 'M', '𐖦'), + (0x10580, 'M', '𐖧'), + (0x10581, 'M', '𐖨'), + (0x10582, 'M', '𐖩'), + (0x10583, 'M', '𐖪'), + (0x10584, 'M', '𐖫'), + (0x10585, 'M', '𐖬'), + (0x10586, 'M', '𐖭'), + (0x10587, 'M', '𐖮'), + (0x10588, 'M', '𐖯'), + (0x10589, 'M', '𐖰'), + (0x1058A, 'M', '𐖱'), + (0x1058B, 'X'), + (0x1058C, 'M', '𐖳'), + (0x1058D, 'M', '𐖴'), + (0x1058E, 'M', '𐖵'), + (0x1058F, 'M', '𐖶'), + (0x10590, 'M', '𐖷'), + (0x10591, 'M', '𐖸'), + (0x10592, 'M', '𐖹'), + (0x10593, 'X'), + (0x10594, 'M', '𐖻'), + (0x10595, 'M', '𐖼'), + (0x10596, 'X'), + (0x10597, 'V'), + (0x105A2, 'X'), + (0x105A3, 'V'), + (0x105B2, 'X'), + (0x105B3, 'V'), + (0x105BA, 'X'), + (0x105BB, 'V'), + (0x105BD, 'X'), (0x10600, 'V'), (0x10737, 'X'), (0x10740, 'V'), (0x10756, 'X'), (0x10760, 'V'), (0x10768, 'X'), + (0x10780, 'V'), + (0x10781, 'M', 'ː'), + (0x10782, 'M', 'ˑ'), + (0x10783, 'M', 'æ'), + (0x10784, 'M', 'ʙ'), + (0x10785, 'M', 'ɓ'), + (0x10786, 'X'), + (0x10787, 'M', 'ʣ'), + (0x10788, 'M', 'ꭦ'), + ] + +def _seg_55() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x10789, 'M', 'ʥ'), + (0x1078A, 'M', 'ʤ'), + (0x1078B, 'M', 'ɖ'), + (0x1078C, 'M', 'ɗ'), + (0x1078D, 'M', 'ᶑ'), + (0x1078E, 'M', 'ɘ'), + 
(0x1078F, 'M', 'ɞ'), + (0x10790, 'M', 'ʩ'), + (0x10791, 'M', 'ɤ'), + (0x10792, 'M', 'ɢ'), + (0x10793, 'M', 'ɠ'), + (0x10794, 'M', 'ʛ'), + (0x10795, 'M', 'ħ'), + (0x10796, 'M', 'ʜ'), + (0x10797, 'M', 'ɧ'), + (0x10798, 'M', 'ʄ'), + (0x10799, 'M', 'ʪ'), + (0x1079A, 'M', 'ʫ'), + (0x1079B, 'M', 'ɬ'), + (0x1079C, 'M', '𝼄'), + (0x1079D, 'M', 'ꞎ'), + (0x1079E, 'M', 'ɮ'), + (0x1079F, 'M', '𝼅'), + (0x107A0, 'M', 'ʎ'), + (0x107A1, 'M', '𝼆'), + (0x107A2, 'M', 'ø'), + (0x107A3, 'M', 'ɶ'), + (0x107A4, 'M', 'ɷ'), + (0x107A5, 'M', 'q'), + (0x107A6, 'M', 'ɺ'), + (0x107A7, 'M', '𝼈'), + (0x107A8, 'M', 'ɽ'), + (0x107A9, 'M', 'ɾ'), + (0x107AA, 'M', 'ʀ'), + (0x107AB, 'M', 'ʨ'), + (0x107AC, 'M', 'ʦ'), + (0x107AD, 'M', 'ꭧ'), + (0x107AE, 'M', 'ʧ'), + (0x107AF, 'M', 'ʈ'), + (0x107B0, 'M', 'ⱱ'), + (0x107B1, 'X'), + (0x107B2, 'M', 'ʏ'), + (0x107B3, 'M', 'ʡ'), + (0x107B4, 'M', 'ʢ'), + (0x107B5, 'M', 'ʘ'), + (0x107B6, 'M', 'ǀ'), + (0x107B7, 'M', 'ǁ'), + (0x107B8, 'M', 'ǂ'), + (0x107B9, 'M', '𝼊'), + (0x107BA, 'M', '𝼞'), + (0x107BB, 'X'), (0x10800, 'V'), (0x10806, 'X'), (0x10808, 'V'), @@ -5619,10 +5788,6 @@ def _seg_53(): (0x1080A, 'V'), (0x10836, 'X'), (0x10837, 'V'), - ] - -def _seg_54(): - return [ (0x10839, 'X'), (0x1083C, 'V'), (0x1083D, 'X'), @@ -5665,6 +5830,10 @@ def _seg_54(): (0x10A60, 'V'), (0x10AA0, 'X'), (0x10AC0, 'V'), + ] + +def _seg_56() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x10AE7, 'X'), (0x10AEB, 'V'), (0x10AF7, 'X'), @@ -5682,61 +5851,57 @@ def _seg_54(): (0x10BB0, 'X'), (0x10C00, 'V'), (0x10C49, 'X'), - (0x10C80, 'M', u'𐳀'), - (0x10C81, 'M', u'𐳁'), - (0x10C82, 'M', u'𐳂'), - (0x10C83, 'M', u'𐳃'), - (0x10C84, 'M', u'𐳄'), - (0x10C85, 'M', u'𐳅'), - (0x10C86, 'M', u'𐳆'), - (0x10C87, 'M', u'𐳇'), - (0x10C88, 'M', u'𐳈'), - (0x10C89, 'M', u'𐳉'), - (0x10C8A, 'M', u'𐳊'), - (0x10C8B, 'M', u'𐳋'), - (0x10C8C, 'M', u'𐳌'), - (0x10C8D, 'M', u'𐳍'), - (0x10C8E, 'M', u'𐳎'), - (0x10C8F, 'M', u'𐳏'), - (0x10C90, 'M', u'𐳐'), - (0x10C91, 'M', u'𐳑'), - (0x10C92, 'M', u'𐳒'), - (0x10C93, 'M', u'𐳓'), - (0x10C94, 'M', u'𐳔'), - (0x10C95, 'M', u'𐳕'), - (0x10C96, 'M', u'𐳖'), - (0x10C97, 'M', u'𐳗'), - (0x10C98, 'M', u'𐳘'), - (0x10C99, 'M', u'𐳙'), - (0x10C9A, 'M', u'𐳚'), - (0x10C9B, 'M', u'𐳛'), - (0x10C9C, 'M', u'𐳜'), - (0x10C9D, 'M', u'𐳝'), - (0x10C9E, 'M', u'𐳞'), - (0x10C9F, 'M', u'𐳟'), - (0x10CA0, 'M', u'𐳠'), - (0x10CA1, 'M', u'𐳡'), - (0x10CA2, 'M', u'𐳢'), - (0x10CA3, 'M', u'𐳣'), - (0x10CA4, 'M', u'𐳤'), - (0x10CA5, 'M', u'𐳥'), - (0x10CA6, 'M', u'𐳦'), - (0x10CA7, 'M', u'𐳧'), - (0x10CA8, 'M', u'𐳨'), - ] - -def _seg_55(): - return [ - (0x10CA9, 'M', u'𐳩'), - (0x10CAA, 'M', u'𐳪'), - (0x10CAB, 'M', u'𐳫'), - (0x10CAC, 'M', u'𐳬'), - (0x10CAD, 'M', u'𐳭'), - (0x10CAE, 'M', u'𐳮'), - (0x10CAF, 'M', u'𐳯'), - (0x10CB0, 'M', u'𐳰'), - (0x10CB1, 'M', u'𐳱'), - (0x10CB2, 'M', u'𐳲'), + (0x10C80, 'M', '𐳀'), + (0x10C81, 'M', '𐳁'), + (0x10C82, 'M', '𐳂'), + (0x10C83, 'M', '𐳃'), + (0x10C84, 'M', '𐳄'), + (0x10C85, 'M', '𐳅'), + (0x10C86, 'M', '𐳆'), + (0x10C87, 'M', '𐳇'), + (0x10C88, 'M', '𐳈'), + (0x10C89, 'M', '𐳉'), + (0x10C8A, 'M', '𐳊'), + (0x10C8B, 'M', '𐳋'), + (0x10C8C, 'M', '𐳌'), + (0x10C8D, 'M', '𐳍'), + (0x10C8E, 'M', '𐳎'), + (0x10C8F, 'M', '𐳏'), + (0x10C90, 'M', '𐳐'), + (0x10C91, 'M', '𐳑'), + (0x10C92, 'M', '𐳒'), + (0x10C93, 'M', '𐳓'), + (0x10C94, 'M', '𐳔'), + (0x10C95, 'M', '𐳕'), + (0x10C96, 'M', '𐳖'), + (0x10C97, 'M', '𐳗'), + (0x10C98, 'M', '𐳘'), + (0x10C99, 'M', '𐳙'), + (0x10C9A, 'M', '𐳚'), + (0x10C9B, 'M', '𐳛'), + (0x10C9C, 'M', '𐳜'), + (0x10C9D, 'M', '𐳝'), + (0x10C9E, 'M', '𐳞'), + (0x10C9F, 'M', '𐳟'), + (0x10CA0, 
'M', '𐳠'), + (0x10CA1, 'M', '𐳡'), + (0x10CA2, 'M', '𐳢'), + (0x10CA3, 'M', '𐳣'), + (0x10CA4, 'M', '𐳤'), + (0x10CA5, 'M', '𐳥'), + (0x10CA6, 'M', '𐳦'), + (0x10CA7, 'M', '𐳧'), + (0x10CA8, 'M', '𐳨'), + (0x10CA9, 'M', '𐳩'), + (0x10CAA, 'M', '𐳪'), + (0x10CAB, 'M', '𐳫'), + (0x10CAC, 'M', '𐳬'), + (0x10CAD, 'M', '𐳭'), + (0x10CAE, 'M', '𐳮'), + (0x10CAF, 'M', '𐳯'), + (0x10CB0, 'M', '𐳰'), + (0x10CB1, 'M', '𐳱'), + (0x10CB2, 'M', '𐳲'), (0x10CB3, 'X'), (0x10CC0, 'V'), (0x10CF3, 'X'), @@ -5746,18 +5911,34 @@ def _seg_55(): (0x10D3A, 'X'), (0x10E60, 'V'), (0x10E7F, 'X'), - (0x10F00, 'V'), + (0x10E80, 'V'), + (0x10EAA, 'X'), + (0x10EAB, 'V'), + (0x10EAE, 'X'), + (0x10EB0, 'V'), + (0x10EB2, 'X'), + (0x10EFD, 'V'), (0x10F28, 'X'), (0x10F30, 'V'), (0x10F5A, 'X'), + (0x10F70, 'V'), + (0x10F8A, 'X'), + (0x10FB0, 'V'), + (0x10FCC, 'X'), + (0x10FE0, 'V'), + (0x10FF7, 'X'), (0x11000, 'V'), (0x1104E, 'X'), (0x11052, 'V'), - (0x11070, 'X'), + (0x11076, 'X'), (0x1107F, 'V'), (0x110BD, 'X'), (0x110BE, 'V'), - (0x110C2, 'X'), + ] + +def _seg_57() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x110C3, 'X'), (0x110D0, 'V'), (0x110E9, 'X'), (0x110F0, 'V'), @@ -5765,19 +5946,17 @@ def _seg_55(): (0x11100, 'V'), (0x11135, 'X'), (0x11136, 'V'), - (0x11147, 'X'), + (0x11148, 'X'), (0x11150, 'V'), (0x11177, 'X'), (0x11180, 'V'), - (0x111CE, 'X'), - (0x111D0, 'V'), (0x111E0, 'X'), (0x111E1, 'V'), (0x111F5, 'X'), (0x11200, 'V'), (0x11212, 'X'), (0x11213, 'V'), - (0x1123F, 'X'), + (0x11242, 'X'), (0x11280, 'V'), (0x11287, 'X'), (0x11288, 'V'), @@ -5823,15 +6002,9 @@ def _seg_55(): (0x11370, 'V'), (0x11375, 'X'), (0x11400, 'V'), - (0x1145A, 'X'), - (0x1145B, 'V'), (0x1145C, 'X'), (0x1145D, 'V'), - ] - -def _seg_56(): - return [ - (0x1145F, 'X'), + (0x11462, 'X'), (0x11480, 'V'), (0x114C8, 'X'), (0x114D0, 'V'), @@ -5847,7 +6020,7 @@ def _seg_56(): (0x11660, 'V'), (0x1166D, 'X'), (0x11680, 'V'), - (0x116B8, 'X'), + (0x116BA, 'X'), (0x116C0, 'V'), (0x116CA, 'X'), (0x11700, 'V'), @@ -5855,53 +6028,77 @@ def _seg_56(): (0x1171D, 'V'), (0x1172C, 'X'), (0x11730, 'V'), - (0x11740, 'X'), + (0x11747, 'X'), (0x11800, 'V'), (0x1183C, 'X'), - (0x118A0, 'M', u'𑣀'), - (0x118A1, 'M', u'𑣁'), - (0x118A2, 'M', u'𑣂'), - (0x118A3, 'M', u'𑣃'), - (0x118A4, 'M', u'𑣄'), - (0x118A5, 'M', u'𑣅'), - (0x118A6, 'M', u'𑣆'), - (0x118A7, 'M', u'𑣇'), - (0x118A8, 'M', u'𑣈'), - (0x118A9, 'M', u'𑣉'), - (0x118AA, 'M', u'𑣊'), - (0x118AB, 'M', u'𑣋'), - (0x118AC, 'M', u'𑣌'), - (0x118AD, 'M', u'𑣍'), - (0x118AE, 'M', u'𑣎'), - (0x118AF, 'M', u'𑣏'), - (0x118B0, 'M', u'𑣐'), - (0x118B1, 'M', u'𑣑'), - (0x118B2, 'M', u'𑣒'), - (0x118B3, 'M', u'𑣓'), - (0x118B4, 'M', u'𑣔'), - (0x118B5, 'M', u'𑣕'), - (0x118B6, 'M', u'𑣖'), - (0x118B7, 'M', u'𑣗'), - (0x118B8, 'M', u'𑣘'), - (0x118B9, 'M', u'𑣙'), - (0x118BA, 'M', u'𑣚'), - (0x118BB, 'M', u'𑣛'), - (0x118BC, 'M', u'𑣜'), - (0x118BD, 'M', u'𑣝'), - (0x118BE, 'M', u'𑣞'), - (0x118BF, 'M', u'𑣟'), + (0x118A0, 'M', '𑣀'), + (0x118A1, 'M', '𑣁'), + (0x118A2, 'M', '𑣂'), + (0x118A3, 'M', '𑣃'), + (0x118A4, 'M', '𑣄'), + (0x118A5, 'M', '𑣅'), + (0x118A6, 'M', '𑣆'), + ] + +def _seg_58() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x118A7, 'M', '𑣇'), + (0x118A8, 'M', '𑣈'), + (0x118A9, 'M', '𑣉'), + (0x118AA, 'M', '𑣊'), + (0x118AB, 'M', '𑣋'), + (0x118AC, 'M', '𑣌'), + (0x118AD, 'M', '𑣍'), + (0x118AE, 'M', '𑣎'), + (0x118AF, 'M', '𑣏'), + (0x118B0, 'M', '𑣐'), + (0x118B1, 'M', '𑣑'), + (0x118B2, 'M', '𑣒'), + (0x118B3, 'M', '𑣓'), + (0x118B4, 'M', '𑣔'), + (0x118B5, 'M', '𑣕'), + (0x118B6, 'M', '𑣖'), + (0x118B7, 'M', '𑣗'), + 
(0x118B8, 'M', '𑣘'), + (0x118B9, 'M', '𑣙'), + (0x118BA, 'M', '𑣚'), + (0x118BB, 'M', '𑣛'), + (0x118BC, 'M', '𑣜'), + (0x118BD, 'M', '𑣝'), + (0x118BE, 'M', '𑣞'), + (0x118BF, 'M', '𑣟'), (0x118C0, 'V'), (0x118F3, 'X'), (0x118FF, 'V'), - (0x11900, 'X'), + (0x11907, 'X'), + (0x11909, 'V'), + (0x1190A, 'X'), + (0x1190C, 'V'), + (0x11914, 'X'), + (0x11915, 'V'), + (0x11917, 'X'), + (0x11918, 'V'), + (0x11936, 'X'), + (0x11937, 'V'), + (0x11939, 'X'), + (0x1193B, 'V'), + (0x11947, 'X'), + (0x11950, 'V'), + (0x1195A, 'X'), + (0x119A0, 'V'), + (0x119A8, 'X'), + (0x119AA, 'V'), + (0x119D8, 'X'), + (0x119DA, 'V'), + (0x119E5, 'X'), (0x11A00, 'V'), (0x11A48, 'X'), (0x11A50, 'V'), - (0x11A84, 'X'), - (0x11A86, 'V'), (0x11AA3, 'X'), - (0x11AC0, 'V'), + (0x11AB0, 'V'), (0x11AF9, 'X'), + (0x11B00, 'V'), + (0x11B0A, 'X'), (0x11C00, 'V'), (0x11C09, 'X'), (0x11C0A, 'V'), @@ -5931,10 +6128,6 @@ def _seg_56(): (0x11D50, 'V'), (0x11D5A, 'X'), (0x11D60, 'V'), - ] - -def _seg_57(): - return [ (0x11D66, 'X'), (0x11D67, 'V'), (0x11D69, 'X'), @@ -5948,7 +6141,21 @@ def _seg_57(): (0x11DAA, 'X'), (0x11EE0, 'V'), (0x11EF9, 'X'), - (0x12000, 'V'), + (0x11F00, 'V'), + ] + +def _seg_59() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x11F11, 'X'), + (0x11F12, 'V'), + (0x11F3B, 'X'), + (0x11F3E, 'V'), + (0x11F5A, 'X'), + (0x11FB0, 'V'), + (0x11FB1, 'X'), + (0x11FC0, 'V'), + (0x11FF2, 'X'), + (0x11FFF, 'V'), (0x1239A, 'X'), (0x12400, 'V'), (0x1246F, 'X'), @@ -5956,8 +6163,12 @@ def _seg_57(): (0x12475, 'X'), (0x12480, 'V'), (0x12544, 'X'), + (0x12F90, 'V'), + (0x12FF3, 'X'), (0x13000, 'V'), - (0x1342F, 'X'), + (0x13430, 'X'), + (0x13440, 'V'), + (0x13456, 'X'), (0x14400, 'V'), (0x14647, 'X'), (0x16800, 'V'), @@ -5967,7 +6178,9 @@ def _seg_57(): (0x16A60, 'V'), (0x16A6A, 'X'), (0x16A6E, 'V'), - (0x16A70, 'X'), + (0x16ABF, 'X'), + (0x16AC0, 'V'), + (0x16ACA, 'X'), (0x16AD0, 'V'), (0x16AEE, 'X'), (0x16AF0, 'V'), @@ -5982,22 +6195,76 @@ def _seg_57(): (0x16B78, 'X'), (0x16B7D, 'V'), (0x16B90, 'X'), + (0x16E40, 'M', '𖹠'), + (0x16E41, 'M', '𖹡'), + (0x16E42, 'M', '𖹢'), + (0x16E43, 'M', '𖹣'), + (0x16E44, 'M', '𖹤'), + (0x16E45, 'M', '𖹥'), + (0x16E46, 'M', '𖹦'), + (0x16E47, 'M', '𖹧'), + (0x16E48, 'M', '𖹨'), + (0x16E49, 'M', '𖹩'), + (0x16E4A, 'M', '𖹪'), + (0x16E4B, 'M', '𖹫'), + (0x16E4C, 'M', '𖹬'), + (0x16E4D, 'M', '𖹭'), + (0x16E4E, 'M', '𖹮'), + (0x16E4F, 'M', '𖹯'), + (0x16E50, 'M', '𖹰'), + (0x16E51, 'M', '𖹱'), + (0x16E52, 'M', '𖹲'), + (0x16E53, 'M', '𖹳'), + (0x16E54, 'M', '𖹴'), + (0x16E55, 'M', '𖹵'), + (0x16E56, 'M', '𖹶'), + (0x16E57, 'M', '𖹷'), + (0x16E58, 'M', '𖹸'), + (0x16E59, 'M', '𖹹'), + (0x16E5A, 'M', '𖹺'), + (0x16E5B, 'M', '𖹻'), + (0x16E5C, 'M', '𖹼'), + (0x16E5D, 'M', '𖹽'), + (0x16E5E, 'M', '𖹾'), + (0x16E5F, 'M', '𖹿'), (0x16E60, 'V'), (0x16E9B, 'X'), (0x16F00, 'V'), - (0x16F45, 'X'), - (0x16F50, 'V'), - (0x16F7F, 'X'), + (0x16F4B, 'X'), + (0x16F4F, 'V'), + (0x16F88, 'X'), (0x16F8F, 'V'), (0x16FA0, 'X'), (0x16FE0, 'V'), - (0x16FE2, 'X'), + (0x16FE5, 'X'), + (0x16FF0, 'V'), + (0x16FF2, 'X'), (0x17000, 'V'), - (0x187F2, 'X'), + (0x187F8, 'X'), (0x18800, 'V'), - (0x18AF3, 'X'), + (0x18CD6, 'X'), + (0x18D00, 'V'), + (0x18D09, 'X'), + (0x1AFF0, 'V'), + ] + +def _seg_60() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1AFF4, 'X'), + (0x1AFF5, 'V'), + (0x1AFFC, 'X'), + (0x1AFFD, 'V'), + (0x1AFFF, 'X'), (0x1B000, 'V'), - (0x1B11F, 'X'), + (0x1B123, 'X'), + (0x1B132, 'V'), + (0x1B133, 'X'), + (0x1B150, 'V'), + (0x1B153, 'X'), + (0x1B155, 'V'), + (0x1B156, 'X'), + (0x1B164, 'V'), + (0x1B168, 
'X'), (0x1B170, 'V'), (0x1B2FC, 'X'), (0x1BC00, 'V'), @@ -6011,1097 +6278,1105 @@ def _seg_57(): (0x1BC9C, 'V'), (0x1BCA0, 'I'), (0x1BCA4, 'X'), + (0x1CF00, 'V'), + (0x1CF2E, 'X'), + (0x1CF30, 'V'), + (0x1CF47, 'X'), + (0x1CF50, 'V'), + (0x1CFC4, 'X'), (0x1D000, 'V'), (0x1D0F6, 'X'), (0x1D100, 'V'), (0x1D127, 'X'), (0x1D129, 'V'), - (0x1D15E, 'M', u'𝅗𝅥'), - (0x1D15F, 'M', u'𝅘𝅥'), - (0x1D160, 'M', u'𝅘𝅥𝅮'), - (0x1D161, 'M', u'𝅘𝅥𝅯'), - (0x1D162, 'M', u'𝅘𝅥𝅰'), - (0x1D163, 'M', u'𝅘𝅥𝅱'), - (0x1D164, 'M', u'𝅘𝅥𝅲'), + (0x1D15E, 'M', '𝅗𝅥'), + (0x1D15F, 'M', '𝅘𝅥'), + (0x1D160, 'M', '𝅘𝅥𝅮'), + (0x1D161, 'M', '𝅘𝅥𝅯'), + (0x1D162, 'M', '𝅘𝅥𝅰'), + (0x1D163, 'M', '𝅘𝅥𝅱'), + (0x1D164, 'M', '𝅘𝅥𝅲'), (0x1D165, 'V'), (0x1D173, 'X'), (0x1D17B, 'V'), - (0x1D1BB, 'M', u'𝆹𝅥'), - (0x1D1BC, 'M', u'𝆺𝅥'), - (0x1D1BD, 'M', u'𝆹𝅥𝅮'), - (0x1D1BE, 'M', u'𝆺𝅥𝅮'), - (0x1D1BF, 'M', u'𝆹𝅥𝅯'), - (0x1D1C0, 'M', u'𝆺𝅥𝅯'), + (0x1D1BB, 'M', '𝆹𝅥'), + (0x1D1BC, 'M', '𝆺𝅥'), + (0x1D1BD, 'M', '𝆹𝅥𝅮'), + (0x1D1BE, 'M', '𝆺𝅥𝅮'), + (0x1D1BF, 'M', '𝆹𝅥𝅯'), + (0x1D1C0, 'M', '𝆺𝅥𝅯'), (0x1D1C1, 'V'), - (0x1D1E9, 'X'), + (0x1D1EB, 'X'), (0x1D200, 'V'), - ] - -def _seg_58(): - return [ (0x1D246, 'X'), + (0x1D2C0, 'V'), + (0x1D2D4, 'X'), (0x1D2E0, 'V'), (0x1D2F4, 'X'), (0x1D300, 'V'), (0x1D357, 'X'), (0x1D360, 'V'), (0x1D379, 'X'), - (0x1D400, 'M', u'a'), - (0x1D401, 'M', u'b'), - (0x1D402, 'M', u'c'), - (0x1D403, 'M', u'd'), - (0x1D404, 'M', u'e'), - (0x1D405, 'M', u'f'), - (0x1D406, 'M', u'g'), - (0x1D407, 'M', u'h'), - (0x1D408, 'M', u'i'), - (0x1D409, 'M', u'j'), - (0x1D40A, 'M', u'k'), - (0x1D40B, 'M', u'l'), - (0x1D40C, 'M', u'm'), - (0x1D40D, 'M', u'n'), - (0x1D40E, 'M', u'o'), - (0x1D40F, 'M', u'p'), - (0x1D410, 'M', u'q'), - (0x1D411, 'M', u'r'), - (0x1D412, 'M', u's'), - (0x1D413, 'M', u't'), - (0x1D414, 'M', u'u'), - (0x1D415, 'M', u'v'), - (0x1D416, 'M', u'w'), - (0x1D417, 'M', u'x'), - (0x1D418, 'M', u'y'), - (0x1D419, 'M', u'z'), - (0x1D41A, 'M', u'a'), - (0x1D41B, 'M', u'b'), - (0x1D41C, 'M', u'c'), - (0x1D41D, 'M', u'd'), - (0x1D41E, 'M', u'e'), - (0x1D41F, 'M', u'f'), - (0x1D420, 'M', u'g'), - (0x1D421, 'M', u'h'), - (0x1D422, 'M', u'i'), - (0x1D423, 'M', u'j'), - (0x1D424, 'M', u'k'), - (0x1D425, 'M', u'l'), - (0x1D426, 'M', u'm'), - (0x1D427, 'M', u'n'), - (0x1D428, 'M', u'o'), - (0x1D429, 'M', u'p'), - (0x1D42A, 'M', u'q'), - (0x1D42B, 'M', u'r'), - (0x1D42C, 'M', u's'), - (0x1D42D, 'M', u't'), - (0x1D42E, 'M', u'u'), - (0x1D42F, 'M', u'v'), - (0x1D430, 'M', u'w'), - (0x1D431, 'M', u'x'), - (0x1D432, 'M', u'y'), - (0x1D433, 'M', u'z'), - (0x1D434, 'M', u'a'), - (0x1D435, 'M', u'b'), - (0x1D436, 'M', u'c'), - (0x1D437, 'M', u'd'), - (0x1D438, 'M', u'e'), - (0x1D439, 'M', u'f'), - (0x1D43A, 'M', u'g'), - (0x1D43B, 'M', u'h'), - (0x1D43C, 'M', u'i'), - (0x1D43D, 'M', u'j'), - (0x1D43E, 'M', u'k'), - (0x1D43F, 'M', u'l'), - (0x1D440, 'M', u'm'), - (0x1D441, 'M', u'n'), - (0x1D442, 'M', u'o'), - (0x1D443, 'M', u'p'), - (0x1D444, 'M', u'q'), - (0x1D445, 'M', u'r'), - (0x1D446, 'M', u's'), - (0x1D447, 'M', u't'), - (0x1D448, 'M', u'u'), - (0x1D449, 'M', u'v'), - (0x1D44A, 'M', u'w'), - (0x1D44B, 'M', u'x'), - (0x1D44C, 'M', u'y'), - (0x1D44D, 'M', u'z'), - (0x1D44E, 'M', u'a'), - (0x1D44F, 'M', u'b'), - (0x1D450, 'M', u'c'), - (0x1D451, 'M', u'd'), - (0x1D452, 'M', u'e'), - (0x1D453, 'M', u'f'), - (0x1D454, 'M', u'g'), + (0x1D400, 'M', 'a'), + (0x1D401, 'M', 'b'), + (0x1D402, 'M', 'c'), + (0x1D403, 'M', 'd'), + (0x1D404, 'M', 'e'), + (0x1D405, 'M', 'f'), + (0x1D406, 'M', 'g'), + (0x1D407, 'M', 'h'), + (0x1D408, 'M', 'i'), + (0x1D409, 'M', 'j'), 
+ (0x1D40A, 'M', 'k'), + (0x1D40B, 'M', 'l'), + (0x1D40C, 'M', 'm'), + (0x1D40D, 'M', 'n'), + (0x1D40E, 'M', 'o'), + (0x1D40F, 'M', 'p'), + (0x1D410, 'M', 'q'), + (0x1D411, 'M', 'r'), + (0x1D412, 'M', 's'), + (0x1D413, 'M', 't'), + (0x1D414, 'M', 'u'), + (0x1D415, 'M', 'v'), + (0x1D416, 'M', 'w'), + (0x1D417, 'M', 'x'), + (0x1D418, 'M', 'y'), + (0x1D419, 'M', 'z'), + (0x1D41A, 'M', 'a'), + (0x1D41B, 'M', 'b'), + (0x1D41C, 'M', 'c'), + (0x1D41D, 'M', 'd'), + (0x1D41E, 'M', 'e'), + (0x1D41F, 'M', 'f'), + (0x1D420, 'M', 'g'), + ] + +def _seg_61() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1D421, 'M', 'h'), + (0x1D422, 'M', 'i'), + (0x1D423, 'M', 'j'), + (0x1D424, 'M', 'k'), + (0x1D425, 'M', 'l'), + (0x1D426, 'M', 'm'), + (0x1D427, 'M', 'n'), + (0x1D428, 'M', 'o'), + (0x1D429, 'M', 'p'), + (0x1D42A, 'M', 'q'), + (0x1D42B, 'M', 'r'), + (0x1D42C, 'M', 's'), + (0x1D42D, 'M', 't'), + (0x1D42E, 'M', 'u'), + (0x1D42F, 'M', 'v'), + (0x1D430, 'M', 'w'), + (0x1D431, 'M', 'x'), + (0x1D432, 'M', 'y'), + (0x1D433, 'M', 'z'), + (0x1D434, 'M', 'a'), + (0x1D435, 'M', 'b'), + (0x1D436, 'M', 'c'), + (0x1D437, 'M', 'd'), + (0x1D438, 'M', 'e'), + (0x1D439, 'M', 'f'), + (0x1D43A, 'M', 'g'), + (0x1D43B, 'M', 'h'), + (0x1D43C, 'M', 'i'), + (0x1D43D, 'M', 'j'), + (0x1D43E, 'M', 'k'), + (0x1D43F, 'M', 'l'), + (0x1D440, 'M', 'm'), + (0x1D441, 'M', 'n'), + (0x1D442, 'M', 'o'), + (0x1D443, 'M', 'p'), + (0x1D444, 'M', 'q'), + (0x1D445, 'M', 'r'), + (0x1D446, 'M', 's'), + (0x1D447, 'M', 't'), + (0x1D448, 'M', 'u'), + (0x1D449, 'M', 'v'), + (0x1D44A, 'M', 'w'), + (0x1D44B, 'M', 'x'), + (0x1D44C, 'M', 'y'), + (0x1D44D, 'M', 'z'), + (0x1D44E, 'M', 'a'), + (0x1D44F, 'M', 'b'), + (0x1D450, 'M', 'c'), + (0x1D451, 'M', 'd'), + (0x1D452, 'M', 'e'), + (0x1D453, 'M', 'f'), + (0x1D454, 'M', 'g'), (0x1D455, 'X'), - (0x1D456, 'M', u'i'), - (0x1D457, 'M', u'j'), - (0x1D458, 'M', u'k'), - (0x1D459, 'M', u'l'), - (0x1D45A, 'M', u'm'), - (0x1D45B, 'M', u'n'), - (0x1D45C, 'M', u'o'), + (0x1D456, 'M', 'i'), + (0x1D457, 'M', 'j'), + (0x1D458, 'M', 'k'), + (0x1D459, 'M', 'l'), + (0x1D45A, 'M', 'm'), + (0x1D45B, 'M', 'n'), + (0x1D45C, 'M', 'o'), + (0x1D45D, 'M', 'p'), + (0x1D45E, 'M', 'q'), + (0x1D45F, 'M', 'r'), + (0x1D460, 'M', 's'), + (0x1D461, 'M', 't'), + (0x1D462, 'M', 'u'), + (0x1D463, 'M', 'v'), + (0x1D464, 'M', 'w'), + (0x1D465, 'M', 'x'), + (0x1D466, 'M', 'y'), + (0x1D467, 'M', 'z'), + (0x1D468, 'M', 'a'), + (0x1D469, 'M', 'b'), + (0x1D46A, 'M', 'c'), + (0x1D46B, 'M', 'd'), + (0x1D46C, 'M', 'e'), + (0x1D46D, 'M', 'f'), + (0x1D46E, 'M', 'g'), + (0x1D46F, 'M', 'h'), + (0x1D470, 'M', 'i'), + (0x1D471, 'M', 'j'), + (0x1D472, 'M', 'k'), + (0x1D473, 'M', 'l'), + (0x1D474, 'M', 'm'), + (0x1D475, 'M', 'n'), + (0x1D476, 'M', 'o'), + (0x1D477, 'M', 'p'), + (0x1D478, 'M', 'q'), + (0x1D479, 'M', 'r'), + (0x1D47A, 'M', 's'), + (0x1D47B, 'M', 't'), + (0x1D47C, 'M', 'u'), + (0x1D47D, 'M', 'v'), + (0x1D47E, 'M', 'w'), + (0x1D47F, 'M', 'x'), + (0x1D480, 'M', 'y'), + (0x1D481, 'M', 'z'), + (0x1D482, 'M', 'a'), + (0x1D483, 'M', 'b'), + (0x1D484, 'M', 'c'), ] -def _seg_59(): +def _seg_62() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: return [ - (0x1D45D, 'M', u'p'), - (0x1D45E, 'M', u'q'), - (0x1D45F, 'M', u'r'), - (0x1D460, 'M', u's'), - (0x1D461, 'M', u't'), - (0x1D462, 'M', u'u'), - (0x1D463, 'M', u'v'), - (0x1D464, 'M', u'w'), - (0x1D465, 'M', u'x'), - (0x1D466, 'M', u'y'), - (0x1D467, 'M', u'z'), - (0x1D468, 'M', u'a'), - (0x1D469, 'M', u'b'), - (0x1D46A, 'M', u'c'), - (0x1D46B, 'M', u'd'), - (0x1D46C, 'M', 
u'e'), - (0x1D46D, 'M', u'f'), - (0x1D46E, 'M', u'g'), - (0x1D46F, 'M', u'h'), - (0x1D470, 'M', u'i'), - (0x1D471, 'M', u'j'), - (0x1D472, 'M', u'k'), - (0x1D473, 'M', u'l'), - (0x1D474, 'M', u'm'), - (0x1D475, 'M', u'n'), - (0x1D476, 'M', u'o'), - (0x1D477, 'M', u'p'), - (0x1D478, 'M', u'q'), - (0x1D479, 'M', u'r'), - (0x1D47A, 'M', u's'), - (0x1D47B, 'M', u't'), - (0x1D47C, 'M', u'u'), - (0x1D47D, 'M', u'v'), - (0x1D47E, 'M', u'w'), - (0x1D47F, 'M', u'x'), - (0x1D480, 'M', u'y'), - (0x1D481, 'M', u'z'), - (0x1D482, 'M', u'a'), - (0x1D483, 'M', u'b'), - (0x1D484, 'M', u'c'), - (0x1D485, 'M', u'd'), - (0x1D486, 'M', u'e'), - (0x1D487, 'M', u'f'), - (0x1D488, 'M', u'g'), - (0x1D489, 'M', u'h'), - (0x1D48A, 'M', u'i'), - (0x1D48B, 'M', u'j'), - (0x1D48C, 'M', u'k'), - (0x1D48D, 'M', u'l'), - (0x1D48E, 'M', u'm'), - (0x1D48F, 'M', u'n'), - (0x1D490, 'M', u'o'), - (0x1D491, 'M', u'p'), - (0x1D492, 'M', u'q'), - (0x1D493, 'M', u'r'), - (0x1D494, 'M', u's'), - (0x1D495, 'M', u't'), - (0x1D496, 'M', u'u'), - (0x1D497, 'M', u'v'), - (0x1D498, 'M', u'w'), - (0x1D499, 'M', u'x'), - (0x1D49A, 'M', u'y'), - (0x1D49B, 'M', u'z'), - (0x1D49C, 'M', u'a'), + (0x1D485, 'M', 'd'), + (0x1D486, 'M', 'e'), + (0x1D487, 'M', 'f'), + (0x1D488, 'M', 'g'), + (0x1D489, 'M', 'h'), + (0x1D48A, 'M', 'i'), + (0x1D48B, 'M', 'j'), + (0x1D48C, 'M', 'k'), + (0x1D48D, 'M', 'l'), + (0x1D48E, 'M', 'm'), + (0x1D48F, 'M', 'n'), + (0x1D490, 'M', 'o'), + (0x1D491, 'M', 'p'), + (0x1D492, 'M', 'q'), + (0x1D493, 'M', 'r'), + (0x1D494, 'M', 's'), + (0x1D495, 'M', 't'), + (0x1D496, 'M', 'u'), + (0x1D497, 'M', 'v'), + (0x1D498, 'M', 'w'), + (0x1D499, 'M', 'x'), + (0x1D49A, 'M', 'y'), + (0x1D49B, 'M', 'z'), + (0x1D49C, 'M', 'a'), (0x1D49D, 'X'), - (0x1D49E, 'M', u'c'), - (0x1D49F, 'M', u'd'), + (0x1D49E, 'M', 'c'), + (0x1D49F, 'M', 'd'), (0x1D4A0, 'X'), - (0x1D4A2, 'M', u'g'), + (0x1D4A2, 'M', 'g'), (0x1D4A3, 'X'), - (0x1D4A5, 'M', u'j'), - (0x1D4A6, 'M', u'k'), + (0x1D4A5, 'M', 'j'), + (0x1D4A6, 'M', 'k'), (0x1D4A7, 'X'), - (0x1D4A9, 'M', u'n'), - (0x1D4AA, 'M', u'o'), - (0x1D4AB, 'M', u'p'), - (0x1D4AC, 'M', u'q'), + (0x1D4A9, 'M', 'n'), + (0x1D4AA, 'M', 'o'), + (0x1D4AB, 'M', 'p'), + (0x1D4AC, 'M', 'q'), (0x1D4AD, 'X'), - (0x1D4AE, 'M', u's'), - (0x1D4AF, 'M', u't'), - (0x1D4B0, 'M', u'u'), - (0x1D4B1, 'M', u'v'), - (0x1D4B2, 'M', u'w'), - (0x1D4B3, 'M', u'x'), - (0x1D4B4, 'M', u'y'), - (0x1D4B5, 'M', u'z'), - (0x1D4B6, 'M', u'a'), - (0x1D4B7, 'M', u'b'), - (0x1D4B8, 'M', u'c'), - (0x1D4B9, 'M', u'd'), + (0x1D4AE, 'M', 's'), + (0x1D4AF, 'M', 't'), + (0x1D4B0, 'M', 'u'), + (0x1D4B1, 'M', 'v'), + (0x1D4B2, 'M', 'w'), + (0x1D4B3, 'M', 'x'), + (0x1D4B4, 'M', 'y'), + (0x1D4B5, 'M', 'z'), + (0x1D4B6, 'M', 'a'), + (0x1D4B7, 'M', 'b'), + (0x1D4B8, 'M', 'c'), + (0x1D4B9, 'M', 'd'), (0x1D4BA, 'X'), - (0x1D4BB, 'M', u'f'), + (0x1D4BB, 'M', 'f'), (0x1D4BC, 'X'), - (0x1D4BD, 'M', u'h'), - (0x1D4BE, 'M', u'i'), - (0x1D4BF, 'M', u'j'), - (0x1D4C0, 'M', u'k'), - (0x1D4C1, 'M', u'l'), - (0x1D4C2, 'M', u'm'), - (0x1D4C3, 'M', u'n'), + (0x1D4BD, 'M', 'h'), + (0x1D4BE, 'M', 'i'), + (0x1D4BF, 'M', 'j'), + (0x1D4C0, 'M', 'k'), + (0x1D4C1, 'M', 'l'), + (0x1D4C2, 'M', 'm'), + (0x1D4C3, 'M', 'n'), + (0x1D4C4, 'X'), + (0x1D4C5, 'M', 'p'), + (0x1D4C6, 'M', 'q'), + (0x1D4C7, 'M', 'r'), + (0x1D4C8, 'M', 's'), + (0x1D4C9, 'M', 't'), + (0x1D4CA, 'M', 'u'), + (0x1D4CB, 'M', 'v'), + (0x1D4CC, 'M', 'w'), + (0x1D4CD, 'M', 'x'), + (0x1D4CE, 'M', 'y'), + (0x1D4CF, 'M', 'z'), + (0x1D4D0, 'M', 'a'), + (0x1D4D1, 'M', 'b'), + (0x1D4D2, 'M', 'c'), + (0x1D4D3, 'M', 'd'), + 
(0x1D4D4, 'M', 'e'), + (0x1D4D5, 'M', 'f'), + (0x1D4D6, 'M', 'g'), + (0x1D4D7, 'M', 'h'), + (0x1D4D8, 'M', 'i'), + (0x1D4D9, 'M', 'j'), + (0x1D4DA, 'M', 'k'), + (0x1D4DB, 'M', 'l'), + (0x1D4DC, 'M', 'm'), + (0x1D4DD, 'M', 'n'), + (0x1D4DE, 'M', 'o'), + (0x1D4DF, 'M', 'p'), + (0x1D4E0, 'M', 'q'), + (0x1D4E1, 'M', 'r'), + (0x1D4E2, 'M', 's'), + (0x1D4E3, 'M', 't'), + (0x1D4E4, 'M', 'u'), + (0x1D4E5, 'M', 'v'), + (0x1D4E6, 'M', 'w'), + (0x1D4E7, 'M', 'x'), + (0x1D4E8, 'M', 'y'), + (0x1D4E9, 'M', 'z'), + (0x1D4EA, 'M', 'a'), + (0x1D4EB, 'M', 'b'), ] -def _seg_60(): +def _seg_63() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: return [ - (0x1D4C4, 'X'), - (0x1D4C5, 'M', u'p'), - (0x1D4C6, 'M', u'q'), - (0x1D4C7, 'M', u'r'), - (0x1D4C8, 'M', u's'), - (0x1D4C9, 'M', u't'), - (0x1D4CA, 'M', u'u'), - (0x1D4CB, 'M', u'v'), - (0x1D4CC, 'M', u'w'), - (0x1D4CD, 'M', u'x'), - (0x1D4CE, 'M', u'y'), - (0x1D4CF, 'M', u'z'), - (0x1D4D0, 'M', u'a'), - (0x1D4D1, 'M', u'b'), - (0x1D4D2, 'M', u'c'), - (0x1D4D3, 'M', u'd'), - (0x1D4D4, 'M', u'e'), - (0x1D4D5, 'M', u'f'), - (0x1D4D6, 'M', u'g'), - (0x1D4D7, 'M', u'h'), - (0x1D4D8, 'M', u'i'), - (0x1D4D9, 'M', u'j'), - (0x1D4DA, 'M', u'k'), - (0x1D4DB, 'M', u'l'), - (0x1D4DC, 'M', u'm'), - (0x1D4DD, 'M', u'n'), - (0x1D4DE, 'M', u'o'), - (0x1D4DF, 'M', u'p'), - (0x1D4E0, 'M', u'q'), - (0x1D4E1, 'M', u'r'), - (0x1D4E2, 'M', u's'), - (0x1D4E3, 'M', u't'), - (0x1D4E4, 'M', u'u'), - (0x1D4E5, 'M', u'v'), - (0x1D4E6, 'M', u'w'), - (0x1D4E7, 'M', u'x'), - (0x1D4E8, 'M', u'y'), - (0x1D4E9, 'M', u'z'), - (0x1D4EA, 'M', u'a'), - (0x1D4EB, 'M', u'b'), - (0x1D4EC, 'M', u'c'), - (0x1D4ED, 'M', u'd'), - (0x1D4EE, 'M', u'e'), - (0x1D4EF, 'M', u'f'), - (0x1D4F0, 'M', u'g'), - (0x1D4F1, 'M', u'h'), - (0x1D4F2, 'M', u'i'), - (0x1D4F3, 'M', u'j'), - (0x1D4F4, 'M', u'k'), - (0x1D4F5, 'M', u'l'), - (0x1D4F6, 'M', u'm'), - (0x1D4F7, 'M', u'n'), - (0x1D4F8, 'M', u'o'), - (0x1D4F9, 'M', u'p'), - (0x1D4FA, 'M', u'q'), - (0x1D4FB, 'M', u'r'), - (0x1D4FC, 'M', u's'), - (0x1D4FD, 'M', u't'), - (0x1D4FE, 'M', u'u'), - (0x1D4FF, 'M', u'v'), - (0x1D500, 'M', u'w'), - (0x1D501, 'M', u'x'), - (0x1D502, 'M', u'y'), - (0x1D503, 'M', u'z'), - (0x1D504, 'M', u'a'), - (0x1D505, 'M', u'b'), + (0x1D4EC, 'M', 'c'), + (0x1D4ED, 'M', 'd'), + (0x1D4EE, 'M', 'e'), + (0x1D4EF, 'M', 'f'), + (0x1D4F0, 'M', 'g'), + (0x1D4F1, 'M', 'h'), + (0x1D4F2, 'M', 'i'), + (0x1D4F3, 'M', 'j'), + (0x1D4F4, 'M', 'k'), + (0x1D4F5, 'M', 'l'), + (0x1D4F6, 'M', 'm'), + (0x1D4F7, 'M', 'n'), + (0x1D4F8, 'M', 'o'), + (0x1D4F9, 'M', 'p'), + (0x1D4FA, 'M', 'q'), + (0x1D4FB, 'M', 'r'), + (0x1D4FC, 'M', 's'), + (0x1D4FD, 'M', 't'), + (0x1D4FE, 'M', 'u'), + (0x1D4FF, 'M', 'v'), + (0x1D500, 'M', 'w'), + (0x1D501, 'M', 'x'), + (0x1D502, 'M', 'y'), + (0x1D503, 'M', 'z'), + (0x1D504, 'M', 'a'), + (0x1D505, 'M', 'b'), (0x1D506, 'X'), - (0x1D507, 'M', u'd'), - (0x1D508, 'M', u'e'), - (0x1D509, 'M', u'f'), - (0x1D50A, 'M', u'g'), + (0x1D507, 'M', 'd'), + (0x1D508, 'M', 'e'), + (0x1D509, 'M', 'f'), + (0x1D50A, 'M', 'g'), (0x1D50B, 'X'), - (0x1D50D, 'M', u'j'), - (0x1D50E, 'M', u'k'), - (0x1D50F, 'M', u'l'), - (0x1D510, 'M', u'm'), - (0x1D511, 'M', u'n'), - (0x1D512, 'M', u'o'), - (0x1D513, 'M', u'p'), - (0x1D514, 'M', u'q'), + (0x1D50D, 'M', 'j'), + (0x1D50E, 'M', 'k'), + (0x1D50F, 'M', 'l'), + (0x1D510, 'M', 'm'), + (0x1D511, 'M', 'n'), + (0x1D512, 'M', 'o'), + (0x1D513, 'M', 'p'), + (0x1D514, 'M', 'q'), (0x1D515, 'X'), - (0x1D516, 'M', u's'), - (0x1D517, 'M', u't'), - (0x1D518, 'M', u'u'), - (0x1D519, 'M', u'v'), - (0x1D51A, 'M', u'w'), 
- (0x1D51B, 'M', u'x'), - (0x1D51C, 'M', u'y'), + (0x1D516, 'M', 's'), + (0x1D517, 'M', 't'), + (0x1D518, 'M', 'u'), + (0x1D519, 'M', 'v'), + (0x1D51A, 'M', 'w'), + (0x1D51B, 'M', 'x'), + (0x1D51C, 'M', 'y'), (0x1D51D, 'X'), - (0x1D51E, 'M', u'a'), - (0x1D51F, 'M', u'b'), - (0x1D520, 'M', u'c'), - (0x1D521, 'M', u'd'), - (0x1D522, 'M', u'e'), - (0x1D523, 'M', u'f'), - (0x1D524, 'M', u'g'), - (0x1D525, 'M', u'h'), - (0x1D526, 'M', u'i'), - (0x1D527, 'M', u'j'), - (0x1D528, 'M', u'k'), - ] - -def _seg_61(): - return [ - (0x1D529, 'M', u'l'), - (0x1D52A, 'M', u'm'), - (0x1D52B, 'M', u'n'), - (0x1D52C, 'M', u'o'), - (0x1D52D, 'M', u'p'), - (0x1D52E, 'M', u'q'), - (0x1D52F, 'M', u'r'), - (0x1D530, 'M', u's'), - (0x1D531, 'M', u't'), - (0x1D532, 'M', u'u'), - (0x1D533, 'M', u'v'), - (0x1D534, 'M', u'w'), - (0x1D535, 'M', u'x'), - (0x1D536, 'M', u'y'), - (0x1D537, 'M', u'z'), - (0x1D538, 'M', u'a'), - (0x1D539, 'M', u'b'), + (0x1D51E, 'M', 'a'), + (0x1D51F, 'M', 'b'), + (0x1D520, 'M', 'c'), + (0x1D521, 'M', 'd'), + (0x1D522, 'M', 'e'), + (0x1D523, 'M', 'f'), + (0x1D524, 'M', 'g'), + (0x1D525, 'M', 'h'), + (0x1D526, 'M', 'i'), + (0x1D527, 'M', 'j'), + (0x1D528, 'M', 'k'), + (0x1D529, 'M', 'l'), + (0x1D52A, 'M', 'm'), + (0x1D52B, 'M', 'n'), + (0x1D52C, 'M', 'o'), + (0x1D52D, 'M', 'p'), + (0x1D52E, 'M', 'q'), + (0x1D52F, 'M', 'r'), + (0x1D530, 'M', 's'), + (0x1D531, 'M', 't'), + (0x1D532, 'M', 'u'), + (0x1D533, 'M', 'v'), + (0x1D534, 'M', 'w'), + (0x1D535, 'M', 'x'), + (0x1D536, 'M', 'y'), + (0x1D537, 'M', 'z'), + (0x1D538, 'M', 'a'), + (0x1D539, 'M', 'b'), (0x1D53A, 'X'), - (0x1D53B, 'M', u'd'), - (0x1D53C, 'M', u'e'), - (0x1D53D, 'M', u'f'), - (0x1D53E, 'M', u'g'), + (0x1D53B, 'M', 'd'), + (0x1D53C, 'M', 'e'), + (0x1D53D, 'M', 'f'), + (0x1D53E, 'M', 'g'), (0x1D53F, 'X'), - (0x1D540, 'M', u'i'), - (0x1D541, 'M', u'j'), - (0x1D542, 'M', u'k'), - (0x1D543, 'M', u'l'), - (0x1D544, 'M', u'm'), + (0x1D540, 'M', 'i'), + (0x1D541, 'M', 'j'), + (0x1D542, 'M', 'k'), + (0x1D543, 'M', 'l'), + (0x1D544, 'M', 'm'), (0x1D545, 'X'), - (0x1D546, 'M', u'o'), + (0x1D546, 'M', 'o'), (0x1D547, 'X'), - (0x1D54A, 'M', u's'), - (0x1D54B, 'M', u't'), - (0x1D54C, 'M', u'u'), - (0x1D54D, 'M', u'v'), - (0x1D54E, 'M', u'w'), - (0x1D54F, 'M', u'x'), - (0x1D550, 'M', u'y'), + (0x1D54A, 'M', 's'), + (0x1D54B, 'M', 't'), + (0x1D54C, 'M', 'u'), + (0x1D54D, 'M', 'v'), + (0x1D54E, 'M', 'w'), + (0x1D54F, 'M', 'x'), + (0x1D550, 'M', 'y'), (0x1D551, 'X'), - (0x1D552, 'M', u'a'), - (0x1D553, 'M', u'b'), - (0x1D554, 'M', u'c'), - (0x1D555, 'M', u'd'), - (0x1D556, 'M', u'e'), - (0x1D557, 'M', u'f'), - (0x1D558, 'M', u'g'), - (0x1D559, 'M', u'h'), - (0x1D55A, 'M', u'i'), - (0x1D55B, 'M', u'j'), - (0x1D55C, 'M', u'k'), - (0x1D55D, 'M', u'l'), - (0x1D55E, 'M', u'm'), - (0x1D55F, 'M', u'n'), - (0x1D560, 'M', u'o'), - (0x1D561, 'M', u'p'), - (0x1D562, 'M', u'q'), - (0x1D563, 'M', u'r'), - (0x1D564, 'M', u's'), - (0x1D565, 'M', u't'), - (0x1D566, 'M', u'u'), - (0x1D567, 'M', u'v'), - (0x1D568, 'M', u'w'), - (0x1D569, 'M', u'x'), - (0x1D56A, 'M', u'y'), - (0x1D56B, 'M', u'z'), - (0x1D56C, 'M', u'a'), - (0x1D56D, 'M', u'b'), - (0x1D56E, 'M', u'c'), - (0x1D56F, 'M', u'd'), - (0x1D570, 'M', u'e'), - (0x1D571, 'M', u'f'), - (0x1D572, 'M', u'g'), - (0x1D573, 'M', u'h'), - (0x1D574, 'M', u'i'), - (0x1D575, 'M', u'j'), - (0x1D576, 'M', u'k'), - (0x1D577, 'M', u'l'), - (0x1D578, 'M', u'm'), - (0x1D579, 'M', u'n'), - (0x1D57A, 'M', u'o'), - (0x1D57B, 'M', u'p'), - (0x1D57C, 'M', u'q'), - (0x1D57D, 'M', u'r'), - (0x1D57E, 'M', u's'), - (0x1D57F, 'M', 
u't'), - (0x1D580, 'M', u'u'), - (0x1D581, 'M', u'v'), - (0x1D582, 'M', u'w'), - (0x1D583, 'M', u'x'), - (0x1D584, 'M', u'y'), - (0x1D585, 'M', u'z'), - (0x1D586, 'M', u'a'), - (0x1D587, 'M', u'b'), - (0x1D588, 'M', u'c'), - (0x1D589, 'M', u'd'), - (0x1D58A, 'M', u'e'), - (0x1D58B, 'M', u'f'), - (0x1D58C, 'M', u'g'), - (0x1D58D, 'M', u'h'), - (0x1D58E, 'M', u'i'), + (0x1D552, 'M', 'a'), ] -def _seg_62(): +def _seg_64() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: return [ - (0x1D58F, 'M', u'j'), - (0x1D590, 'M', u'k'), - (0x1D591, 'M', u'l'), - (0x1D592, 'M', u'm'), - (0x1D593, 'M', u'n'), - (0x1D594, 'M', u'o'), - (0x1D595, 'M', u'p'), - (0x1D596, 'M', u'q'), - (0x1D597, 'M', u'r'), - (0x1D598, 'M', u's'), - (0x1D599, 'M', u't'), - (0x1D59A, 'M', u'u'), - (0x1D59B, 'M', u'v'), - (0x1D59C, 'M', u'w'), - (0x1D59D, 'M', u'x'), - (0x1D59E, 'M', u'y'), - (0x1D59F, 'M', u'z'), - (0x1D5A0, 'M', u'a'), - (0x1D5A1, 'M', u'b'), - (0x1D5A2, 'M', u'c'), - (0x1D5A3, 'M', u'd'), - (0x1D5A4, 'M', u'e'), - (0x1D5A5, 'M', u'f'), - (0x1D5A6, 'M', u'g'), - (0x1D5A7, 'M', u'h'), - (0x1D5A8, 'M', u'i'), - (0x1D5A9, 'M', u'j'), - (0x1D5AA, 'M', u'k'), - (0x1D5AB, 'M', u'l'), - (0x1D5AC, 'M', u'm'), - (0x1D5AD, 'M', u'n'), - (0x1D5AE, 'M', u'o'), - (0x1D5AF, 'M', u'p'), - (0x1D5B0, 'M', u'q'), - (0x1D5B1, 'M', u'r'), - (0x1D5B2, 'M', u's'), - (0x1D5B3, 'M', u't'), - (0x1D5B4, 'M', u'u'), - (0x1D5B5, 'M', u'v'), - (0x1D5B6, 'M', u'w'), - (0x1D5B7, 'M', u'x'), - (0x1D5B8, 'M', u'y'), - (0x1D5B9, 'M', u'z'), - (0x1D5BA, 'M', u'a'), - (0x1D5BB, 'M', u'b'), - (0x1D5BC, 'M', u'c'), - (0x1D5BD, 'M', u'd'), - (0x1D5BE, 'M', u'e'), - (0x1D5BF, 'M', u'f'), - (0x1D5C0, 'M', u'g'), - (0x1D5C1, 'M', u'h'), - (0x1D5C2, 'M', u'i'), - (0x1D5C3, 'M', u'j'), - (0x1D5C4, 'M', u'k'), - (0x1D5C5, 'M', u'l'), - (0x1D5C6, 'M', u'm'), - (0x1D5C7, 'M', u'n'), - (0x1D5C8, 'M', u'o'), - (0x1D5C9, 'M', u'p'), - (0x1D5CA, 'M', u'q'), - (0x1D5CB, 'M', u'r'), - (0x1D5CC, 'M', u's'), - (0x1D5CD, 'M', u't'), - (0x1D5CE, 'M', u'u'), - (0x1D5CF, 'M', u'v'), - (0x1D5D0, 'M', u'w'), - (0x1D5D1, 'M', u'x'), - (0x1D5D2, 'M', u'y'), - (0x1D5D3, 'M', u'z'), - (0x1D5D4, 'M', u'a'), - (0x1D5D5, 'M', u'b'), - (0x1D5D6, 'M', u'c'), - (0x1D5D7, 'M', u'd'), - (0x1D5D8, 'M', u'e'), - (0x1D5D9, 'M', u'f'), - (0x1D5DA, 'M', u'g'), - (0x1D5DB, 'M', u'h'), - (0x1D5DC, 'M', u'i'), - (0x1D5DD, 'M', u'j'), - (0x1D5DE, 'M', u'k'), - (0x1D5DF, 'M', u'l'), - (0x1D5E0, 'M', u'm'), - (0x1D5E1, 'M', u'n'), - (0x1D5E2, 'M', u'o'), - (0x1D5E3, 'M', u'p'), - (0x1D5E4, 'M', u'q'), - (0x1D5E5, 'M', u'r'), - (0x1D5E6, 'M', u's'), - (0x1D5E7, 'M', u't'), - (0x1D5E8, 'M', u'u'), - (0x1D5E9, 'M', u'v'), - (0x1D5EA, 'M', u'w'), - (0x1D5EB, 'M', u'x'), - (0x1D5EC, 'M', u'y'), - (0x1D5ED, 'M', u'z'), - (0x1D5EE, 'M', u'a'), - (0x1D5EF, 'M', u'b'), - (0x1D5F0, 'M', u'c'), - (0x1D5F1, 'M', u'd'), - (0x1D5F2, 'M', u'e'), + (0x1D553, 'M', 'b'), + (0x1D554, 'M', 'c'), + (0x1D555, 'M', 'd'), + (0x1D556, 'M', 'e'), + (0x1D557, 'M', 'f'), + (0x1D558, 'M', 'g'), + (0x1D559, 'M', 'h'), + (0x1D55A, 'M', 'i'), + (0x1D55B, 'M', 'j'), + (0x1D55C, 'M', 'k'), + (0x1D55D, 'M', 'l'), + (0x1D55E, 'M', 'm'), + (0x1D55F, 'M', 'n'), + (0x1D560, 'M', 'o'), + (0x1D561, 'M', 'p'), + (0x1D562, 'M', 'q'), + (0x1D563, 'M', 'r'), + (0x1D564, 'M', 's'), + (0x1D565, 'M', 't'), + (0x1D566, 'M', 'u'), + (0x1D567, 'M', 'v'), + (0x1D568, 'M', 'w'), + (0x1D569, 'M', 'x'), + (0x1D56A, 'M', 'y'), + (0x1D56B, 'M', 'z'), + (0x1D56C, 'M', 'a'), + (0x1D56D, 'M', 'b'), + (0x1D56E, 'M', 'c'), + (0x1D56F, 'M', 'd'), 
+ (0x1D570, 'M', 'e'), + (0x1D571, 'M', 'f'), + (0x1D572, 'M', 'g'), + (0x1D573, 'M', 'h'), + (0x1D574, 'M', 'i'), + (0x1D575, 'M', 'j'), + (0x1D576, 'M', 'k'), + (0x1D577, 'M', 'l'), + (0x1D578, 'M', 'm'), + (0x1D579, 'M', 'n'), + (0x1D57A, 'M', 'o'), + (0x1D57B, 'M', 'p'), + (0x1D57C, 'M', 'q'), + (0x1D57D, 'M', 'r'), + (0x1D57E, 'M', 's'), + (0x1D57F, 'M', 't'), + (0x1D580, 'M', 'u'), + (0x1D581, 'M', 'v'), + (0x1D582, 'M', 'w'), + (0x1D583, 'M', 'x'), + (0x1D584, 'M', 'y'), + (0x1D585, 'M', 'z'), + (0x1D586, 'M', 'a'), + (0x1D587, 'M', 'b'), + (0x1D588, 'M', 'c'), + (0x1D589, 'M', 'd'), + (0x1D58A, 'M', 'e'), + (0x1D58B, 'M', 'f'), + (0x1D58C, 'M', 'g'), + (0x1D58D, 'M', 'h'), + (0x1D58E, 'M', 'i'), + (0x1D58F, 'M', 'j'), + (0x1D590, 'M', 'k'), + (0x1D591, 'M', 'l'), + (0x1D592, 'M', 'm'), + (0x1D593, 'M', 'n'), + (0x1D594, 'M', 'o'), + (0x1D595, 'M', 'p'), + (0x1D596, 'M', 'q'), + (0x1D597, 'M', 'r'), + (0x1D598, 'M', 's'), + (0x1D599, 'M', 't'), + (0x1D59A, 'M', 'u'), + (0x1D59B, 'M', 'v'), + (0x1D59C, 'M', 'w'), + (0x1D59D, 'M', 'x'), + (0x1D59E, 'M', 'y'), + (0x1D59F, 'M', 'z'), + (0x1D5A0, 'M', 'a'), + (0x1D5A1, 'M', 'b'), + (0x1D5A2, 'M', 'c'), + (0x1D5A3, 'M', 'd'), + (0x1D5A4, 'M', 'e'), + (0x1D5A5, 'M', 'f'), + (0x1D5A6, 'M', 'g'), + (0x1D5A7, 'M', 'h'), + (0x1D5A8, 'M', 'i'), + (0x1D5A9, 'M', 'j'), + (0x1D5AA, 'M', 'k'), + (0x1D5AB, 'M', 'l'), + (0x1D5AC, 'M', 'm'), + (0x1D5AD, 'M', 'n'), + (0x1D5AE, 'M', 'o'), + (0x1D5AF, 'M', 'p'), + (0x1D5B0, 'M', 'q'), + (0x1D5B1, 'M', 'r'), + (0x1D5B2, 'M', 's'), + (0x1D5B3, 'M', 't'), + (0x1D5B4, 'M', 'u'), + (0x1D5B5, 'M', 'v'), + (0x1D5B6, 'M', 'w'), ] -def _seg_63(): +def _seg_65() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: return [ - (0x1D5F3, 'M', u'f'), - (0x1D5F4, 'M', u'g'), - (0x1D5F5, 'M', u'h'), - (0x1D5F6, 'M', u'i'), - (0x1D5F7, 'M', u'j'), - (0x1D5F8, 'M', u'k'), - (0x1D5F9, 'M', u'l'), - (0x1D5FA, 'M', u'm'), - (0x1D5FB, 'M', u'n'), - (0x1D5FC, 'M', u'o'), - (0x1D5FD, 'M', u'p'), - (0x1D5FE, 'M', u'q'), - (0x1D5FF, 'M', u'r'), - (0x1D600, 'M', u's'), - (0x1D601, 'M', u't'), - (0x1D602, 'M', u'u'), - (0x1D603, 'M', u'v'), - (0x1D604, 'M', u'w'), - (0x1D605, 'M', u'x'), - (0x1D606, 'M', u'y'), - (0x1D607, 'M', u'z'), - (0x1D608, 'M', u'a'), - (0x1D609, 'M', u'b'), - (0x1D60A, 'M', u'c'), - (0x1D60B, 'M', u'd'), - (0x1D60C, 'M', u'e'), - (0x1D60D, 'M', u'f'), - (0x1D60E, 'M', u'g'), - (0x1D60F, 'M', u'h'), - (0x1D610, 'M', u'i'), - (0x1D611, 'M', u'j'), - (0x1D612, 'M', u'k'), - (0x1D613, 'M', u'l'), - (0x1D614, 'M', u'm'), - (0x1D615, 'M', u'n'), - (0x1D616, 'M', u'o'), - (0x1D617, 'M', u'p'), - (0x1D618, 'M', u'q'), - (0x1D619, 'M', u'r'), - (0x1D61A, 'M', u's'), - (0x1D61B, 'M', u't'), - (0x1D61C, 'M', u'u'), - (0x1D61D, 'M', u'v'), - (0x1D61E, 'M', u'w'), - (0x1D61F, 'M', u'x'), - (0x1D620, 'M', u'y'), - (0x1D621, 'M', u'z'), - (0x1D622, 'M', u'a'), - (0x1D623, 'M', u'b'), - (0x1D624, 'M', u'c'), - (0x1D625, 'M', u'd'), - (0x1D626, 'M', u'e'), - (0x1D627, 'M', u'f'), - (0x1D628, 'M', u'g'), - (0x1D629, 'M', u'h'), - (0x1D62A, 'M', u'i'), - (0x1D62B, 'M', u'j'), - (0x1D62C, 'M', u'k'), - (0x1D62D, 'M', u'l'), - (0x1D62E, 'M', u'm'), - (0x1D62F, 'M', u'n'), - (0x1D630, 'M', u'o'), - (0x1D631, 'M', u'p'), - (0x1D632, 'M', u'q'), - (0x1D633, 'M', u'r'), - (0x1D634, 'M', u's'), - (0x1D635, 'M', u't'), - (0x1D636, 'M', u'u'), - (0x1D637, 'M', u'v'), - (0x1D638, 'M', u'w'), - (0x1D639, 'M', u'x'), - (0x1D63A, 'M', u'y'), - (0x1D63B, 'M', u'z'), - (0x1D63C, 'M', u'a'), - (0x1D63D, 'M', u'b'), - (0x1D63E, 'M', u'c'), 
- (0x1D63F, 'M', u'd'), - (0x1D640, 'M', u'e'), - (0x1D641, 'M', u'f'), - (0x1D642, 'M', u'g'), - (0x1D643, 'M', u'h'), - (0x1D644, 'M', u'i'), - (0x1D645, 'M', u'j'), - (0x1D646, 'M', u'k'), - (0x1D647, 'M', u'l'), - (0x1D648, 'M', u'm'), - (0x1D649, 'M', u'n'), - (0x1D64A, 'M', u'o'), - (0x1D64B, 'M', u'p'), - (0x1D64C, 'M', u'q'), - (0x1D64D, 'M', u'r'), - (0x1D64E, 'M', u's'), - (0x1D64F, 'M', u't'), - (0x1D650, 'M', u'u'), - (0x1D651, 'M', u'v'), - (0x1D652, 'M', u'w'), - (0x1D653, 'M', u'x'), - (0x1D654, 'M', u'y'), - (0x1D655, 'M', u'z'), - (0x1D656, 'M', u'a'), + (0x1D5B7, 'M', 'x'), + (0x1D5B8, 'M', 'y'), + (0x1D5B9, 'M', 'z'), + (0x1D5BA, 'M', 'a'), + (0x1D5BB, 'M', 'b'), + (0x1D5BC, 'M', 'c'), + (0x1D5BD, 'M', 'd'), + (0x1D5BE, 'M', 'e'), + (0x1D5BF, 'M', 'f'), + (0x1D5C0, 'M', 'g'), + (0x1D5C1, 'M', 'h'), + (0x1D5C2, 'M', 'i'), + (0x1D5C3, 'M', 'j'), + (0x1D5C4, 'M', 'k'), + (0x1D5C5, 'M', 'l'), + (0x1D5C6, 'M', 'm'), + (0x1D5C7, 'M', 'n'), + (0x1D5C8, 'M', 'o'), + (0x1D5C9, 'M', 'p'), + (0x1D5CA, 'M', 'q'), + (0x1D5CB, 'M', 'r'), + (0x1D5CC, 'M', 's'), + (0x1D5CD, 'M', 't'), + (0x1D5CE, 'M', 'u'), + (0x1D5CF, 'M', 'v'), + (0x1D5D0, 'M', 'w'), + (0x1D5D1, 'M', 'x'), + (0x1D5D2, 'M', 'y'), + (0x1D5D3, 'M', 'z'), + (0x1D5D4, 'M', 'a'), + (0x1D5D5, 'M', 'b'), + (0x1D5D6, 'M', 'c'), + (0x1D5D7, 'M', 'd'), + (0x1D5D8, 'M', 'e'), + (0x1D5D9, 'M', 'f'), + (0x1D5DA, 'M', 'g'), + (0x1D5DB, 'M', 'h'), + (0x1D5DC, 'M', 'i'), + (0x1D5DD, 'M', 'j'), + (0x1D5DE, 'M', 'k'), + (0x1D5DF, 'M', 'l'), + (0x1D5E0, 'M', 'm'), + (0x1D5E1, 'M', 'n'), + (0x1D5E2, 'M', 'o'), + (0x1D5E3, 'M', 'p'), + (0x1D5E4, 'M', 'q'), + (0x1D5E5, 'M', 'r'), + (0x1D5E6, 'M', 's'), + (0x1D5E7, 'M', 't'), + (0x1D5E8, 'M', 'u'), + (0x1D5E9, 'M', 'v'), + (0x1D5EA, 'M', 'w'), + (0x1D5EB, 'M', 'x'), + (0x1D5EC, 'M', 'y'), + (0x1D5ED, 'M', 'z'), + (0x1D5EE, 'M', 'a'), + (0x1D5EF, 'M', 'b'), + (0x1D5F0, 'M', 'c'), + (0x1D5F1, 'M', 'd'), + (0x1D5F2, 'M', 'e'), + (0x1D5F3, 'M', 'f'), + (0x1D5F4, 'M', 'g'), + (0x1D5F5, 'M', 'h'), + (0x1D5F6, 'M', 'i'), + (0x1D5F7, 'M', 'j'), + (0x1D5F8, 'M', 'k'), + (0x1D5F9, 'M', 'l'), + (0x1D5FA, 'M', 'm'), + (0x1D5FB, 'M', 'n'), + (0x1D5FC, 'M', 'o'), + (0x1D5FD, 'M', 'p'), + (0x1D5FE, 'M', 'q'), + (0x1D5FF, 'M', 'r'), + (0x1D600, 'M', 's'), + (0x1D601, 'M', 't'), + (0x1D602, 'M', 'u'), + (0x1D603, 'M', 'v'), + (0x1D604, 'M', 'w'), + (0x1D605, 'M', 'x'), + (0x1D606, 'M', 'y'), + (0x1D607, 'M', 'z'), + (0x1D608, 'M', 'a'), + (0x1D609, 'M', 'b'), + (0x1D60A, 'M', 'c'), + (0x1D60B, 'M', 'd'), + (0x1D60C, 'M', 'e'), + (0x1D60D, 'M', 'f'), + (0x1D60E, 'M', 'g'), + (0x1D60F, 'M', 'h'), + (0x1D610, 'M', 'i'), + (0x1D611, 'M', 'j'), + (0x1D612, 'M', 'k'), + (0x1D613, 'M', 'l'), + (0x1D614, 'M', 'm'), + (0x1D615, 'M', 'n'), + (0x1D616, 'M', 'o'), + (0x1D617, 'M', 'p'), + (0x1D618, 'M', 'q'), + (0x1D619, 'M', 'r'), + (0x1D61A, 'M', 's'), ] -def _seg_64(): +def _seg_66() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: return [ - (0x1D657, 'M', u'b'), - (0x1D658, 'M', u'c'), - (0x1D659, 'M', u'd'), - (0x1D65A, 'M', u'e'), - (0x1D65B, 'M', u'f'), - (0x1D65C, 'M', u'g'), - (0x1D65D, 'M', u'h'), - (0x1D65E, 'M', u'i'), - (0x1D65F, 'M', u'j'), - (0x1D660, 'M', u'k'), - (0x1D661, 'M', u'l'), - (0x1D662, 'M', u'm'), - (0x1D663, 'M', u'n'), - (0x1D664, 'M', u'o'), - (0x1D665, 'M', u'p'), - (0x1D666, 'M', u'q'), - (0x1D667, 'M', u'r'), - (0x1D668, 'M', u's'), - (0x1D669, 'M', u't'), - (0x1D66A, 'M', u'u'), - (0x1D66B, 'M', u'v'), - (0x1D66C, 'M', u'w'), - (0x1D66D, 'M', u'x'), - (0x1D66E, 'M', u'y'), - 
(0x1D66F, 'M', u'z'), - (0x1D670, 'M', u'a'), - (0x1D671, 'M', u'b'), - (0x1D672, 'M', u'c'), - (0x1D673, 'M', u'd'), - (0x1D674, 'M', u'e'), - (0x1D675, 'M', u'f'), - (0x1D676, 'M', u'g'), - (0x1D677, 'M', u'h'), - (0x1D678, 'M', u'i'), - (0x1D679, 'M', u'j'), - (0x1D67A, 'M', u'k'), - (0x1D67B, 'M', u'l'), - (0x1D67C, 'M', u'm'), - (0x1D67D, 'M', u'n'), - (0x1D67E, 'M', u'o'), - (0x1D67F, 'M', u'p'), - (0x1D680, 'M', u'q'), - (0x1D681, 'M', u'r'), - (0x1D682, 'M', u's'), - (0x1D683, 'M', u't'), - (0x1D684, 'M', u'u'), - (0x1D685, 'M', u'v'), - (0x1D686, 'M', u'w'), - (0x1D687, 'M', u'x'), - (0x1D688, 'M', u'y'), - (0x1D689, 'M', u'z'), - (0x1D68A, 'M', u'a'), - (0x1D68B, 'M', u'b'), - (0x1D68C, 'M', u'c'), - (0x1D68D, 'M', u'd'), - (0x1D68E, 'M', u'e'), - (0x1D68F, 'M', u'f'), - (0x1D690, 'M', u'g'), - (0x1D691, 'M', u'h'), - (0x1D692, 'M', u'i'), - (0x1D693, 'M', u'j'), - (0x1D694, 'M', u'k'), - (0x1D695, 'M', u'l'), - (0x1D696, 'M', u'm'), - (0x1D697, 'M', u'n'), - (0x1D698, 'M', u'o'), - (0x1D699, 'M', u'p'), - (0x1D69A, 'M', u'q'), - (0x1D69B, 'M', u'r'), - (0x1D69C, 'M', u's'), - (0x1D69D, 'M', u't'), - (0x1D69E, 'M', u'u'), - (0x1D69F, 'M', u'v'), - (0x1D6A0, 'M', u'w'), - (0x1D6A1, 'M', u'x'), - (0x1D6A2, 'M', u'y'), - (0x1D6A3, 'M', u'z'), - (0x1D6A4, 'M', u'ı'), - (0x1D6A5, 'M', u'ȷ'), - (0x1D6A6, 'X'), - (0x1D6A8, 'M', u'α'), - (0x1D6A9, 'M', u'β'), - (0x1D6AA, 'M', u'γ'), - (0x1D6AB, 'M', u'δ'), - (0x1D6AC, 'M', u'ε'), - (0x1D6AD, 'M', u'ζ'), - (0x1D6AE, 'M', u'η'), - (0x1D6AF, 'M', u'θ'), - (0x1D6B0, 'M', u'ι'), - (0x1D6B1, 'M', u'κ'), - (0x1D6B2, 'M', u'λ'), - (0x1D6B3, 'M', u'μ'), - (0x1D6B4, 'M', u'ν'), - (0x1D6B5, 'M', u'ξ'), - (0x1D6B6, 'M', u'ο'), - (0x1D6B7, 'M', u'π'), - (0x1D6B8, 'M', u'ρ'), - (0x1D6B9, 'M', u'θ'), - (0x1D6BA, 'M', u'σ'), - (0x1D6BB, 'M', u'τ'), + (0x1D61B, 'M', 't'), + (0x1D61C, 'M', 'u'), + (0x1D61D, 'M', 'v'), + (0x1D61E, 'M', 'w'), + (0x1D61F, 'M', 'x'), + (0x1D620, 'M', 'y'), + (0x1D621, 'M', 'z'), + (0x1D622, 'M', 'a'), + (0x1D623, 'M', 'b'), + (0x1D624, 'M', 'c'), + (0x1D625, 'M', 'd'), + (0x1D626, 'M', 'e'), + (0x1D627, 'M', 'f'), + (0x1D628, 'M', 'g'), + (0x1D629, 'M', 'h'), + (0x1D62A, 'M', 'i'), + (0x1D62B, 'M', 'j'), + (0x1D62C, 'M', 'k'), + (0x1D62D, 'M', 'l'), + (0x1D62E, 'M', 'm'), + (0x1D62F, 'M', 'n'), + (0x1D630, 'M', 'o'), + (0x1D631, 'M', 'p'), + (0x1D632, 'M', 'q'), + (0x1D633, 'M', 'r'), + (0x1D634, 'M', 's'), + (0x1D635, 'M', 't'), + (0x1D636, 'M', 'u'), + (0x1D637, 'M', 'v'), + (0x1D638, 'M', 'w'), + (0x1D639, 'M', 'x'), + (0x1D63A, 'M', 'y'), + (0x1D63B, 'M', 'z'), + (0x1D63C, 'M', 'a'), + (0x1D63D, 'M', 'b'), + (0x1D63E, 'M', 'c'), + (0x1D63F, 'M', 'd'), + (0x1D640, 'M', 'e'), + (0x1D641, 'M', 'f'), + (0x1D642, 'M', 'g'), + (0x1D643, 'M', 'h'), + (0x1D644, 'M', 'i'), + (0x1D645, 'M', 'j'), + (0x1D646, 'M', 'k'), + (0x1D647, 'M', 'l'), + (0x1D648, 'M', 'm'), + (0x1D649, 'M', 'n'), + (0x1D64A, 'M', 'o'), + (0x1D64B, 'M', 'p'), + (0x1D64C, 'M', 'q'), + (0x1D64D, 'M', 'r'), + (0x1D64E, 'M', 's'), + (0x1D64F, 'M', 't'), + (0x1D650, 'M', 'u'), + (0x1D651, 'M', 'v'), + (0x1D652, 'M', 'w'), + (0x1D653, 'M', 'x'), + (0x1D654, 'M', 'y'), + (0x1D655, 'M', 'z'), + (0x1D656, 'M', 'a'), + (0x1D657, 'M', 'b'), + (0x1D658, 'M', 'c'), + (0x1D659, 'M', 'd'), + (0x1D65A, 'M', 'e'), + (0x1D65B, 'M', 'f'), + (0x1D65C, 'M', 'g'), + (0x1D65D, 'M', 'h'), + (0x1D65E, 'M', 'i'), + (0x1D65F, 'M', 'j'), + (0x1D660, 'M', 'k'), + (0x1D661, 'M', 'l'), + (0x1D662, 'M', 'm'), + (0x1D663, 'M', 'n'), + (0x1D664, 'M', 'o'), + (0x1D665, 'M', 'p'), + (0x1D666, 
'M', 'q'), + (0x1D667, 'M', 'r'), + (0x1D668, 'M', 's'), + (0x1D669, 'M', 't'), + (0x1D66A, 'M', 'u'), + (0x1D66B, 'M', 'v'), + (0x1D66C, 'M', 'w'), + (0x1D66D, 'M', 'x'), + (0x1D66E, 'M', 'y'), + (0x1D66F, 'M', 'z'), + (0x1D670, 'M', 'a'), + (0x1D671, 'M', 'b'), + (0x1D672, 'M', 'c'), + (0x1D673, 'M', 'd'), + (0x1D674, 'M', 'e'), + (0x1D675, 'M', 'f'), + (0x1D676, 'M', 'g'), + (0x1D677, 'M', 'h'), + (0x1D678, 'M', 'i'), + (0x1D679, 'M', 'j'), + (0x1D67A, 'M', 'k'), + (0x1D67B, 'M', 'l'), + (0x1D67C, 'M', 'm'), + (0x1D67D, 'M', 'n'), + (0x1D67E, 'M', 'o'), ] -def _seg_65(): +def _seg_67() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: return [ - (0x1D6BC, 'M', u'υ'), - (0x1D6BD, 'M', u'φ'), - (0x1D6BE, 'M', u'χ'), - (0x1D6BF, 'M', u'ψ'), - (0x1D6C0, 'M', u'ω'), - (0x1D6C1, 'M', u'∇'), - (0x1D6C2, 'M', u'α'), - (0x1D6C3, 'M', u'β'), - (0x1D6C4, 'M', u'γ'), - (0x1D6C5, 'M', u'δ'), - (0x1D6C6, 'M', u'ε'), - (0x1D6C7, 'M', u'ζ'), - (0x1D6C8, 'M', u'η'), - (0x1D6C9, 'M', u'θ'), - (0x1D6CA, 'M', u'ι'), - (0x1D6CB, 'M', u'κ'), - (0x1D6CC, 'M', u'λ'), - (0x1D6CD, 'M', u'μ'), - (0x1D6CE, 'M', u'ν'), - (0x1D6CF, 'M', u'ξ'), - (0x1D6D0, 'M', u'ο'), - (0x1D6D1, 'M', u'π'), - (0x1D6D2, 'M', u'ρ'), - (0x1D6D3, 'M', u'σ'), - (0x1D6D5, 'M', u'τ'), - (0x1D6D6, 'M', u'υ'), - (0x1D6D7, 'M', u'φ'), - (0x1D6D8, 'M', u'χ'), - (0x1D6D9, 'M', u'ψ'), - (0x1D6DA, 'M', u'ω'), - (0x1D6DB, 'M', u'∂'), - (0x1D6DC, 'M', u'ε'), - (0x1D6DD, 'M', u'θ'), - (0x1D6DE, 'M', u'κ'), - (0x1D6DF, 'M', u'φ'), - (0x1D6E0, 'M', u'ρ'), - (0x1D6E1, 'M', u'π'), - (0x1D6E2, 'M', u'α'), - (0x1D6E3, 'M', u'β'), - (0x1D6E4, 'M', u'γ'), - (0x1D6E5, 'M', u'δ'), - (0x1D6E6, 'M', u'ε'), - (0x1D6E7, 'M', u'ζ'), - (0x1D6E8, 'M', u'η'), - (0x1D6E9, 'M', u'θ'), - (0x1D6EA, 'M', u'ι'), - (0x1D6EB, 'M', u'κ'), - (0x1D6EC, 'M', u'λ'), - (0x1D6ED, 'M', u'μ'), - (0x1D6EE, 'M', u'ν'), - (0x1D6EF, 'M', u'ξ'), - (0x1D6F0, 'M', u'ο'), - (0x1D6F1, 'M', u'π'), - (0x1D6F2, 'M', u'ρ'), - (0x1D6F3, 'M', u'θ'), - (0x1D6F4, 'M', u'σ'), - (0x1D6F5, 'M', u'τ'), - (0x1D6F6, 'M', u'υ'), - (0x1D6F7, 'M', u'φ'), - (0x1D6F8, 'M', u'χ'), - (0x1D6F9, 'M', u'ψ'), - (0x1D6FA, 'M', u'ω'), - (0x1D6FB, 'M', u'∇'), - (0x1D6FC, 'M', u'α'), - (0x1D6FD, 'M', u'β'), - (0x1D6FE, 'M', u'γ'), - (0x1D6FF, 'M', u'δ'), - (0x1D700, 'M', u'ε'), - (0x1D701, 'M', u'ζ'), - (0x1D702, 'M', u'η'), - (0x1D703, 'M', u'θ'), - (0x1D704, 'M', u'ι'), - (0x1D705, 'M', u'κ'), - (0x1D706, 'M', u'λ'), - (0x1D707, 'M', u'μ'), - (0x1D708, 'M', u'ν'), - (0x1D709, 'M', u'ξ'), - (0x1D70A, 'M', u'ο'), - (0x1D70B, 'M', u'π'), - (0x1D70C, 'M', u'ρ'), - (0x1D70D, 'M', u'σ'), - (0x1D70F, 'M', u'τ'), - (0x1D710, 'M', u'υ'), - (0x1D711, 'M', u'φ'), - (0x1D712, 'M', u'χ'), - (0x1D713, 'M', u'ψ'), - (0x1D714, 'M', u'ω'), - (0x1D715, 'M', u'∂'), - (0x1D716, 'M', u'ε'), - (0x1D717, 'M', u'θ'), - (0x1D718, 'M', u'κ'), - (0x1D719, 'M', u'φ'), - (0x1D71A, 'M', u'ρ'), - (0x1D71B, 'M', u'π'), - (0x1D71C, 'M', u'α'), - (0x1D71D, 'M', u'β'), - (0x1D71E, 'M', u'γ'), - (0x1D71F, 'M', u'δ'), - (0x1D720, 'M', u'ε'), - (0x1D721, 'M', u'ζ'), + (0x1D67F, 'M', 'p'), + (0x1D680, 'M', 'q'), + (0x1D681, 'M', 'r'), + (0x1D682, 'M', 's'), + (0x1D683, 'M', 't'), + (0x1D684, 'M', 'u'), + (0x1D685, 'M', 'v'), + (0x1D686, 'M', 'w'), + (0x1D687, 'M', 'x'), + (0x1D688, 'M', 'y'), + (0x1D689, 'M', 'z'), + (0x1D68A, 'M', 'a'), + (0x1D68B, 'M', 'b'), + (0x1D68C, 'M', 'c'), + (0x1D68D, 'M', 'd'), + (0x1D68E, 'M', 'e'), + (0x1D68F, 'M', 'f'), + (0x1D690, 'M', 'g'), + (0x1D691, 'M', 'h'), + (0x1D692, 'M', 'i'), + (0x1D693, 'M', 'j'), + (0x1D694, 
'M', 'k'), + (0x1D695, 'M', 'l'), + (0x1D696, 'M', 'm'), + (0x1D697, 'M', 'n'), + (0x1D698, 'M', 'o'), + (0x1D699, 'M', 'p'), + (0x1D69A, 'M', 'q'), + (0x1D69B, 'M', 'r'), + (0x1D69C, 'M', 's'), + (0x1D69D, 'M', 't'), + (0x1D69E, 'M', 'u'), + (0x1D69F, 'M', 'v'), + (0x1D6A0, 'M', 'w'), + (0x1D6A1, 'M', 'x'), + (0x1D6A2, 'M', 'y'), + (0x1D6A3, 'M', 'z'), + (0x1D6A4, 'M', 'ı'), + (0x1D6A5, 'M', 'ȷ'), + (0x1D6A6, 'X'), + (0x1D6A8, 'M', 'α'), + (0x1D6A9, 'M', 'β'), + (0x1D6AA, 'M', 'γ'), + (0x1D6AB, 'M', 'δ'), + (0x1D6AC, 'M', 'ε'), + (0x1D6AD, 'M', 'ζ'), + (0x1D6AE, 'M', 'η'), + (0x1D6AF, 'M', 'θ'), + (0x1D6B0, 'M', 'ι'), + (0x1D6B1, 'M', 'κ'), + (0x1D6B2, 'M', 'λ'), + (0x1D6B3, 'M', 'μ'), + (0x1D6B4, 'M', 'ν'), + (0x1D6B5, 'M', 'ξ'), + (0x1D6B6, 'M', 'ο'), + (0x1D6B7, 'M', 'π'), + (0x1D6B8, 'M', 'ρ'), + (0x1D6B9, 'M', 'θ'), + (0x1D6BA, 'M', 'σ'), + (0x1D6BB, 'M', 'τ'), + (0x1D6BC, 'M', 'υ'), + (0x1D6BD, 'M', 'φ'), + (0x1D6BE, 'M', 'χ'), + (0x1D6BF, 'M', 'ψ'), + (0x1D6C0, 'M', 'ω'), + (0x1D6C1, 'M', '∇'), + (0x1D6C2, 'M', 'α'), + (0x1D6C3, 'M', 'β'), + (0x1D6C4, 'M', 'γ'), + (0x1D6C5, 'M', 'δ'), + (0x1D6C6, 'M', 'ε'), + (0x1D6C7, 'M', 'ζ'), + (0x1D6C8, 'M', 'η'), + (0x1D6C9, 'M', 'θ'), + (0x1D6CA, 'M', 'ι'), + (0x1D6CB, 'M', 'κ'), + (0x1D6CC, 'M', 'λ'), + (0x1D6CD, 'M', 'μ'), + (0x1D6CE, 'M', 'ν'), + (0x1D6CF, 'M', 'ξ'), + (0x1D6D0, 'M', 'ο'), + (0x1D6D1, 'M', 'π'), + (0x1D6D2, 'M', 'ρ'), + (0x1D6D3, 'M', 'σ'), + (0x1D6D5, 'M', 'τ'), + (0x1D6D6, 'M', 'υ'), + (0x1D6D7, 'M', 'φ'), + (0x1D6D8, 'M', 'χ'), + (0x1D6D9, 'M', 'ψ'), + (0x1D6DA, 'M', 'ω'), + (0x1D6DB, 'M', '∂'), + (0x1D6DC, 'M', 'ε'), + (0x1D6DD, 'M', 'θ'), + (0x1D6DE, 'M', 'κ'), + (0x1D6DF, 'M', 'φ'), + (0x1D6E0, 'M', 'ρ'), + (0x1D6E1, 'M', 'π'), + (0x1D6E2, 'M', 'α'), + (0x1D6E3, 'M', 'β'), + (0x1D6E4, 'M', 'γ'), ] -def _seg_66(): +def _seg_68() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: return [ - (0x1D722, 'M', u'η'), - (0x1D723, 'M', u'θ'), - (0x1D724, 'M', u'ι'), - (0x1D725, 'M', u'κ'), - (0x1D726, 'M', u'λ'), - (0x1D727, 'M', u'μ'), - (0x1D728, 'M', u'ν'), - (0x1D729, 'M', u'ξ'), - (0x1D72A, 'M', u'ο'), - (0x1D72B, 'M', u'π'), - (0x1D72C, 'M', u'ρ'), - (0x1D72D, 'M', u'θ'), - (0x1D72E, 'M', u'σ'), - (0x1D72F, 'M', u'τ'), - (0x1D730, 'M', u'υ'), - (0x1D731, 'M', u'φ'), - (0x1D732, 'M', u'χ'), - (0x1D733, 'M', u'ψ'), - (0x1D734, 'M', u'ω'), - (0x1D735, 'M', u'∇'), - (0x1D736, 'M', u'α'), - (0x1D737, 'M', u'β'), - (0x1D738, 'M', u'γ'), - (0x1D739, 'M', u'δ'), - (0x1D73A, 'M', u'ε'), - (0x1D73B, 'M', u'ζ'), - (0x1D73C, 'M', u'η'), - (0x1D73D, 'M', u'θ'), - (0x1D73E, 'M', u'ι'), - (0x1D73F, 'M', u'κ'), - (0x1D740, 'M', u'λ'), - (0x1D741, 'M', u'μ'), - (0x1D742, 'M', u'ν'), - (0x1D743, 'M', u'ξ'), - (0x1D744, 'M', u'ο'), - (0x1D745, 'M', u'π'), - (0x1D746, 'M', u'ρ'), - (0x1D747, 'M', u'σ'), - (0x1D749, 'M', u'τ'), - (0x1D74A, 'M', u'υ'), - (0x1D74B, 'M', u'φ'), - (0x1D74C, 'M', u'χ'), - (0x1D74D, 'M', u'ψ'), - (0x1D74E, 'M', u'ω'), - (0x1D74F, 'M', u'∂'), - (0x1D750, 'M', u'ε'), - (0x1D751, 'M', u'θ'), - (0x1D752, 'M', u'κ'), - (0x1D753, 'M', u'φ'), - (0x1D754, 'M', u'ρ'), - (0x1D755, 'M', u'π'), - (0x1D756, 'M', u'α'), - (0x1D757, 'M', u'β'), - (0x1D758, 'M', u'γ'), - (0x1D759, 'M', u'δ'), - (0x1D75A, 'M', u'ε'), - (0x1D75B, 'M', u'ζ'), - (0x1D75C, 'M', u'η'), - (0x1D75D, 'M', u'θ'), - (0x1D75E, 'M', u'ι'), - (0x1D75F, 'M', u'κ'), - (0x1D760, 'M', u'λ'), - (0x1D761, 'M', u'μ'), - (0x1D762, 'M', u'ν'), - (0x1D763, 'M', u'ξ'), - (0x1D764, 'M', u'ο'), - (0x1D765, 'M', u'π'), - (0x1D766, 'M', u'ρ'), - (0x1D767, 'M', u'θ'), - 
(0x1D768, 'M', u'σ'), - (0x1D769, 'M', u'τ'), - (0x1D76A, 'M', u'υ'), - (0x1D76B, 'M', u'φ'), - (0x1D76C, 'M', u'χ'), - (0x1D76D, 'M', u'ψ'), - (0x1D76E, 'M', u'ω'), - (0x1D76F, 'M', u'∇'), - (0x1D770, 'M', u'α'), - (0x1D771, 'M', u'β'), - (0x1D772, 'M', u'γ'), - (0x1D773, 'M', u'δ'), - (0x1D774, 'M', u'ε'), - (0x1D775, 'M', u'ζ'), - (0x1D776, 'M', u'η'), - (0x1D777, 'M', u'θ'), - (0x1D778, 'M', u'ι'), - (0x1D779, 'M', u'κ'), - (0x1D77A, 'M', u'λ'), - (0x1D77B, 'M', u'μ'), - (0x1D77C, 'M', u'ν'), - (0x1D77D, 'M', u'ξ'), - (0x1D77E, 'M', u'ο'), - (0x1D77F, 'M', u'π'), - (0x1D780, 'M', u'ρ'), - (0x1D781, 'M', u'σ'), - (0x1D783, 'M', u'τ'), - (0x1D784, 'M', u'υ'), - (0x1D785, 'M', u'φ'), - (0x1D786, 'M', u'χ'), - (0x1D787, 'M', u'ψ'), + (0x1D6E5, 'M', 'δ'), + (0x1D6E6, 'M', 'ε'), + (0x1D6E7, 'M', 'ζ'), + (0x1D6E8, 'M', 'η'), + (0x1D6E9, 'M', 'θ'), + (0x1D6EA, 'M', 'ι'), + (0x1D6EB, 'M', 'κ'), + (0x1D6EC, 'M', 'λ'), + (0x1D6ED, 'M', 'μ'), + (0x1D6EE, 'M', 'ν'), + (0x1D6EF, 'M', 'ξ'), + (0x1D6F0, 'M', 'ο'), + (0x1D6F1, 'M', 'π'), + (0x1D6F2, 'M', 'ρ'), + (0x1D6F3, 'M', 'θ'), + (0x1D6F4, 'M', 'σ'), + (0x1D6F5, 'M', 'τ'), + (0x1D6F6, 'M', 'υ'), + (0x1D6F7, 'M', 'φ'), + (0x1D6F8, 'M', 'χ'), + (0x1D6F9, 'M', 'ψ'), + (0x1D6FA, 'M', 'ω'), + (0x1D6FB, 'M', '∇'), + (0x1D6FC, 'M', 'α'), + (0x1D6FD, 'M', 'β'), + (0x1D6FE, 'M', 'γ'), + (0x1D6FF, 'M', 'δ'), + (0x1D700, 'M', 'ε'), + (0x1D701, 'M', 'ζ'), + (0x1D702, 'M', 'η'), + (0x1D703, 'M', 'θ'), + (0x1D704, 'M', 'ι'), + (0x1D705, 'M', 'κ'), + (0x1D706, 'M', 'λ'), + (0x1D707, 'M', 'μ'), + (0x1D708, 'M', 'ν'), + (0x1D709, 'M', 'ξ'), + (0x1D70A, 'M', 'ο'), + (0x1D70B, 'M', 'π'), + (0x1D70C, 'M', 'ρ'), + (0x1D70D, 'M', 'σ'), + (0x1D70F, 'M', 'τ'), + (0x1D710, 'M', 'υ'), + (0x1D711, 'M', 'φ'), + (0x1D712, 'M', 'χ'), + (0x1D713, 'M', 'ψ'), + (0x1D714, 'M', 'ω'), + (0x1D715, 'M', '∂'), + (0x1D716, 'M', 'ε'), + (0x1D717, 'M', 'θ'), + (0x1D718, 'M', 'κ'), + (0x1D719, 'M', 'φ'), + (0x1D71A, 'M', 'ρ'), + (0x1D71B, 'M', 'π'), + (0x1D71C, 'M', 'α'), + (0x1D71D, 'M', 'β'), + (0x1D71E, 'M', 'γ'), + (0x1D71F, 'M', 'δ'), + (0x1D720, 'M', 'ε'), + (0x1D721, 'M', 'ζ'), + (0x1D722, 'M', 'η'), + (0x1D723, 'M', 'θ'), + (0x1D724, 'M', 'ι'), + (0x1D725, 'M', 'κ'), + (0x1D726, 'M', 'λ'), + (0x1D727, 'M', 'μ'), + (0x1D728, 'M', 'ν'), + (0x1D729, 'M', 'ξ'), + (0x1D72A, 'M', 'ο'), + (0x1D72B, 'M', 'π'), + (0x1D72C, 'M', 'ρ'), + (0x1D72D, 'M', 'θ'), + (0x1D72E, 'M', 'σ'), + (0x1D72F, 'M', 'τ'), + (0x1D730, 'M', 'υ'), + (0x1D731, 'M', 'φ'), + (0x1D732, 'M', 'χ'), + (0x1D733, 'M', 'ψ'), + (0x1D734, 'M', 'ω'), + (0x1D735, 'M', '∇'), + (0x1D736, 'M', 'α'), + (0x1D737, 'M', 'β'), + (0x1D738, 'M', 'γ'), + (0x1D739, 'M', 'δ'), + (0x1D73A, 'M', 'ε'), + (0x1D73B, 'M', 'ζ'), + (0x1D73C, 'M', 'η'), + (0x1D73D, 'M', 'θ'), + (0x1D73E, 'M', 'ι'), + (0x1D73F, 'M', 'κ'), + (0x1D740, 'M', 'λ'), + (0x1D741, 'M', 'μ'), + (0x1D742, 'M', 'ν'), + (0x1D743, 'M', 'ξ'), + (0x1D744, 'M', 'ο'), + (0x1D745, 'M', 'π'), + (0x1D746, 'M', 'ρ'), + (0x1D747, 'M', 'σ'), + (0x1D749, 'M', 'τ'), + (0x1D74A, 'M', 'υ'), ] -def _seg_67(): +def _seg_69() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: return [ - (0x1D788, 'M', u'ω'), - (0x1D789, 'M', u'∂'), - (0x1D78A, 'M', u'ε'), - (0x1D78B, 'M', u'θ'), - (0x1D78C, 'M', u'κ'), - (0x1D78D, 'M', u'φ'), - (0x1D78E, 'M', u'ρ'), - (0x1D78F, 'M', u'π'), - (0x1D790, 'M', u'α'), - (0x1D791, 'M', u'β'), - (0x1D792, 'M', u'γ'), - (0x1D793, 'M', u'δ'), - (0x1D794, 'M', u'ε'), - (0x1D795, 'M', u'ζ'), - (0x1D796, 'M', u'η'), - (0x1D797, 'M', u'θ'), - (0x1D798, 'M', u'ι'), - 
(0x1D799, 'M', u'κ'), - (0x1D79A, 'M', u'λ'), - (0x1D79B, 'M', u'μ'), - (0x1D79C, 'M', u'ν'), - (0x1D79D, 'M', u'ξ'), - (0x1D79E, 'M', u'ο'), - (0x1D79F, 'M', u'π'), - (0x1D7A0, 'M', u'ρ'), - (0x1D7A1, 'M', u'θ'), - (0x1D7A2, 'M', u'σ'), - (0x1D7A3, 'M', u'τ'), - (0x1D7A4, 'M', u'υ'), - (0x1D7A5, 'M', u'φ'), - (0x1D7A6, 'M', u'χ'), - (0x1D7A7, 'M', u'ψ'), - (0x1D7A8, 'M', u'ω'), - (0x1D7A9, 'M', u'∇'), - (0x1D7AA, 'M', u'α'), - (0x1D7AB, 'M', u'β'), - (0x1D7AC, 'M', u'γ'), - (0x1D7AD, 'M', u'δ'), - (0x1D7AE, 'M', u'ε'), - (0x1D7AF, 'M', u'ζ'), - (0x1D7B0, 'M', u'η'), - (0x1D7B1, 'M', u'θ'), - (0x1D7B2, 'M', u'ι'), - (0x1D7B3, 'M', u'κ'), - (0x1D7B4, 'M', u'λ'), - (0x1D7B5, 'M', u'μ'), - (0x1D7B6, 'M', u'ν'), - (0x1D7B7, 'M', u'ξ'), - (0x1D7B8, 'M', u'ο'), - (0x1D7B9, 'M', u'π'), - (0x1D7BA, 'M', u'ρ'), - (0x1D7BB, 'M', u'σ'), - (0x1D7BD, 'M', u'τ'), - (0x1D7BE, 'M', u'υ'), - (0x1D7BF, 'M', u'φ'), - (0x1D7C0, 'M', u'χ'), - (0x1D7C1, 'M', u'ψ'), - (0x1D7C2, 'M', u'ω'), - (0x1D7C3, 'M', u'∂'), - (0x1D7C4, 'M', u'ε'), - (0x1D7C5, 'M', u'θ'), - (0x1D7C6, 'M', u'κ'), - (0x1D7C7, 'M', u'φ'), - (0x1D7C8, 'M', u'ρ'), - (0x1D7C9, 'M', u'π'), - (0x1D7CA, 'M', u'ϝ'), - (0x1D7CC, 'X'), - (0x1D7CE, 'M', u'0'), - (0x1D7CF, 'M', u'1'), - (0x1D7D0, 'M', u'2'), - (0x1D7D1, 'M', u'3'), - (0x1D7D2, 'M', u'4'), - (0x1D7D3, 'M', u'5'), - (0x1D7D4, 'M', u'6'), - (0x1D7D5, 'M', u'7'), - (0x1D7D6, 'M', u'8'), - (0x1D7D7, 'M', u'9'), - (0x1D7D8, 'M', u'0'), - (0x1D7D9, 'M', u'1'), - (0x1D7DA, 'M', u'2'), - (0x1D7DB, 'M', u'3'), - (0x1D7DC, 'M', u'4'), - (0x1D7DD, 'M', u'5'), - (0x1D7DE, 'M', u'6'), - (0x1D7DF, 'M', u'7'), - (0x1D7E0, 'M', u'8'), - (0x1D7E1, 'M', u'9'), - (0x1D7E2, 'M', u'0'), - (0x1D7E3, 'M', u'1'), - (0x1D7E4, 'M', u'2'), - (0x1D7E5, 'M', u'3'), - (0x1D7E6, 'M', u'4'), - (0x1D7E7, 'M', u'5'), - (0x1D7E8, 'M', u'6'), - (0x1D7E9, 'M', u'7'), - (0x1D7EA, 'M', u'8'), - (0x1D7EB, 'M', u'9'), - (0x1D7EC, 'M', u'0'), - (0x1D7ED, 'M', u'1'), - (0x1D7EE, 'M', u'2'), + (0x1D74B, 'M', 'φ'), + (0x1D74C, 'M', 'χ'), + (0x1D74D, 'M', 'ψ'), + (0x1D74E, 'M', 'ω'), + (0x1D74F, 'M', '∂'), + (0x1D750, 'M', 'ε'), + (0x1D751, 'M', 'θ'), + (0x1D752, 'M', 'κ'), + (0x1D753, 'M', 'φ'), + (0x1D754, 'M', 'ρ'), + (0x1D755, 'M', 'π'), + (0x1D756, 'M', 'α'), + (0x1D757, 'M', 'β'), + (0x1D758, 'M', 'γ'), + (0x1D759, 'M', 'δ'), + (0x1D75A, 'M', 'ε'), + (0x1D75B, 'M', 'ζ'), + (0x1D75C, 'M', 'η'), + (0x1D75D, 'M', 'θ'), + (0x1D75E, 'M', 'ι'), + (0x1D75F, 'M', 'κ'), + (0x1D760, 'M', 'λ'), + (0x1D761, 'M', 'μ'), + (0x1D762, 'M', 'ν'), + (0x1D763, 'M', 'ξ'), + (0x1D764, 'M', 'ο'), + (0x1D765, 'M', 'π'), + (0x1D766, 'M', 'ρ'), + (0x1D767, 'M', 'θ'), + (0x1D768, 'M', 'σ'), + (0x1D769, 'M', 'τ'), + (0x1D76A, 'M', 'υ'), + (0x1D76B, 'M', 'φ'), + (0x1D76C, 'M', 'χ'), + (0x1D76D, 'M', 'ψ'), + (0x1D76E, 'M', 'ω'), + (0x1D76F, 'M', '∇'), + (0x1D770, 'M', 'α'), + (0x1D771, 'M', 'β'), + (0x1D772, 'M', 'γ'), + (0x1D773, 'M', 'δ'), + (0x1D774, 'M', 'ε'), + (0x1D775, 'M', 'ζ'), + (0x1D776, 'M', 'η'), + (0x1D777, 'M', 'θ'), + (0x1D778, 'M', 'ι'), + (0x1D779, 'M', 'κ'), + (0x1D77A, 'M', 'λ'), + (0x1D77B, 'M', 'μ'), + (0x1D77C, 'M', 'ν'), + (0x1D77D, 'M', 'ξ'), + (0x1D77E, 'M', 'ο'), + (0x1D77F, 'M', 'π'), + (0x1D780, 'M', 'ρ'), + (0x1D781, 'M', 'σ'), + (0x1D783, 'M', 'τ'), + (0x1D784, 'M', 'υ'), + (0x1D785, 'M', 'φ'), + (0x1D786, 'M', 'χ'), + (0x1D787, 'M', 'ψ'), + (0x1D788, 'M', 'ω'), + (0x1D789, 'M', '∂'), + (0x1D78A, 'M', 'ε'), + (0x1D78B, 'M', 'θ'), + (0x1D78C, 'M', 'κ'), + (0x1D78D, 'M', 'φ'), + (0x1D78E, 'M', 'ρ'), + (0x1D78F, 'M', 'π'), + 
(0x1D790, 'M', 'α'), + (0x1D791, 'M', 'β'), + (0x1D792, 'M', 'γ'), + (0x1D793, 'M', 'δ'), + (0x1D794, 'M', 'ε'), + (0x1D795, 'M', 'ζ'), + (0x1D796, 'M', 'η'), + (0x1D797, 'M', 'θ'), + (0x1D798, 'M', 'ι'), + (0x1D799, 'M', 'κ'), + (0x1D79A, 'M', 'λ'), + (0x1D79B, 'M', 'μ'), + (0x1D79C, 'M', 'ν'), + (0x1D79D, 'M', 'ξ'), + (0x1D79E, 'M', 'ο'), + (0x1D79F, 'M', 'π'), + (0x1D7A0, 'M', 'ρ'), + (0x1D7A1, 'M', 'θ'), + (0x1D7A2, 'M', 'σ'), + (0x1D7A3, 'M', 'τ'), + (0x1D7A4, 'M', 'υ'), + (0x1D7A5, 'M', 'φ'), + (0x1D7A6, 'M', 'χ'), + (0x1D7A7, 'M', 'ψ'), + (0x1D7A8, 'M', 'ω'), + (0x1D7A9, 'M', '∇'), + (0x1D7AA, 'M', 'α'), + (0x1D7AB, 'M', 'β'), + (0x1D7AC, 'M', 'γ'), + (0x1D7AD, 'M', 'δ'), + (0x1D7AE, 'M', 'ε'), + (0x1D7AF, 'M', 'ζ'), ] -def _seg_68(): +def _seg_70() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: return [ - (0x1D7EF, 'M', u'3'), - (0x1D7F0, 'M', u'4'), - (0x1D7F1, 'M', u'5'), - (0x1D7F2, 'M', u'6'), - (0x1D7F3, 'M', u'7'), - (0x1D7F4, 'M', u'8'), - (0x1D7F5, 'M', u'9'), - (0x1D7F6, 'M', u'0'), - (0x1D7F7, 'M', u'1'), - (0x1D7F8, 'M', u'2'), - (0x1D7F9, 'M', u'3'), - (0x1D7FA, 'M', u'4'), - (0x1D7FB, 'M', u'5'), - (0x1D7FC, 'M', u'6'), - (0x1D7FD, 'M', u'7'), - (0x1D7FE, 'M', u'8'), - (0x1D7FF, 'M', u'9'), + (0x1D7B0, 'M', 'η'), + (0x1D7B1, 'M', 'θ'), + (0x1D7B2, 'M', 'ι'), + (0x1D7B3, 'M', 'κ'), + (0x1D7B4, 'M', 'λ'), + (0x1D7B5, 'M', 'μ'), + (0x1D7B6, 'M', 'ν'), + (0x1D7B7, 'M', 'ξ'), + (0x1D7B8, 'M', 'ο'), + (0x1D7B9, 'M', 'π'), + (0x1D7BA, 'M', 'ρ'), + (0x1D7BB, 'M', 'σ'), + (0x1D7BD, 'M', 'τ'), + (0x1D7BE, 'M', 'υ'), + (0x1D7BF, 'M', 'φ'), + (0x1D7C0, 'M', 'χ'), + (0x1D7C1, 'M', 'ψ'), + (0x1D7C2, 'M', 'ω'), + (0x1D7C3, 'M', '∂'), + (0x1D7C4, 'M', 'ε'), + (0x1D7C5, 'M', 'θ'), + (0x1D7C6, 'M', 'κ'), + (0x1D7C7, 'M', 'φ'), + (0x1D7C8, 'M', 'ρ'), + (0x1D7C9, 'M', 'π'), + (0x1D7CA, 'M', 'ϝ'), + (0x1D7CC, 'X'), + (0x1D7CE, 'M', '0'), + (0x1D7CF, 'M', '1'), + (0x1D7D0, 'M', '2'), + (0x1D7D1, 'M', '3'), + (0x1D7D2, 'M', '4'), + (0x1D7D3, 'M', '5'), + (0x1D7D4, 'M', '6'), + (0x1D7D5, 'M', '7'), + (0x1D7D6, 'M', '8'), + (0x1D7D7, 'M', '9'), + (0x1D7D8, 'M', '0'), + (0x1D7D9, 'M', '1'), + (0x1D7DA, 'M', '2'), + (0x1D7DB, 'M', '3'), + (0x1D7DC, 'M', '4'), + (0x1D7DD, 'M', '5'), + (0x1D7DE, 'M', '6'), + (0x1D7DF, 'M', '7'), + (0x1D7E0, 'M', '8'), + (0x1D7E1, 'M', '9'), + (0x1D7E2, 'M', '0'), + (0x1D7E3, 'M', '1'), + (0x1D7E4, 'M', '2'), + (0x1D7E5, 'M', '3'), + (0x1D7E6, 'M', '4'), + (0x1D7E7, 'M', '5'), + (0x1D7E8, 'M', '6'), + (0x1D7E9, 'M', '7'), + (0x1D7EA, 'M', '8'), + (0x1D7EB, 'M', '9'), + (0x1D7EC, 'M', '0'), + (0x1D7ED, 'M', '1'), + (0x1D7EE, 'M', '2'), + (0x1D7EF, 'M', '3'), + (0x1D7F0, 'M', '4'), + (0x1D7F1, 'M', '5'), + (0x1D7F2, 'M', '6'), + (0x1D7F3, 'M', '7'), + (0x1D7F4, 'M', '8'), + (0x1D7F5, 'M', '9'), + (0x1D7F6, 'M', '0'), + (0x1D7F7, 'M', '1'), + (0x1D7F8, 'M', '2'), + (0x1D7F9, 'M', '3'), + (0x1D7FA, 'M', '4'), + (0x1D7FB, 'M', '5'), + (0x1D7FC, 'M', '6'), + (0x1D7FD, 'M', '7'), + (0x1D7FE, 'M', '8'), + (0x1D7FF, 'M', '9'), (0x1D800, 'V'), (0x1DA8C, 'X'), (0x1DA9B, 'V'), (0x1DAA0, 'X'), (0x1DAA1, 'V'), (0x1DAB0, 'X'), + (0x1DF00, 'V'), + (0x1DF1F, 'X'), + (0x1DF25, 'V'), + (0x1DF2B, 'X'), (0x1E000, 'V'), (0x1E007, 'X'), (0x1E008, 'V'), @@ -7112,233 +7387,332 @@ def _seg_68(): (0x1E025, 'X'), (0x1E026, 'V'), (0x1E02B, 'X'), + (0x1E030, 'M', 'а'), + (0x1E031, 'M', 'б'), + (0x1E032, 'M', 'в'), + ] + +def _seg_71() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1E033, 'M', 'г'), + (0x1E034, 'M', 'д'), + (0x1E035, 'M', 'е'), + (0x1E036, 'M', 
'ж'), + (0x1E037, 'M', 'з'), + (0x1E038, 'M', 'и'), + (0x1E039, 'M', 'к'), + (0x1E03A, 'M', 'л'), + (0x1E03B, 'M', 'м'), + (0x1E03C, 'M', 'о'), + (0x1E03D, 'M', 'п'), + (0x1E03E, 'M', 'р'), + (0x1E03F, 'M', 'с'), + (0x1E040, 'M', 'т'), + (0x1E041, 'M', 'у'), + (0x1E042, 'M', 'ф'), + (0x1E043, 'M', 'х'), + (0x1E044, 'M', 'ц'), + (0x1E045, 'M', 'ч'), + (0x1E046, 'M', 'ш'), + (0x1E047, 'M', 'ы'), + (0x1E048, 'M', 'э'), + (0x1E049, 'M', 'ю'), + (0x1E04A, 'M', 'ꚉ'), + (0x1E04B, 'M', 'ә'), + (0x1E04C, 'M', 'і'), + (0x1E04D, 'M', 'ј'), + (0x1E04E, 'M', 'ө'), + (0x1E04F, 'M', 'ү'), + (0x1E050, 'M', 'ӏ'), + (0x1E051, 'M', 'а'), + (0x1E052, 'M', 'б'), + (0x1E053, 'M', 'в'), + (0x1E054, 'M', 'г'), + (0x1E055, 'M', 'д'), + (0x1E056, 'M', 'е'), + (0x1E057, 'M', 'ж'), + (0x1E058, 'M', 'з'), + (0x1E059, 'M', 'и'), + (0x1E05A, 'M', 'к'), + (0x1E05B, 'M', 'л'), + (0x1E05C, 'M', 'о'), + (0x1E05D, 'M', 'п'), + (0x1E05E, 'M', 'с'), + (0x1E05F, 'M', 'у'), + (0x1E060, 'M', 'ф'), + (0x1E061, 'M', 'х'), + (0x1E062, 'M', 'ц'), + (0x1E063, 'M', 'ч'), + (0x1E064, 'M', 'ш'), + (0x1E065, 'M', 'ъ'), + (0x1E066, 'M', 'ы'), + (0x1E067, 'M', 'ґ'), + (0x1E068, 'M', 'і'), + (0x1E069, 'M', 'ѕ'), + (0x1E06A, 'M', 'џ'), + (0x1E06B, 'M', 'ҫ'), + (0x1E06C, 'M', 'ꙑ'), + (0x1E06D, 'M', 'ұ'), + (0x1E06E, 'X'), + (0x1E08F, 'V'), + (0x1E090, 'X'), + (0x1E100, 'V'), + (0x1E12D, 'X'), + (0x1E130, 'V'), + (0x1E13E, 'X'), + (0x1E140, 'V'), + (0x1E14A, 'X'), + (0x1E14E, 'V'), + (0x1E150, 'X'), + (0x1E290, 'V'), + (0x1E2AF, 'X'), + (0x1E2C0, 'V'), + (0x1E2FA, 'X'), + (0x1E2FF, 'V'), + (0x1E300, 'X'), + (0x1E4D0, 'V'), + (0x1E4FA, 'X'), + (0x1E7E0, 'V'), + (0x1E7E7, 'X'), + (0x1E7E8, 'V'), + (0x1E7EC, 'X'), + (0x1E7ED, 'V'), + (0x1E7EF, 'X'), + (0x1E7F0, 'V'), + (0x1E7FF, 'X'), (0x1E800, 'V'), (0x1E8C5, 'X'), (0x1E8C7, 'V'), (0x1E8D7, 'X'), - (0x1E900, 'M', u'𞤢'), - (0x1E901, 'M', u'𞤣'), - (0x1E902, 'M', u'𞤤'), - (0x1E903, 'M', u'𞤥'), - (0x1E904, 'M', u'𞤦'), - (0x1E905, 'M', u'𞤧'), - (0x1E906, 'M', u'𞤨'), - (0x1E907, 'M', u'𞤩'), - (0x1E908, 'M', u'𞤪'), - (0x1E909, 'M', u'𞤫'), - (0x1E90A, 'M', u'𞤬'), - (0x1E90B, 'M', u'𞤭'), - (0x1E90C, 'M', u'𞤮'), - (0x1E90D, 'M', u'𞤯'), - (0x1E90E, 'M', u'𞤰'), - (0x1E90F, 'M', u'𞤱'), - (0x1E910, 'M', u'𞤲'), - (0x1E911, 'M', u'𞤳'), - (0x1E912, 'M', u'𞤴'), - (0x1E913, 'M', u'𞤵'), - (0x1E914, 'M', u'𞤶'), - (0x1E915, 'M', u'𞤷'), - (0x1E916, 'M', u'𞤸'), - (0x1E917, 'M', u'𞤹'), - (0x1E918, 'M', u'𞤺'), - (0x1E919, 'M', u'𞤻'), - (0x1E91A, 'M', u'𞤼'), - (0x1E91B, 'M', u'𞤽'), - (0x1E91C, 'M', u'𞤾'), - (0x1E91D, 'M', u'𞤿'), - (0x1E91E, 'M', u'𞥀'), - (0x1E91F, 'M', u'𞥁'), - (0x1E920, 'M', u'𞥂'), - (0x1E921, 'M', u'𞥃'), + (0x1E900, 'M', '𞤢'), + (0x1E901, 'M', '𞤣'), + (0x1E902, 'M', '𞤤'), + (0x1E903, 'M', '𞤥'), + (0x1E904, 'M', '𞤦'), + (0x1E905, 'M', '𞤧'), + (0x1E906, 'M', '𞤨'), + (0x1E907, 'M', '𞤩'), + (0x1E908, 'M', '𞤪'), + (0x1E909, 'M', '𞤫'), + ] + +def _seg_72() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1E90A, 'M', '𞤬'), + (0x1E90B, 'M', '𞤭'), + (0x1E90C, 'M', '𞤮'), + (0x1E90D, 'M', '𞤯'), + (0x1E90E, 'M', '𞤰'), + (0x1E90F, 'M', '𞤱'), + (0x1E910, 'M', '𞤲'), + (0x1E911, 'M', '𞤳'), + (0x1E912, 'M', '𞤴'), + (0x1E913, 'M', '𞤵'), + (0x1E914, 'M', '𞤶'), + (0x1E915, 'M', '𞤷'), + (0x1E916, 'M', '𞤸'), + (0x1E917, 'M', '𞤹'), + (0x1E918, 'M', '𞤺'), + (0x1E919, 'M', '𞤻'), + (0x1E91A, 'M', '𞤼'), + (0x1E91B, 'M', '𞤽'), + (0x1E91C, 'M', '𞤾'), + (0x1E91D, 'M', '𞤿'), + (0x1E91E, 'M', '𞥀'), + (0x1E91F, 'M', '𞥁'), + (0x1E920, 'M', '𞥂'), + (0x1E921, 'M', '𞥃'), (0x1E922, 'V'), - (0x1E94B, 'X'), + (0x1E94C, 
'X'), (0x1E950, 'V'), (0x1E95A, 'X'), (0x1E95E, 'V'), (0x1E960, 'X'), (0x1EC71, 'V'), (0x1ECB5, 'X'), - (0x1EE00, 'M', u'ا'), - (0x1EE01, 'M', u'ب'), - (0x1EE02, 'M', u'ج'), - (0x1EE03, 'M', u'د'), + (0x1ED01, 'V'), + (0x1ED3E, 'X'), + (0x1EE00, 'M', 'ا'), + (0x1EE01, 'M', 'ب'), + (0x1EE02, 'M', 'ج'), + (0x1EE03, 'M', 'د'), (0x1EE04, 'X'), - (0x1EE05, 'M', u'و'), - (0x1EE06, 'M', u'ز'), - (0x1EE07, 'M', u'ح'), - (0x1EE08, 'M', u'ط'), - (0x1EE09, 'M', u'ي'), - (0x1EE0A, 'M', u'ك'), - (0x1EE0B, 'M', u'ل'), - (0x1EE0C, 'M', u'م'), - (0x1EE0D, 'M', u'ن'), - (0x1EE0E, 'M', u'س'), - (0x1EE0F, 'M', u'ع'), - (0x1EE10, 'M', u'ف'), - (0x1EE11, 'M', u'ص'), - (0x1EE12, 'M', u'ق'), - (0x1EE13, 'M', u'ر'), - (0x1EE14, 'M', u'ش'), - ] - -def _seg_69(): - return [ - (0x1EE15, 'M', u'ت'), - (0x1EE16, 'M', u'ث'), - (0x1EE17, 'M', u'خ'), - (0x1EE18, 'M', u'ذ'), - (0x1EE19, 'M', u'ض'), - (0x1EE1A, 'M', u'ظ'), - (0x1EE1B, 'M', u'غ'), - (0x1EE1C, 'M', u'ٮ'), - (0x1EE1D, 'M', u'ں'), - (0x1EE1E, 'M', u'ڡ'), - (0x1EE1F, 'M', u'ٯ'), + (0x1EE05, 'M', 'و'), + (0x1EE06, 'M', 'ز'), + (0x1EE07, 'M', 'ح'), + (0x1EE08, 'M', 'ط'), + (0x1EE09, 'M', 'ي'), + (0x1EE0A, 'M', 'ك'), + (0x1EE0B, 'M', 'ل'), + (0x1EE0C, 'M', 'م'), + (0x1EE0D, 'M', 'ن'), + (0x1EE0E, 'M', 'س'), + (0x1EE0F, 'M', 'ع'), + (0x1EE10, 'M', 'ف'), + (0x1EE11, 'M', 'ص'), + (0x1EE12, 'M', 'ق'), + (0x1EE13, 'M', 'ر'), + (0x1EE14, 'M', 'ش'), + (0x1EE15, 'M', 'ت'), + (0x1EE16, 'M', 'ث'), + (0x1EE17, 'M', 'خ'), + (0x1EE18, 'M', 'ذ'), + (0x1EE19, 'M', 'ض'), + (0x1EE1A, 'M', 'ظ'), + (0x1EE1B, 'M', 'غ'), + (0x1EE1C, 'M', 'ٮ'), + (0x1EE1D, 'M', 'ں'), + (0x1EE1E, 'M', 'ڡ'), + (0x1EE1F, 'M', 'ٯ'), (0x1EE20, 'X'), - (0x1EE21, 'M', u'ب'), - (0x1EE22, 'M', u'ج'), + (0x1EE21, 'M', 'ب'), + (0x1EE22, 'M', 'ج'), (0x1EE23, 'X'), - (0x1EE24, 'M', u'ه'), + (0x1EE24, 'M', 'ه'), (0x1EE25, 'X'), - (0x1EE27, 'M', u'ح'), + (0x1EE27, 'M', 'ح'), (0x1EE28, 'X'), - (0x1EE29, 'M', u'ي'), - (0x1EE2A, 'M', u'ك'), - (0x1EE2B, 'M', u'ل'), - (0x1EE2C, 'M', u'م'), - (0x1EE2D, 'M', u'ن'), - (0x1EE2E, 'M', u'س'), - (0x1EE2F, 'M', u'ع'), - (0x1EE30, 'M', u'ف'), - (0x1EE31, 'M', u'ص'), - (0x1EE32, 'M', u'ق'), + (0x1EE29, 'M', 'ي'), + (0x1EE2A, 'M', 'ك'), + (0x1EE2B, 'M', 'ل'), + (0x1EE2C, 'M', 'م'), + (0x1EE2D, 'M', 'ن'), + (0x1EE2E, 'M', 'س'), + (0x1EE2F, 'M', 'ع'), + (0x1EE30, 'M', 'ف'), + (0x1EE31, 'M', 'ص'), + (0x1EE32, 'M', 'ق'), (0x1EE33, 'X'), - (0x1EE34, 'M', u'ش'), - (0x1EE35, 'M', u'ت'), - (0x1EE36, 'M', u'ث'), - (0x1EE37, 'M', u'خ'), + (0x1EE34, 'M', 'ش'), + (0x1EE35, 'M', 'ت'), + (0x1EE36, 'M', 'ث'), + (0x1EE37, 'M', 'خ'), (0x1EE38, 'X'), - (0x1EE39, 'M', u'ض'), + (0x1EE39, 'M', 'ض'), (0x1EE3A, 'X'), - (0x1EE3B, 'M', u'غ'), + (0x1EE3B, 'M', 'غ'), (0x1EE3C, 'X'), - (0x1EE42, 'M', u'ج'), + (0x1EE42, 'M', 'ج'), (0x1EE43, 'X'), - (0x1EE47, 'M', u'ح'), + (0x1EE47, 'M', 'ح'), (0x1EE48, 'X'), - (0x1EE49, 'M', u'ي'), + (0x1EE49, 'M', 'ي'), (0x1EE4A, 'X'), - (0x1EE4B, 'M', u'ل'), + ] + +def _seg_73() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1EE4B, 'M', 'ل'), (0x1EE4C, 'X'), - (0x1EE4D, 'M', u'ن'), - (0x1EE4E, 'M', u'س'), - (0x1EE4F, 'M', u'ع'), + (0x1EE4D, 'M', 'ن'), + (0x1EE4E, 'M', 'س'), + (0x1EE4F, 'M', 'ع'), (0x1EE50, 'X'), - (0x1EE51, 'M', u'ص'), - (0x1EE52, 'M', u'ق'), + (0x1EE51, 'M', 'ص'), + (0x1EE52, 'M', 'ق'), (0x1EE53, 'X'), - (0x1EE54, 'M', u'ش'), + (0x1EE54, 'M', 'ش'), (0x1EE55, 'X'), - (0x1EE57, 'M', u'خ'), + (0x1EE57, 'M', 'خ'), (0x1EE58, 'X'), - (0x1EE59, 'M', u'ض'), + (0x1EE59, 'M', 'ض'), (0x1EE5A, 'X'), - (0x1EE5B, 'M', u'غ'), + (0x1EE5B, 'M', 'غ'), 
(0x1EE5C, 'X'), - (0x1EE5D, 'M', u'ں'), + (0x1EE5D, 'M', 'ں'), (0x1EE5E, 'X'), - (0x1EE5F, 'M', u'ٯ'), + (0x1EE5F, 'M', 'ٯ'), (0x1EE60, 'X'), - (0x1EE61, 'M', u'ب'), - (0x1EE62, 'M', u'ج'), + (0x1EE61, 'M', 'ب'), + (0x1EE62, 'M', 'ج'), (0x1EE63, 'X'), - (0x1EE64, 'M', u'ه'), + (0x1EE64, 'M', 'ه'), (0x1EE65, 'X'), - (0x1EE67, 'M', u'ح'), - (0x1EE68, 'M', u'ط'), - (0x1EE69, 'M', u'ي'), - (0x1EE6A, 'M', u'ك'), + (0x1EE67, 'M', 'ح'), + (0x1EE68, 'M', 'ط'), + (0x1EE69, 'M', 'ي'), + (0x1EE6A, 'M', 'ك'), (0x1EE6B, 'X'), - (0x1EE6C, 'M', u'م'), - (0x1EE6D, 'M', u'ن'), - (0x1EE6E, 'M', u'س'), - (0x1EE6F, 'M', u'ع'), - (0x1EE70, 'M', u'ف'), - (0x1EE71, 'M', u'ص'), - (0x1EE72, 'M', u'ق'), + (0x1EE6C, 'M', 'م'), + (0x1EE6D, 'M', 'ن'), + (0x1EE6E, 'M', 'س'), + (0x1EE6F, 'M', 'ع'), + (0x1EE70, 'M', 'ف'), + (0x1EE71, 'M', 'ص'), + (0x1EE72, 'M', 'ق'), (0x1EE73, 'X'), - (0x1EE74, 'M', u'ش'), - (0x1EE75, 'M', u'ت'), - (0x1EE76, 'M', u'ث'), - (0x1EE77, 'M', u'خ'), + (0x1EE74, 'M', 'ش'), + (0x1EE75, 'M', 'ت'), + (0x1EE76, 'M', 'ث'), + (0x1EE77, 'M', 'خ'), (0x1EE78, 'X'), - (0x1EE79, 'M', u'ض'), - (0x1EE7A, 'M', u'ظ'), - (0x1EE7B, 'M', u'غ'), - (0x1EE7C, 'M', u'ٮ'), + (0x1EE79, 'M', 'ض'), + (0x1EE7A, 'M', 'ظ'), + (0x1EE7B, 'M', 'غ'), + (0x1EE7C, 'M', 'ٮ'), (0x1EE7D, 'X'), - (0x1EE7E, 'M', u'ڡ'), + (0x1EE7E, 'M', 'ڡ'), (0x1EE7F, 'X'), - (0x1EE80, 'M', u'ا'), - (0x1EE81, 'M', u'ب'), - (0x1EE82, 'M', u'ج'), - (0x1EE83, 'M', u'د'), - ] - -def _seg_70(): - return [ - (0x1EE84, 'M', u'ه'), - (0x1EE85, 'M', u'و'), - (0x1EE86, 'M', u'ز'), - (0x1EE87, 'M', u'ح'), - (0x1EE88, 'M', u'ط'), - (0x1EE89, 'M', u'ي'), + (0x1EE80, 'M', 'ا'), + (0x1EE81, 'M', 'ب'), + (0x1EE82, 'M', 'ج'), + (0x1EE83, 'M', 'د'), + (0x1EE84, 'M', 'ه'), + (0x1EE85, 'M', 'و'), + (0x1EE86, 'M', 'ز'), + (0x1EE87, 'M', 'ح'), + (0x1EE88, 'M', 'ط'), + (0x1EE89, 'M', 'ي'), (0x1EE8A, 'X'), - (0x1EE8B, 'M', u'ل'), - (0x1EE8C, 'M', u'م'), - (0x1EE8D, 'M', u'ن'), - (0x1EE8E, 'M', u'س'), - (0x1EE8F, 'M', u'ع'), - (0x1EE90, 'M', u'ف'), - (0x1EE91, 'M', u'ص'), - (0x1EE92, 'M', u'ق'), - (0x1EE93, 'M', u'ر'), - (0x1EE94, 'M', u'ش'), - (0x1EE95, 'M', u'ت'), - (0x1EE96, 'M', u'ث'), - (0x1EE97, 'M', u'خ'), - (0x1EE98, 'M', u'ذ'), - (0x1EE99, 'M', u'ض'), - (0x1EE9A, 'M', u'ظ'), - (0x1EE9B, 'M', u'غ'), + (0x1EE8B, 'M', 'ل'), + (0x1EE8C, 'M', 'م'), + (0x1EE8D, 'M', 'ن'), + (0x1EE8E, 'M', 'س'), + (0x1EE8F, 'M', 'ع'), + (0x1EE90, 'M', 'ف'), + (0x1EE91, 'M', 'ص'), + (0x1EE92, 'M', 'ق'), + (0x1EE93, 'M', 'ر'), + (0x1EE94, 'M', 'ش'), + (0x1EE95, 'M', 'ت'), + (0x1EE96, 'M', 'ث'), + (0x1EE97, 'M', 'خ'), + (0x1EE98, 'M', 'ذ'), + (0x1EE99, 'M', 'ض'), + (0x1EE9A, 'M', 'ظ'), + (0x1EE9B, 'M', 'غ'), (0x1EE9C, 'X'), - (0x1EEA1, 'M', u'ب'), - (0x1EEA2, 'M', u'ج'), - (0x1EEA3, 'M', u'د'), + (0x1EEA1, 'M', 'ب'), + (0x1EEA2, 'M', 'ج'), + (0x1EEA3, 'M', 'د'), (0x1EEA4, 'X'), - (0x1EEA5, 'M', u'و'), - (0x1EEA6, 'M', u'ز'), - (0x1EEA7, 'M', u'ح'), - (0x1EEA8, 'M', u'ط'), - (0x1EEA9, 'M', u'ي'), + (0x1EEA5, 'M', 'و'), + (0x1EEA6, 'M', 'ز'), + (0x1EEA7, 'M', 'ح'), + (0x1EEA8, 'M', 'ط'), + (0x1EEA9, 'M', 'ي'), (0x1EEAA, 'X'), - (0x1EEAB, 'M', u'ل'), - (0x1EEAC, 'M', u'م'), - (0x1EEAD, 'M', u'ن'), - (0x1EEAE, 'M', u'س'), - (0x1EEAF, 'M', u'ع'), - (0x1EEB0, 'M', u'ف'), - (0x1EEB1, 'M', u'ص'), - (0x1EEB2, 'M', u'ق'), - (0x1EEB3, 'M', u'ر'), - (0x1EEB4, 'M', u'ش'), - (0x1EEB5, 'M', u'ت'), - (0x1EEB6, 'M', u'ث'), - (0x1EEB7, 'M', u'خ'), - (0x1EEB8, 'M', u'ذ'), - (0x1EEB9, 'M', u'ض'), - (0x1EEBA, 'M', u'ظ'), - (0x1EEBB, 'M', u'غ'), + (0x1EEAB, 'M', 'ل'), + (0x1EEAC, 'M', 'م'), + (0x1EEAD, 'M', 'ن'), 
+ (0x1EEAE, 'M', 'س'), + (0x1EEAF, 'M', 'ع'), + (0x1EEB0, 'M', 'ف'), + (0x1EEB1, 'M', 'ص'), + (0x1EEB2, 'M', 'ق'), + (0x1EEB3, 'M', 'ر'), + (0x1EEB4, 'M', 'ش'), + ] + +def _seg_74() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1EEB5, 'M', 'ت'), + (0x1EEB6, 'M', 'ث'), + (0x1EEB7, 'M', 'خ'), + (0x1EEB8, 'M', 'ذ'), + (0x1EEB9, 'M', 'ض'), + (0x1EEBA, 'M', 'ظ'), + (0x1EEBB, 'M', 'غ'), (0x1EEBC, 'X'), (0x1EEF0, 'V'), (0x1EEF2, 'X'), @@ -7354,173 +7728,172 @@ def _seg_70(): (0x1F0D0, 'X'), (0x1F0D1, 'V'), (0x1F0F6, 'X'), - (0x1F101, '3', u'0,'), - (0x1F102, '3', u'1,'), - (0x1F103, '3', u'2,'), - (0x1F104, '3', u'3,'), - (0x1F105, '3', u'4,'), - (0x1F106, '3', u'5,'), - (0x1F107, '3', u'6,'), - (0x1F108, '3', u'7,'), - (0x1F109, '3', u'8,'), - (0x1F10A, '3', u'9,'), + (0x1F101, '3', '0,'), + (0x1F102, '3', '1,'), + (0x1F103, '3', '2,'), + (0x1F104, '3', '3,'), + (0x1F105, '3', '4,'), + (0x1F106, '3', '5,'), + (0x1F107, '3', '6,'), + (0x1F108, '3', '7,'), + (0x1F109, '3', '8,'), + (0x1F10A, '3', '9,'), (0x1F10B, 'V'), - (0x1F10D, 'X'), - (0x1F110, '3', u'(a)'), - (0x1F111, '3', u'(b)'), - (0x1F112, '3', u'(c)'), - (0x1F113, '3', u'(d)'), - (0x1F114, '3', u'(e)'), - (0x1F115, '3', u'(f)'), - (0x1F116, '3', u'(g)'), - (0x1F117, '3', u'(h)'), - (0x1F118, '3', u'(i)'), - (0x1F119, '3', u'(j)'), - (0x1F11A, '3', u'(k)'), - (0x1F11B, '3', u'(l)'), - (0x1F11C, '3', u'(m)'), - (0x1F11D, '3', u'(n)'), - (0x1F11E, '3', u'(o)'), - (0x1F11F, '3', u'(p)'), - (0x1F120, '3', u'(q)'), - (0x1F121, '3', u'(r)'), - (0x1F122, '3', u'(s)'), - (0x1F123, '3', u'(t)'), - (0x1F124, '3', u'(u)'), + (0x1F110, '3', '(a)'), + (0x1F111, '3', '(b)'), + (0x1F112, '3', '(c)'), + (0x1F113, '3', '(d)'), + (0x1F114, '3', '(e)'), + (0x1F115, '3', '(f)'), + (0x1F116, '3', '(g)'), + (0x1F117, '3', '(h)'), + (0x1F118, '3', '(i)'), + (0x1F119, '3', '(j)'), + (0x1F11A, '3', '(k)'), + (0x1F11B, '3', '(l)'), + (0x1F11C, '3', '(m)'), + (0x1F11D, '3', '(n)'), + (0x1F11E, '3', '(o)'), + (0x1F11F, '3', '(p)'), + (0x1F120, '3', '(q)'), + (0x1F121, '3', '(r)'), + (0x1F122, '3', '(s)'), + (0x1F123, '3', '(t)'), + (0x1F124, '3', '(u)'), + (0x1F125, '3', '(v)'), + (0x1F126, '3', '(w)'), + (0x1F127, '3', '(x)'), + (0x1F128, '3', '(y)'), + (0x1F129, '3', '(z)'), + (0x1F12A, 'M', '〔s〕'), + (0x1F12B, 'M', 'c'), + (0x1F12C, 'M', 'r'), + (0x1F12D, 'M', 'cd'), + (0x1F12E, 'M', 'wz'), + (0x1F12F, 'V'), + (0x1F130, 'M', 'a'), + (0x1F131, 'M', 'b'), + (0x1F132, 'M', 'c'), + (0x1F133, 'M', 'd'), + (0x1F134, 'M', 'e'), + (0x1F135, 'M', 'f'), + (0x1F136, 'M', 'g'), + (0x1F137, 'M', 'h'), + (0x1F138, 'M', 'i'), + (0x1F139, 'M', 'j'), + (0x1F13A, 'M', 'k'), + (0x1F13B, 'M', 'l'), + (0x1F13C, 'M', 'm'), + (0x1F13D, 'M', 'n'), + (0x1F13E, 'M', 'o'), + (0x1F13F, 'M', 'p'), + (0x1F140, 'M', 'q'), + (0x1F141, 'M', 'r'), + (0x1F142, 'M', 's'), + (0x1F143, 'M', 't'), + (0x1F144, 'M', 'u'), + (0x1F145, 'M', 'v'), + (0x1F146, 'M', 'w'), + (0x1F147, 'M', 'x'), + (0x1F148, 'M', 'y'), + (0x1F149, 'M', 'z'), + (0x1F14A, 'M', 'hv'), + (0x1F14B, 'M', 'mv'), + (0x1F14C, 'M', 'sd'), + (0x1F14D, 'M', 'ss'), + (0x1F14E, 'M', 'ppv'), + (0x1F14F, 'M', 'wc'), + (0x1F150, 'V'), + (0x1F16A, 'M', 'mc'), + (0x1F16B, 'M', 'md'), ] -def _seg_71(): +def _seg_75() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: return [ - (0x1F125, '3', u'(v)'), - (0x1F126, '3', u'(w)'), - (0x1F127, '3', u'(x)'), - (0x1F128, '3', u'(y)'), - (0x1F129, '3', u'(z)'), - (0x1F12A, 'M', u'〔s〕'), - (0x1F12B, 'M', u'c'), - (0x1F12C, 'M', u'r'), - (0x1F12D, 'M', u'cd'), - (0x1F12E, 'M', 
u'wz'), - (0x1F12F, 'V'), - (0x1F130, 'M', u'a'), - (0x1F131, 'M', u'b'), - (0x1F132, 'M', u'c'), - (0x1F133, 'M', u'd'), - (0x1F134, 'M', u'e'), - (0x1F135, 'M', u'f'), - (0x1F136, 'M', u'g'), - (0x1F137, 'M', u'h'), - (0x1F138, 'M', u'i'), - (0x1F139, 'M', u'j'), - (0x1F13A, 'M', u'k'), - (0x1F13B, 'M', u'l'), - (0x1F13C, 'M', u'm'), - (0x1F13D, 'M', u'n'), - (0x1F13E, 'M', u'o'), - (0x1F13F, 'M', u'p'), - (0x1F140, 'M', u'q'), - (0x1F141, 'M', u'r'), - (0x1F142, 'M', u's'), - (0x1F143, 'M', u't'), - (0x1F144, 'M', u'u'), - (0x1F145, 'M', u'v'), - (0x1F146, 'M', u'w'), - (0x1F147, 'M', u'x'), - (0x1F148, 'M', u'y'), - (0x1F149, 'M', u'z'), - (0x1F14A, 'M', u'hv'), - (0x1F14B, 'M', u'mv'), - (0x1F14C, 'M', u'sd'), - (0x1F14D, 'M', u'ss'), - (0x1F14E, 'M', u'ppv'), - (0x1F14F, 'M', u'wc'), - (0x1F150, 'V'), - (0x1F16A, 'M', u'mc'), - (0x1F16B, 'M', u'md'), - (0x1F16C, 'X'), - (0x1F170, 'V'), - (0x1F190, 'M', u'dj'), + (0x1F16C, 'M', 'mr'), + (0x1F16D, 'V'), + (0x1F190, 'M', 'dj'), (0x1F191, 'V'), - (0x1F1AD, 'X'), + (0x1F1AE, 'X'), (0x1F1E6, 'V'), - (0x1F200, 'M', u'ほか'), - (0x1F201, 'M', u'ココ'), - (0x1F202, 'M', u'サ'), + (0x1F200, 'M', 'ほか'), + (0x1F201, 'M', 'ココ'), + (0x1F202, 'M', 'サ'), (0x1F203, 'X'), - (0x1F210, 'M', u'手'), - (0x1F211, 'M', u'字'), - (0x1F212, 'M', u'双'), - (0x1F213, 'M', u'デ'), - (0x1F214, 'M', u'二'), - (0x1F215, 'M', u'多'), - (0x1F216, 'M', u'解'), - (0x1F217, 'M', u'天'), - (0x1F218, 'M', u'交'), - (0x1F219, 'M', u'映'), - (0x1F21A, 'M', u'無'), - (0x1F21B, 'M', u'料'), - (0x1F21C, 'M', u'前'), - (0x1F21D, 'M', u'後'), - (0x1F21E, 'M', u'再'), - (0x1F21F, 'M', u'新'), - (0x1F220, 'M', u'初'), - (0x1F221, 'M', u'終'), - (0x1F222, 'M', u'生'), - (0x1F223, 'M', u'販'), - (0x1F224, 'M', u'声'), - (0x1F225, 'M', u'吹'), - (0x1F226, 'M', u'演'), - (0x1F227, 'M', u'投'), - (0x1F228, 'M', u'捕'), - (0x1F229, 'M', u'一'), - (0x1F22A, 'M', u'三'), - (0x1F22B, 'M', u'遊'), - (0x1F22C, 'M', u'左'), - (0x1F22D, 'M', u'中'), - (0x1F22E, 'M', u'右'), - (0x1F22F, 'M', u'指'), - (0x1F230, 'M', u'走'), - (0x1F231, 'M', u'打'), - (0x1F232, 'M', u'禁'), - (0x1F233, 'M', u'空'), - (0x1F234, 'M', u'合'), - (0x1F235, 'M', u'満'), - (0x1F236, 'M', u'有'), - (0x1F237, 'M', u'月'), - (0x1F238, 'M', u'申'), - (0x1F239, 'M', u'割'), - (0x1F23A, 'M', u'営'), - (0x1F23B, 'M', u'配'), - ] - -def _seg_72(): - return [ + (0x1F210, 'M', '手'), + (0x1F211, 'M', '字'), + (0x1F212, 'M', '双'), + (0x1F213, 'M', 'デ'), + (0x1F214, 'M', '二'), + (0x1F215, 'M', '多'), + (0x1F216, 'M', '解'), + (0x1F217, 'M', '天'), + (0x1F218, 'M', '交'), + (0x1F219, 'M', '映'), + (0x1F21A, 'M', '無'), + (0x1F21B, 'M', '料'), + (0x1F21C, 'M', '前'), + (0x1F21D, 'M', '後'), + (0x1F21E, 'M', '再'), + (0x1F21F, 'M', '新'), + (0x1F220, 'M', '初'), + (0x1F221, 'M', '終'), + (0x1F222, 'M', '生'), + (0x1F223, 'M', '販'), + (0x1F224, 'M', '声'), + (0x1F225, 'M', '吹'), + (0x1F226, 'M', '演'), + (0x1F227, 'M', '投'), + (0x1F228, 'M', '捕'), + (0x1F229, 'M', '一'), + (0x1F22A, 'M', '三'), + (0x1F22B, 'M', '遊'), + (0x1F22C, 'M', '左'), + (0x1F22D, 'M', '中'), + (0x1F22E, 'M', '右'), + (0x1F22F, 'M', '指'), + (0x1F230, 'M', '走'), + (0x1F231, 'M', '打'), + (0x1F232, 'M', '禁'), + (0x1F233, 'M', '空'), + (0x1F234, 'M', '合'), + (0x1F235, 'M', '満'), + (0x1F236, 'M', '有'), + (0x1F237, 'M', '月'), + (0x1F238, 'M', '申'), + (0x1F239, 'M', '割'), + (0x1F23A, 'M', '営'), + (0x1F23B, 'M', '配'), (0x1F23C, 'X'), - (0x1F240, 'M', u'〔本〕'), - (0x1F241, 'M', u'〔三〕'), - (0x1F242, 'M', u'〔二〕'), - (0x1F243, 'M', u'〔安〕'), - (0x1F244, 'M', u'〔点〕'), - (0x1F245, 'M', u'〔打〕'), - (0x1F246, 'M', u'〔盗〕'), - (0x1F247, 'M', u'〔勝〕'), - 
(0x1F248, 'M', u'〔敗〕'), + (0x1F240, 'M', '〔本〕'), + (0x1F241, 'M', '〔三〕'), + (0x1F242, 'M', '〔二〕'), + (0x1F243, 'M', '〔安〕'), + (0x1F244, 'M', '〔点〕'), + (0x1F245, 'M', '〔打〕'), + (0x1F246, 'M', '〔盗〕'), + (0x1F247, 'M', '〔勝〕'), + (0x1F248, 'M', '〔敗〕'), (0x1F249, 'X'), - (0x1F250, 'M', u'得'), - (0x1F251, 'M', u'可'), + (0x1F250, 'M', '得'), + (0x1F251, 'M', '可'), (0x1F252, 'X'), (0x1F260, 'V'), (0x1F266, 'X'), (0x1F300, 'V'), - (0x1F6D5, 'X'), - (0x1F6E0, 'V'), + (0x1F6D8, 'X'), + (0x1F6DC, 'V'), (0x1F6ED, 'X'), (0x1F6F0, 'V'), - (0x1F6FA, 'X'), + (0x1F6FD, 'X'), (0x1F700, 'V'), - (0x1F774, 'X'), - (0x1F780, 'V'), - (0x1F7D9, 'X'), + (0x1F777, 'X'), + (0x1F77B, 'V'), + (0x1F7DA, 'X'), + (0x1F7E0, 'V'), + (0x1F7EC, 'X'), + (0x1F7F0, 'V'), + (0x1F7F1, 'X'), (0x1F800, 'V'), (0x1F80C, 'X'), (0x1F810, 'V'), @@ -7531,594 +7904,613 @@ def _seg_72(): (0x1F888, 'X'), (0x1F890, 'V'), (0x1F8AE, 'X'), + (0x1F8B0, 'V'), + (0x1F8B2, 'X'), (0x1F900, 'V'), - (0x1F90C, 'X'), - (0x1F910, 'V'), - (0x1F93F, 'X'), - (0x1F940, 'V'), - (0x1F971, 'X'), - (0x1F973, 'V'), - (0x1F977, 'X'), - (0x1F97A, 'V'), - (0x1F97B, 'X'), - (0x1F97C, 'V'), - (0x1F9A3, 'X'), - (0x1F9B0, 'V'), - (0x1F9BA, 'X'), - (0x1F9C0, 'V'), - (0x1F9C3, 'X'), - (0x1F9D0, 'V'), - (0x1FA00, 'X'), + (0x1FA54, 'X'), (0x1FA60, 'V'), (0x1FA6E, 'X'), + ] + +def _seg_76() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1FA70, 'V'), + (0x1FA7D, 'X'), + (0x1FA80, 'V'), + (0x1FA89, 'X'), + (0x1FA90, 'V'), + (0x1FABE, 'X'), + (0x1FABF, 'V'), + (0x1FAC6, 'X'), + (0x1FACE, 'V'), + (0x1FADC, 'X'), + (0x1FAE0, 'V'), + (0x1FAE9, 'X'), + (0x1FAF0, 'V'), + (0x1FAF9, 'X'), + (0x1FB00, 'V'), + (0x1FB93, 'X'), + (0x1FB94, 'V'), + (0x1FBCB, 'X'), + (0x1FBF0, 'M', '0'), + (0x1FBF1, 'M', '1'), + (0x1FBF2, 'M', '2'), + (0x1FBF3, 'M', '3'), + (0x1FBF4, 'M', '4'), + (0x1FBF5, 'M', '5'), + (0x1FBF6, 'M', '6'), + (0x1FBF7, 'M', '7'), + (0x1FBF8, 'M', '8'), + (0x1FBF9, 'M', '9'), + (0x1FBFA, 'X'), (0x20000, 'V'), - (0x2A6D7, 'X'), + (0x2A6E0, 'X'), (0x2A700, 'V'), - (0x2B735, 'X'), + (0x2B73A, 'X'), (0x2B740, 'V'), (0x2B81E, 'X'), (0x2B820, 'V'), (0x2CEA2, 'X'), (0x2CEB0, 'V'), (0x2EBE1, 'X'), - (0x2F800, 'M', u'丽'), - (0x2F801, 'M', u'丸'), - (0x2F802, 'M', u'乁'), - (0x2F803, 'M', u'𠄢'), - (0x2F804, 'M', u'你'), - (0x2F805, 'M', u'侮'), - (0x2F806, 'M', u'侻'), - (0x2F807, 'M', u'倂'), - (0x2F808, 'M', u'偺'), - (0x2F809, 'M', u'備'), - (0x2F80A, 'M', u'僧'), - (0x2F80B, 'M', u'像'), - (0x2F80C, 'M', u'㒞'), - (0x2F80D, 'M', u'𠘺'), - (0x2F80E, 'M', u'免'), - (0x2F80F, 'M', u'兔'), - (0x2F810, 'M', u'兤'), - (0x2F811, 'M', u'具'), - (0x2F812, 'M', u'𠔜'), - (0x2F813, 'M', u'㒹'), - (0x2F814, 'M', u'內'), - (0x2F815, 'M', u'再'), - (0x2F816, 'M', u'𠕋'), - (0x2F817, 'M', u'冗'), - (0x2F818, 'M', u'冤'), - (0x2F819, 'M', u'仌'), - (0x2F81A, 'M', u'冬'), - (0x2F81B, 'M', u'况'), - (0x2F81C, 'M', u'𩇟'), - (0x2F81D, 'M', u'凵'), - (0x2F81E, 'M', u'刃'), - (0x2F81F, 'M', u'㓟'), - (0x2F820, 'M', u'刻'), - (0x2F821, 'M', u'剆'), + (0x2F800, 'M', '丽'), + (0x2F801, 'M', '丸'), + (0x2F802, 'M', '乁'), + (0x2F803, 'M', '𠄢'), + (0x2F804, 'M', '你'), + (0x2F805, 'M', '侮'), + (0x2F806, 'M', '侻'), + (0x2F807, 'M', '倂'), + (0x2F808, 'M', '偺'), + (0x2F809, 'M', '備'), + (0x2F80A, 'M', '僧'), + (0x2F80B, 'M', '像'), + (0x2F80C, 'M', '㒞'), + (0x2F80D, 'M', '𠘺'), + (0x2F80E, 'M', '免'), + (0x2F80F, 'M', '兔'), + (0x2F810, 'M', '兤'), + (0x2F811, 'M', '具'), + (0x2F812, 'M', '𠔜'), + (0x2F813, 'M', '㒹'), + (0x2F814, 'M', '內'), + (0x2F815, 'M', '再'), + (0x2F816, 'M', '𠕋'), + (0x2F817, 'M', '冗'), + (0x2F818, 'M', '冤'), 
+ (0x2F819, 'M', '仌'), + (0x2F81A, 'M', '冬'), + (0x2F81B, 'M', '况'), + (0x2F81C, 'M', '𩇟'), + (0x2F81D, 'M', '凵'), + (0x2F81E, 'M', '刃'), + (0x2F81F, 'M', '㓟'), + (0x2F820, 'M', '刻'), + (0x2F821, 'M', '剆'), + (0x2F822, 'M', '割'), + (0x2F823, 'M', '剷'), + (0x2F824, 'M', '㔕'), + (0x2F825, 'M', '勇'), + (0x2F826, 'M', '勉'), + (0x2F827, 'M', '勤'), + (0x2F828, 'M', '勺'), + (0x2F829, 'M', '包'), + (0x2F82A, 'M', '匆'), + (0x2F82B, 'M', '北'), + (0x2F82C, 'M', '卉'), + (0x2F82D, 'M', '卑'), + (0x2F82E, 'M', '博'), + (0x2F82F, 'M', '即'), + (0x2F830, 'M', '卽'), + (0x2F831, 'M', '卿'), + (0x2F834, 'M', '𠨬'), + (0x2F835, 'M', '灰'), + (0x2F836, 'M', '及'), + (0x2F837, 'M', '叟'), + (0x2F838, 'M', '𠭣'), + (0x2F839, 'M', '叫'), + (0x2F83A, 'M', '叱'), + (0x2F83B, 'M', '吆'), + (0x2F83C, 'M', '咞'), + (0x2F83D, 'M', '吸'), + (0x2F83E, 'M', '呈'), ] -def _seg_73(): +def _seg_77() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: return [ - (0x2F822, 'M', u'割'), - (0x2F823, 'M', u'剷'), - (0x2F824, 'M', u'㔕'), - (0x2F825, 'M', u'勇'), - (0x2F826, 'M', u'勉'), - (0x2F827, 'M', u'勤'), - (0x2F828, 'M', u'勺'), - (0x2F829, 'M', u'包'), - (0x2F82A, 'M', u'匆'), - (0x2F82B, 'M', u'北'), - (0x2F82C, 'M', u'卉'), - (0x2F82D, 'M', u'卑'), - (0x2F82E, 'M', u'博'), - (0x2F82F, 'M', u'即'), - (0x2F830, 'M', u'卽'), - (0x2F831, 'M', u'卿'), - (0x2F834, 'M', u'𠨬'), - (0x2F835, 'M', u'灰'), - (0x2F836, 'M', u'及'), - (0x2F837, 'M', u'叟'), - (0x2F838, 'M', u'𠭣'), - (0x2F839, 'M', u'叫'), - (0x2F83A, 'M', u'叱'), - (0x2F83B, 'M', u'吆'), - (0x2F83C, 'M', u'咞'), - (0x2F83D, 'M', u'吸'), - (0x2F83E, 'M', u'呈'), - (0x2F83F, 'M', u'周'), - (0x2F840, 'M', u'咢'), - (0x2F841, 'M', u'哶'), - (0x2F842, 'M', u'唐'), - (0x2F843, 'M', u'啓'), - (0x2F844, 'M', u'啣'), - (0x2F845, 'M', u'善'), - (0x2F847, 'M', u'喙'), - (0x2F848, 'M', u'喫'), - (0x2F849, 'M', u'喳'), - (0x2F84A, 'M', u'嗂'), - (0x2F84B, 'M', u'圖'), - (0x2F84C, 'M', u'嘆'), - (0x2F84D, 'M', u'圗'), - (0x2F84E, 'M', u'噑'), - (0x2F84F, 'M', u'噴'), - (0x2F850, 'M', u'切'), - (0x2F851, 'M', u'壮'), - (0x2F852, 'M', u'城'), - (0x2F853, 'M', u'埴'), - (0x2F854, 'M', u'堍'), - (0x2F855, 'M', u'型'), - (0x2F856, 'M', u'堲'), - (0x2F857, 'M', u'報'), - (0x2F858, 'M', u'墬'), - (0x2F859, 'M', u'𡓤'), - (0x2F85A, 'M', u'売'), - (0x2F85B, 'M', u'壷'), - (0x2F85C, 'M', u'夆'), - (0x2F85D, 'M', u'多'), - (0x2F85E, 'M', u'夢'), - (0x2F85F, 'M', u'奢'), - (0x2F860, 'M', u'𡚨'), - (0x2F861, 'M', u'𡛪'), - (0x2F862, 'M', u'姬'), - (0x2F863, 'M', u'娛'), - (0x2F864, 'M', u'娧'), - (0x2F865, 'M', u'姘'), - (0x2F866, 'M', u'婦'), - (0x2F867, 'M', u'㛮'), + (0x2F83F, 'M', '周'), + (0x2F840, 'M', '咢'), + (0x2F841, 'M', '哶'), + (0x2F842, 'M', '唐'), + (0x2F843, 'M', '啓'), + (0x2F844, 'M', '啣'), + (0x2F845, 'M', '善'), + (0x2F847, 'M', '喙'), + (0x2F848, 'M', '喫'), + (0x2F849, 'M', '喳'), + (0x2F84A, 'M', '嗂'), + (0x2F84B, 'M', '圖'), + (0x2F84C, 'M', '嘆'), + (0x2F84D, 'M', '圗'), + (0x2F84E, 'M', '噑'), + (0x2F84F, 'M', '噴'), + (0x2F850, 'M', '切'), + (0x2F851, 'M', '壮'), + (0x2F852, 'M', '城'), + (0x2F853, 'M', '埴'), + (0x2F854, 'M', '堍'), + (0x2F855, 'M', '型'), + (0x2F856, 'M', '堲'), + (0x2F857, 'M', '報'), + (0x2F858, 'M', '墬'), + (0x2F859, 'M', '𡓤'), + (0x2F85A, 'M', '売'), + (0x2F85B, 'M', '壷'), + (0x2F85C, 'M', '夆'), + (0x2F85D, 'M', '多'), + (0x2F85E, 'M', '夢'), + (0x2F85F, 'M', '奢'), + (0x2F860, 'M', '𡚨'), + (0x2F861, 'M', '𡛪'), + (0x2F862, 'M', '姬'), + (0x2F863, 'M', '娛'), + (0x2F864, 'M', '娧'), + (0x2F865, 'M', '姘'), + (0x2F866, 'M', '婦'), + (0x2F867, 'M', '㛮'), (0x2F868, 'X'), - (0x2F869, 'M', u'嬈'), - (0x2F86A, 'M', u'嬾'), - (0x2F86C, 'M', u'𡧈'), - (0x2F86D, 
'M', u'寃'), - (0x2F86E, 'M', u'寘'), - (0x2F86F, 'M', u'寧'), - (0x2F870, 'M', u'寳'), - (0x2F871, 'M', u'𡬘'), - (0x2F872, 'M', u'寿'), - (0x2F873, 'M', u'将'), + (0x2F869, 'M', '嬈'), + (0x2F86A, 'M', '嬾'), + (0x2F86C, 'M', '𡧈'), + (0x2F86D, 'M', '寃'), + (0x2F86E, 'M', '寘'), + (0x2F86F, 'M', '寧'), + (0x2F870, 'M', '寳'), + (0x2F871, 'M', '𡬘'), + (0x2F872, 'M', '寿'), + (0x2F873, 'M', '将'), (0x2F874, 'X'), - (0x2F875, 'M', u'尢'), - (0x2F876, 'M', u'㞁'), - (0x2F877, 'M', u'屠'), - (0x2F878, 'M', u'屮'), - (0x2F879, 'M', u'峀'), - (0x2F87A, 'M', u'岍'), - (0x2F87B, 'M', u'𡷤'), - (0x2F87C, 'M', u'嵃'), - (0x2F87D, 'M', u'𡷦'), - (0x2F87E, 'M', u'嵮'), - (0x2F87F, 'M', u'嵫'), - (0x2F880, 'M', u'嵼'), - (0x2F881, 'M', u'巡'), - (0x2F882, 'M', u'巢'), - (0x2F883, 'M', u'㠯'), - (0x2F884, 'M', u'巽'), - (0x2F885, 'M', u'帨'), - (0x2F886, 'M', u'帽'), - (0x2F887, 'M', u'幩'), - (0x2F888, 'M', u'㡢'), - (0x2F889, 'M', u'𢆃'), + (0x2F875, 'M', '尢'), + (0x2F876, 'M', '㞁'), + (0x2F877, 'M', '屠'), + (0x2F878, 'M', '屮'), + (0x2F879, 'M', '峀'), + (0x2F87A, 'M', '岍'), + (0x2F87B, 'M', '𡷤'), + (0x2F87C, 'M', '嵃'), + (0x2F87D, 'M', '𡷦'), + (0x2F87E, 'M', '嵮'), + (0x2F87F, 'M', '嵫'), + (0x2F880, 'M', '嵼'), + (0x2F881, 'M', '巡'), + (0x2F882, 'M', '巢'), + (0x2F883, 'M', '㠯'), + (0x2F884, 'M', '巽'), + (0x2F885, 'M', '帨'), + (0x2F886, 'M', '帽'), + (0x2F887, 'M', '幩'), + (0x2F888, 'M', '㡢'), + (0x2F889, 'M', '𢆃'), + (0x2F88A, 'M', '㡼'), + (0x2F88B, 'M', '庰'), + (0x2F88C, 'M', '庳'), + (0x2F88D, 'M', '庶'), + (0x2F88E, 'M', '廊'), + (0x2F88F, 'M', '𪎒'), + (0x2F890, 'M', '廾'), + (0x2F891, 'M', '𢌱'), + (0x2F893, 'M', '舁'), + (0x2F894, 'M', '弢'), + (0x2F896, 'M', '㣇'), + (0x2F897, 'M', '𣊸'), + (0x2F898, 'M', '𦇚'), + (0x2F899, 'M', '形'), + (0x2F89A, 'M', '彫'), + (0x2F89B, 'M', '㣣'), + (0x2F89C, 'M', '徚'), + (0x2F89D, 'M', '忍'), + (0x2F89E, 'M', '志'), + (0x2F89F, 'M', '忹'), + (0x2F8A0, 'M', '悁'), + (0x2F8A1, 'M', '㤺'), + (0x2F8A2, 'M', '㤜'), + (0x2F8A3, 'M', '悔'), + (0x2F8A4, 'M', '𢛔'), + (0x2F8A5, 'M', '惇'), + (0x2F8A6, 'M', '慈'), ] -def _seg_74(): +def _seg_78() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: return [ - (0x2F88A, 'M', u'㡼'), - (0x2F88B, 'M', u'庰'), - (0x2F88C, 'M', u'庳'), - (0x2F88D, 'M', u'庶'), - (0x2F88E, 'M', u'廊'), - (0x2F88F, 'M', u'𪎒'), - (0x2F890, 'M', u'廾'), - (0x2F891, 'M', u'𢌱'), - (0x2F893, 'M', u'舁'), - (0x2F894, 'M', u'弢'), - (0x2F896, 'M', u'㣇'), - (0x2F897, 'M', u'𣊸'), - (0x2F898, 'M', u'𦇚'), - (0x2F899, 'M', u'形'), - (0x2F89A, 'M', u'彫'), - (0x2F89B, 'M', u'㣣'), - (0x2F89C, 'M', u'徚'), - (0x2F89D, 'M', u'忍'), - (0x2F89E, 'M', u'志'), - (0x2F89F, 'M', u'忹'), - (0x2F8A0, 'M', u'悁'), - (0x2F8A1, 'M', u'㤺'), - (0x2F8A2, 'M', u'㤜'), - (0x2F8A3, 'M', u'悔'), - (0x2F8A4, 'M', u'𢛔'), - (0x2F8A5, 'M', u'惇'), - (0x2F8A6, 'M', u'慈'), - (0x2F8A7, 'M', u'慌'), - (0x2F8A8, 'M', u'慎'), - (0x2F8A9, 'M', u'慌'), - (0x2F8AA, 'M', u'慺'), - (0x2F8AB, 'M', u'憎'), - (0x2F8AC, 'M', u'憲'), - (0x2F8AD, 'M', u'憤'), - (0x2F8AE, 'M', u'憯'), - (0x2F8AF, 'M', u'懞'), - (0x2F8B0, 'M', u'懲'), - (0x2F8B1, 'M', u'懶'), - (0x2F8B2, 'M', u'成'), - (0x2F8B3, 'M', u'戛'), - (0x2F8B4, 'M', u'扝'), - (0x2F8B5, 'M', u'抱'), - (0x2F8B6, 'M', u'拔'), - (0x2F8B7, 'M', u'捐'), - (0x2F8B8, 'M', u'𢬌'), - (0x2F8B9, 'M', u'挽'), - (0x2F8BA, 'M', u'拼'), - (0x2F8BB, 'M', u'捨'), - (0x2F8BC, 'M', u'掃'), - (0x2F8BD, 'M', u'揤'), - (0x2F8BE, 'M', u'𢯱'), - (0x2F8BF, 'M', u'搢'), - (0x2F8C0, 'M', u'揅'), - (0x2F8C1, 'M', u'掩'), - (0x2F8C2, 'M', u'㨮'), - (0x2F8C3, 'M', u'摩'), - (0x2F8C4, 'M', u'摾'), - (0x2F8C5, 'M', u'撝'), - (0x2F8C6, 'M', u'摷'), - (0x2F8C7, 'M', u'㩬'), - 
(0x2F8C8, 'M', u'敏'), - (0x2F8C9, 'M', u'敬'), - (0x2F8CA, 'M', u'𣀊'), - (0x2F8CB, 'M', u'旣'), - (0x2F8CC, 'M', u'書'), - (0x2F8CD, 'M', u'晉'), - (0x2F8CE, 'M', u'㬙'), - (0x2F8CF, 'M', u'暑'), - (0x2F8D0, 'M', u'㬈'), - (0x2F8D1, 'M', u'㫤'), - (0x2F8D2, 'M', u'冒'), - (0x2F8D3, 'M', u'冕'), - (0x2F8D4, 'M', u'最'), - (0x2F8D5, 'M', u'暜'), - (0x2F8D6, 'M', u'肭'), - (0x2F8D7, 'M', u'䏙'), - (0x2F8D8, 'M', u'朗'), - (0x2F8D9, 'M', u'望'), - (0x2F8DA, 'M', u'朡'), - (0x2F8DB, 'M', u'杞'), - (0x2F8DC, 'M', u'杓'), - (0x2F8DD, 'M', u'𣏃'), - (0x2F8DE, 'M', u'㭉'), - (0x2F8DF, 'M', u'柺'), - (0x2F8E0, 'M', u'枅'), - (0x2F8E1, 'M', u'桒'), - (0x2F8E2, 'M', u'梅'), - (0x2F8E3, 'M', u'𣑭'), - (0x2F8E4, 'M', u'梎'), - (0x2F8E5, 'M', u'栟'), - (0x2F8E6, 'M', u'椔'), - (0x2F8E7, 'M', u'㮝'), - (0x2F8E8, 'M', u'楂'), - (0x2F8E9, 'M', u'榣'), - (0x2F8EA, 'M', u'槪'), - (0x2F8EB, 'M', u'檨'), - (0x2F8EC, 'M', u'𣚣'), - (0x2F8ED, 'M', u'櫛'), - (0x2F8EE, 'M', u'㰘'), - (0x2F8EF, 'M', u'次'), + (0x2F8A7, 'M', '慌'), + (0x2F8A8, 'M', '慎'), + (0x2F8A9, 'M', '慌'), + (0x2F8AA, 'M', '慺'), + (0x2F8AB, 'M', '憎'), + (0x2F8AC, 'M', '憲'), + (0x2F8AD, 'M', '憤'), + (0x2F8AE, 'M', '憯'), + (0x2F8AF, 'M', '懞'), + (0x2F8B0, 'M', '懲'), + (0x2F8B1, 'M', '懶'), + (0x2F8B2, 'M', '成'), + (0x2F8B3, 'M', '戛'), + (0x2F8B4, 'M', '扝'), + (0x2F8B5, 'M', '抱'), + (0x2F8B6, 'M', '拔'), + (0x2F8B7, 'M', '捐'), + (0x2F8B8, 'M', '𢬌'), + (0x2F8B9, 'M', '挽'), + (0x2F8BA, 'M', '拼'), + (0x2F8BB, 'M', '捨'), + (0x2F8BC, 'M', '掃'), + (0x2F8BD, 'M', '揤'), + (0x2F8BE, 'M', '𢯱'), + (0x2F8BF, 'M', '搢'), + (0x2F8C0, 'M', '揅'), + (0x2F8C1, 'M', '掩'), + (0x2F8C2, 'M', '㨮'), + (0x2F8C3, 'M', '摩'), + (0x2F8C4, 'M', '摾'), + (0x2F8C5, 'M', '撝'), + (0x2F8C6, 'M', '摷'), + (0x2F8C7, 'M', '㩬'), + (0x2F8C8, 'M', '敏'), + (0x2F8C9, 'M', '敬'), + (0x2F8CA, 'M', '𣀊'), + (0x2F8CB, 'M', '旣'), + (0x2F8CC, 'M', '書'), + (0x2F8CD, 'M', '晉'), + (0x2F8CE, 'M', '㬙'), + (0x2F8CF, 'M', '暑'), + (0x2F8D0, 'M', '㬈'), + (0x2F8D1, 'M', '㫤'), + (0x2F8D2, 'M', '冒'), + (0x2F8D3, 'M', '冕'), + (0x2F8D4, 'M', '最'), + (0x2F8D5, 'M', '暜'), + (0x2F8D6, 'M', '肭'), + (0x2F8D7, 'M', '䏙'), + (0x2F8D8, 'M', '朗'), + (0x2F8D9, 'M', '望'), + (0x2F8DA, 'M', '朡'), + (0x2F8DB, 'M', '杞'), + (0x2F8DC, 'M', '杓'), + (0x2F8DD, 'M', '𣏃'), + (0x2F8DE, 'M', '㭉'), + (0x2F8DF, 'M', '柺'), + (0x2F8E0, 'M', '枅'), + (0x2F8E1, 'M', '桒'), + (0x2F8E2, 'M', '梅'), + (0x2F8E3, 'M', '𣑭'), + (0x2F8E4, 'M', '梎'), + (0x2F8E5, 'M', '栟'), + (0x2F8E6, 'M', '椔'), + (0x2F8E7, 'M', '㮝'), + (0x2F8E8, 'M', '楂'), + (0x2F8E9, 'M', '榣'), + (0x2F8EA, 'M', '槪'), + (0x2F8EB, 'M', '檨'), + (0x2F8EC, 'M', '𣚣'), + (0x2F8ED, 'M', '櫛'), + (0x2F8EE, 'M', '㰘'), + (0x2F8EF, 'M', '次'), + (0x2F8F0, 'M', '𣢧'), + (0x2F8F1, 'M', '歔'), + (0x2F8F2, 'M', '㱎'), + (0x2F8F3, 'M', '歲'), + (0x2F8F4, 'M', '殟'), + (0x2F8F5, 'M', '殺'), + (0x2F8F6, 'M', '殻'), + (0x2F8F7, 'M', '𣪍'), + (0x2F8F8, 'M', '𡴋'), + (0x2F8F9, 'M', '𣫺'), + (0x2F8FA, 'M', '汎'), + (0x2F8FB, 'M', '𣲼'), + (0x2F8FC, 'M', '沿'), + (0x2F8FD, 'M', '泍'), + (0x2F8FE, 'M', '汧'), + (0x2F8FF, 'M', '洖'), + (0x2F900, 'M', '派'), + (0x2F901, 'M', '海'), + (0x2F902, 'M', '流'), + (0x2F903, 'M', '浩'), + (0x2F904, 'M', '浸'), + (0x2F905, 'M', '涅'), + (0x2F906, 'M', '𣴞'), + (0x2F907, 'M', '洴'), + (0x2F908, 'M', '港'), + (0x2F909, 'M', '湮'), + (0x2F90A, 'M', '㴳'), ] -def _seg_75(): +def _seg_79() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: return [ - (0x2F8F0, 'M', u'𣢧'), - (0x2F8F1, 'M', u'歔'), - (0x2F8F2, 'M', u'㱎'), - (0x2F8F3, 'M', u'歲'), - (0x2F8F4, 'M', u'殟'), - (0x2F8F5, 'M', u'殺'), - (0x2F8F6, 'M', u'殻'), - (0x2F8F7, 'M', u'𣪍'), - 
(0x2F8F8, 'M', u'𡴋'), - (0x2F8F9, 'M', u'𣫺'), - (0x2F8FA, 'M', u'汎'), - (0x2F8FB, 'M', u'𣲼'), - (0x2F8FC, 'M', u'沿'), - (0x2F8FD, 'M', u'泍'), - (0x2F8FE, 'M', u'汧'), - (0x2F8FF, 'M', u'洖'), - (0x2F900, 'M', u'派'), - (0x2F901, 'M', u'海'), - (0x2F902, 'M', u'流'), - (0x2F903, 'M', u'浩'), - (0x2F904, 'M', u'浸'), - (0x2F905, 'M', u'涅'), - (0x2F906, 'M', u'𣴞'), - (0x2F907, 'M', u'洴'), - (0x2F908, 'M', u'港'), - (0x2F909, 'M', u'湮'), - (0x2F90A, 'M', u'㴳'), - (0x2F90B, 'M', u'滋'), - (0x2F90C, 'M', u'滇'), - (0x2F90D, 'M', u'𣻑'), - (0x2F90E, 'M', u'淹'), - (0x2F90F, 'M', u'潮'), - (0x2F910, 'M', u'𣽞'), - (0x2F911, 'M', u'𣾎'), - (0x2F912, 'M', u'濆'), - (0x2F913, 'M', u'瀹'), - (0x2F914, 'M', u'瀞'), - (0x2F915, 'M', u'瀛'), - (0x2F916, 'M', u'㶖'), - (0x2F917, 'M', u'灊'), - (0x2F918, 'M', u'災'), - (0x2F919, 'M', u'灷'), - (0x2F91A, 'M', u'炭'), - (0x2F91B, 'M', u'𠔥'), - (0x2F91C, 'M', u'煅'), - (0x2F91D, 'M', u'𤉣'), - (0x2F91E, 'M', u'熜'), + (0x2F90B, 'M', '滋'), + (0x2F90C, 'M', '滇'), + (0x2F90D, 'M', '𣻑'), + (0x2F90E, 'M', '淹'), + (0x2F90F, 'M', '潮'), + (0x2F910, 'M', '𣽞'), + (0x2F911, 'M', '𣾎'), + (0x2F912, 'M', '濆'), + (0x2F913, 'M', '瀹'), + (0x2F914, 'M', '瀞'), + (0x2F915, 'M', '瀛'), + (0x2F916, 'M', '㶖'), + (0x2F917, 'M', '灊'), + (0x2F918, 'M', '災'), + (0x2F919, 'M', '灷'), + (0x2F91A, 'M', '炭'), + (0x2F91B, 'M', '𠔥'), + (0x2F91C, 'M', '煅'), + (0x2F91D, 'M', '𤉣'), + (0x2F91E, 'M', '熜'), (0x2F91F, 'X'), - (0x2F920, 'M', u'爨'), - (0x2F921, 'M', u'爵'), - (0x2F922, 'M', u'牐'), - (0x2F923, 'M', u'𤘈'), - (0x2F924, 'M', u'犀'), - (0x2F925, 'M', u'犕'), - (0x2F926, 'M', u'𤜵'), - (0x2F927, 'M', u'𤠔'), - (0x2F928, 'M', u'獺'), - (0x2F929, 'M', u'王'), - (0x2F92A, 'M', u'㺬'), - (0x2F92B, 'M', u'玥'), - (0x2F92C, 'M', u'㺸'), - (0x2F92E, 'M', u'瑇'), - (0x2F92F, 'M', u'瑜'), - (0x2F930, 'M', u'瑱'), - (0x2F931, 'M', u'璅'), - (0x2F932, 'M', u'瓊'), - (0x2F933, 'M', u'㼛'), - (0x2F934, 'M', u'甤'), - (0x2F935, 'M', u'𤰶'), - (0x2F936, 'M', u'甾'), - (0x2F937, 'M', u'𤲒'), - (0x2F938, 'M', u'異'), - (0x2F939, 'M', u'𢆟'), - (0x2F93A, 'M', u'瘐'), - (0x2F93B, 'M', u'𤾡'), - (0x2F93C, 'M', u'𤾸'), - (0x2F93D, 'M', u'𥁄'), - (0x2F93E, 'M', u'㿼'), - (0x2F93F, 'M', u'䀈'), - (0x2F940, 'M', u'直'), - (0x2F941, 'M', u'𥃳'), - (0x2F942, 'M', u'𥃲'), - (0x2F943, 'M', u'𥄙'), - (0x2F944, 'M', u'𥄳'), - (0x2F945, 'M', u'眞'), - (0x2F946, 'M', u'真'), - (0x2F948, 'M', u'睊'), - (0x2F949, 'M', u'䀹'), - (0x2F94A, 'M', u'瞋'), - (0x2F94B, 'M', u'䁆'), - (0x2F94C, 'M', u'䂖'), - (0x2F94D, 'M', u'𥐝'), - (0x2F94E, 'M', u'硎'), - (0x2F94F, 'M', u'碌'), - (0x2F950, 'M', u'磌'), - (0x2F951, 'M', u'䃣'), - (0x2F952, 'M', u'𥘦'), - (0x2F953, 'M', u'祖'), - (0x2F954, 'M', u'𥚚'), - (0x2F955, 'M', u'𥛅'), - ] - -def _seg_76(): - return [ - (0x2F956, 'M', u'福'), - (0x2F957, 'M', u'秫'), - (0x2F958, 'M', u'䄯'), - (0x2F959, 'M', u'穀'), - (0x2F95A, 'M', u'穊'), - (0x2F95B, 'M', u'穏'), - (0x2F95C, 'M', u'𥥼'), - (0x2F95D, 'M', u'𥪧'), + (0x2F920, 'M', '爨'), + (0x2F921, 'M', '爵'), + (0x2F922, 'M', '牐'), + (0x2F923, 'M', '𤘈'), + (0x2F924, 'M', '犀'), + (0x2F925, 'M', '犕'), + (0x2F926, 'M', '𤜵'), + (0x2F927, 'M', '𤠔'), + (0x2F928, 'M', '獺'), + (0x2F929, 'M', '王'), + (0x2F92A, 'M', '㺬'), + (0x2F92B, 'M', '玥'), + (0x2F92C, 'M', '㺸'), + (0x2F92E, 'M', '瑇'), + (0x2F92F, 'M', '瑜'), + (0x2F930, 'M', '瑱'), + (0x2F931, 'M', '璅'), + (0x2F932, 'M', '瓊'), + (0x2F933, 'M', '㼛'), + (0x2F934, 'M', '甤'), + (0x2F935, 'M', '𤰶'), + (0x2F936, 'M', '甾'), + (0x2F937, 'M', '𤲒'), + (0x2F938, 'M', '異'), + (0x2F939, 'M', '𢆟'), + (0x2F93A, 'M', '瘐'), + (0x2F93B, 'M', '𤾡'), + (0x2F93C, 'M', '𤾸'), + (0x2F93D, 'M', '𥁄'), + 
(0x2F93E, 'M', '㿼'), + (0x2F93F, 'M', '䀈'), + (0x2F940, 'M', '直'), + (0x2F941, 'M', '𥃳'), + (0x2F942, 'M', '𥃲'), + (0x2F943, 'M', '𥄙'), + (0x2F944, 'M', '𥄳'), + (0x2F945, 'M', '眞'), + (0x2F946, 'M', '真'), + (0x2F948, 'M', '睊'), + (0x2F949, 'M', '䀹'), + (0x2F94A, 'M', '瞋'), + (0x2F94B, 'M', '䁆'), + (0x2F94C, 'M', '䂖'), + (0x2F94D, 'M', '𥐝'), + (0x2F94E, 'M', '硎'), + (0x2F94F, 'M', '碌'), + (0x2F950, 'M', '磌'), + (0x2F951, 'M', '䃣'), + (0x2F952, 'M', '𥘦'), + (0x2F953, 'M', '祖'), + (0x2F954, 'M', '𥚚'), + (0x2F955, 'M', '𥛅'), + (0x2F956, 'M', '福'), + (0x2F957, 'M', '秫'), + (0x2F958, 'M', '䄯'), + (0x2F959, 'M', '穀'), + (0x2F95A, 'M', '穊'), + (0x2F95B, 'M', '穏'), + (0x2F95C, 'M', '𥥼'), + (0x2F95D, 'M', '𥪧'), (0x2F95F, 'X'), - (0x2F960, 'M', u'䈂'), - (0x2F961, 'M', u'𥮫'), - (0x2F962, 'M', u'篆'), - (0x2F963, 'M', u'築'), - (0x2F964, 'M', u'䈧'), - (0x2F965, 'M', u'𥲀'), - (0x2F966, 'M', u'糒'), - (0x2F967, 'M', u'䊠'), - (0x2F968, 'M', u'糨'), - (0x2F969, 'M', u'糣'), - (0x2F96A, 'M', u'紀'), - (0x2F96B, 'M', u'𥾆'), - (0x2F96C, 'M', u'絣'), - (0x2F96D, 'M', u'䌁'), - (0x2F96E, 'M', u'緇'), - (0x2F96F, 'M', u'縂'), - (0x2F970, 'M', u'繅'), - (0x2F971, 'M', u'䌴'), - (0x2F972, 'M', u'𦈨'), - (0x2F973, 'M', u'𦉇'), - (0x2F974, 'M', u'䍙'), - (0x2F975, 'M', u'𦋙'), - (0x2F976, 'M', u'罺'), - (0x2F977, 'M', u'𦌾'), - (0x2F978, 'M', u'羕'), - (0x2F979, 'M', u'翺'), - (0x2F97A, 'M', u'者'), - (0x2F97B, 'M', u'𦓚'), - (0x2F97C, 'M', u'𦔣'), - (0x2F97D, 'M', u'聠'), - (0x2F97E, 'M', u'𦖨'), - (0x2F97F, 'M', u'聰'), - (0x2F980, 'M', u'𣍟'), - (0x2F981, 'M', u'䏕'), - (0x2F982, 'M', u'育'), - (0x2F983, 'M', u'脃'), - (0x2F984, 'M', u'䐋'), - (0x2F985, 'M', u'脾'), - (0x2F986, 'M', u'媵'), - (0x2F987, 'M', u'𦞧'), - (0x2F988, 'M', u'𦞵'), - (0x2F989, 'M', u'𣎓'), - (0x2F98A, 'M', u'𣎜'), - (0x2F98B, 'M', u'舁'), - (0x2F98C, 'M', u'舄'), - (0x2F98D, 'M', u'辞'), - (0x2F98E, 'M', u'䑫'), - (0x2F98F, 'M', u'芑'), - (0x2F990, 'M', u'芋'), - (0x2F991, 'M', u'芝'), - (0x2F992, 'M', u'劳'), - (0x2F993, 'M', u'花'), - (0x2F994, 'M', u'芳'), - (0x2F995, 'M', u'芽'), - (0x2F996, 'M', u'苦'), - (0x2F997, 'M', u'𦬼'), - (0x2F998, 'M', u'若'), - (0x2F999, 'M', u'茝'), - (0x2F99A, 'M', u'荣'), - (0x2F99B, 'M', u'莭'), - (0x2F99C, 'M', u'茣'), - (0x2F99D, 'M', u'莽'), - (0x2F99E, 'M', u'菧'), - (0x2F99F, 'M', u'著'), - (0x2F9A0, 'M', u'荓'), - (0x2F9A1, 'M', u'菊'), - (0x2F9A2, 'M', u'菌'), - (0x2F9A3, 'M', u'菜'), - (0x2F9A4, 'M', u'𦰶'), - (0x2F9A5, 'M', u'𦵫'), - (0x2F9A6, 'M', u'𦳕'), - (0x2F9A7, 'M', u'䔫'), - (0x2F9A8, 'M', u'蓱'), - (0x2F9A9, 'M', u'蓳'), - (0x2F9AA, 'M', u'蔖'), - (0x2F9AB, 'M', u'𧏊'), - (0x2F9AC, 'M', u'蕤'), - (0x2F9AD, 'M', u'𦼬'), - (0x2F9AE, 'M', u'䕝'), - (0x2F9AF, 'M', u'䕡'), - (0x2F9B0, 'M', u'𦾱'), - (0x2F9B1, 'M', u'𧃒'), - (0x2F9B2, 'M', u'䕫'), - (0x2F9B3, 'M', u'虐'), - (0x2F9B4, 'M', u'虜'), - (0x2F9B5, 'M', u'虧'), - (0x2F9B6, 'M', u'虩'), - (0x2F9B7, 'M', u'蚩'), - (0x2F9B8, 'M', u'蚈'), - (0x2F9B9, 'M', u'蜎'), - (0x2F9BA, 'M', u'蛢'), + (0x2F960, 'M', '䈂'), + (0x2F961, 'M', '𥮫'), + (0x2F962, 'M', '篆'), + (0x2F963, 'M', '築'), + (0x2F964, 'M', '䈧'), + (0x2F965, 'M', '𥲀'), + (0x2F966, 'M', '糒'), + (0x2F967, 'M', '䊠'), + (0x2F968, 'M', '糨'), + (0x2F969, 'M', '糣'), + (0x2F96A, 'M', '紀'), + (0x2F96B, 'M', '𥾆'), + (0x2F96C, 'M', '絣'), + (0x2F96D, 'M', '䌁'), + (0x2F96E, 'M', '緇'), + (0x2F96F, 'M', '縂'), + (0x2F970, 'M', '繅'), + (0x2F971, 'M', '䌴'), ] -def _seg_77(): +def _seg_80() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: return [ - (0x2F9BB, 'M', u'蝹'), - (0x2F9BC, 'M', u'蜨'), - (0x2F9BD, 'M', u'蝫'), - (0x2F9BE, 'M', u'螆'), + (0x2F972, 'M', '𦈨'), + (0x2F973, 
'M', '𦉇'), + (0x2F974, 'M', '䍙'), + (0x2F975, 'M', '𦋙'), + (0x2F976, 'M', '罺'), + (0x2F977, 'M', '𦌾'), + (0x2F978, 'M', '羕'), + (0x2F979, 'M', '翺'), + (0x2F97A, 'M', '者'), + (0x2F97B, 'M', '𦓚'), + (0x2F97C, 'M', '𦔣'), + (0x2F97D, 'M', '聠'), + (0x2F97E, 'M', '𦖨'), + (0x2F97F, 'M', '聰'), + (0x2F980, 'M', '𣍟'), + (0x2F981, 'M', '䏕'), + (0x2F982, 'M', '育'), + (0x2F983, 'M', '脃'), + (0x2F984, 'M', '䐋'), + (0x2F985, 'M', '脾'), + (0x2F986, 'M', '媵'), + (0x2F987, 'M', '𦞧'), + (0x2F988, 'M', '𦞵'), + (0x2F989, 'M', '𣎓'), + (0x2F98A, 'M', '𣎜'), + (0x2F98B, 'M', '舁'), + (0x2F98C, 'M', '舄'), + (0x2F98D, 'M', '辞'), + (0x2F98E, 'M', '䑫'), + (0x2F98F, 'M', '芑'), + (0x2F990, 'M', '芋'), + (0x2F991, 'M', '芝'), + (0x2F992, 'M', '劳'), + (0x2F993, 'M', '花'), + (0x2F994, 'M', '芳'), + (0x2F995, 'M', '芽'), + (0x2F996, 'M', '苦'), + (0x2F997, 'M', '𦬼'), + (0x2F998, 'M', '若'), + (0x2F999, 'M', '茝'), + (0x2F99A, 'M', '荣'), + (0x2F99B, 'M', '莭'), + (0x2F99C, 'M', '茣'), + (0x2F99D, 'M', '莽'), + (0x2F99E, 'M', '菧'), + (0x2F99F, 'M', '著'), + (0x2F9A0, 'M', '荓'), + (0x2F9A1, 'M', '菊'), + (0x2F9A2, 'M', '菌'), + (0x2F9A3, 'M', '菜'), + (0x2F9A4, 'M', '𦰶'), + (0x2F9A5, 'M', '𦵫'), + (0x2F9A6, 'M', '𦳕'), + (0x2F9A7, 'M', '䔫'), + (0x2F9A8, 'M', '蓱'), + (0x2F9A9, 'M', '蓳'), + (0x2F9AA, 'M', '蔖'), + (0x2F9AB, 'M', '𧏊'), + (0x2F9AC, 'M', '蕤'), + (0x2F9AD, 'M', '𦼬'), + (0x2F9AE, 'M', '䕝'), + (0x2F9AF, 'M', '䕡'), + (0x2F9B0, 'M', '𦾱'), + (0x2F9B1, 'M', '𧃒'), + (0x2F9B2, 'M', '䕫'), + (0x2F9B3, 'M', '虐'), + (0x2F9B4, 'M', '虜'), + (0x2F9B5, 'M', '虧'), + (0x2F9B6, 'M', '虩'), + (0x2F9B7, 'M', '蚩'), + (0x2F9B8, 'M', '蚈'), + (0x2F9B9, 'M', '蜎'), + (0x2F9BA, 'M', '蛢'), + (0x2F9BB, 'M', '蝹'), + (0x2F9BC, 'M', '蜨'), + (0x2F9BD, 'M', '蝫'), + (0x2F9BE, 'M', '螆'), (0x2F9BF, 'X'), - (0x2F9C0, 'M', u'蟡'), - (0x2F9C1, 'M', u'蠁'), - (0x2F9C2, 'M', u'䗹'), - (0x2F9C3, 'M', u'衠'), - (0x2F9C4, 'M', u'衣'), - (0x2F9C5, 'M', u'𧙧'), - (0x2F9C6, 'M', u'裗'), - (0x2F9C7, 'M', u'裞'), - (0x2F9C8, 'M', u'䘵'), - (0x2F9C9, 'M', u'裺'), - (0x2F9CA, 'M', u'㒻'), - (0x2F9CB, 'M', u'𧢮'), - (0x2F9CC, 'M', u'𧥦'), - (0x2F9CD, 'M', u'䚾'), - (0x2F9CE, 'M', u'䛇'), - (0x2F9CF, 'M', u'誠'), - (0x2F9D0, 'M', u'諭'), - (0x2F9D1, 'M', u'變'), - (0x2F9D2, 'M', u'豕'), - (0x2F9D3, 'M', u'𧲨'), - (0x2F9D4, 'M', u'貫'), - (0x2F9D5, 'M', u'賁'), - (0x2F9D6, 'M', u'贛'), - (0x2F9D7, 'M', u'起'), - (0x2F9D8, 'M', u'𧼯'), - (0x2F9D9, 'M', u'𠠄'), - (0x2F9DA, 'M', u'跋'), - (0x2F9DB, 'M', u'趼'), - (0x2F9DC, 'M', u'跰'), - (0x2F9DD, 'M', u'𠣞'), - (0x2F9DE, 'M', u'軔'), - (0x2F9DF, 'M', u'輸'), - (0x2F9E0, 'M', u'𨗒'), - (0x2F9E1, 'M', u'𨗭'), - (0x2F9E2, 'M', u'邔'), - (0x2F9E3, 'M', u'郱'), - (0x2F9E4, 'M', u'鄑'), - (0x2F9E5, 'M', u'𨜮'), - (0x2F9E6, 'M', u'鄛'), - (0x2F9E7, 'M', u'鈸'), - (0x2F9E8, 'M', u'鋗'), - (0x2F9E9, 'M', u'鋘'), - (0x2F9EA, 'M', u'鉼'), - (0x2F9EB, 'M', u'鏹'), - (0x2F9EC, 'M', u'鐕'), - (0x2F9ED, 'M', u'𨯺'), - (0x2F9EE, 'M', u'開'), - (0x2F9EF, 'M', u'䦕'), - (0x2F9F0, 'M', u'閷'), - (0x2F9F1, 'M', u'𨵷'), - (0x2F9F2, 'M', u'䧦'), - (0x2F9F3, 'M', u'雃'), - (0x2F9F4, 'M', u'嶲'), - (0x2F9F5, 'M', u'霣'), - (0x2F9F6, 'M', u'𩅅'), - (0x2F9F7, 'M', u'𩈚'), - (0x2F9F8, 'M', u'䩮'), - (0x2F9F9, 'M', u'䩶'), - (0x2F9FA, 'M', u'韠'), - (0x2F9FB, 'M', u'𩐊'), - (0x2F9FC, 'M', u'䪲'), - (0x2F9FD, 'M', u'𩒖'), - (0x2F9FE, 'M', u'頋'), - (0x2FA00, 'M', u'頩'), - (0x2FA01, 'M', u'𩖶'), - (0x2FA02, 'M', u'飢'), - (0x2FA03, 'M', u'䬳'), - (0x2FA04, 'M', u'餩'), - (0x2FA05, 'M', u'馧'), - (0x2FA06, 'M', u'駂'), - (0x2FA07, 'M', u'駾'), - (0x2FA08, 'M', u'䯎'), - (0x2FA09, 'M', u'𩬰'), - (0x2FA0A, 'M', u'鬒'), - (0x2FA0B, 'M', u'鱀'), - 
(0x2FA0C, 'M', u'鳽'), - (0x2FA0D, 'M', u'䳎'), - (0x2FA0E, 'M', u'䳭'), - (0x2FA0F, 'M', u'鵧'), - (0x2FA10, 'M', u'𪃎'), - (0x2FA11, 'M', u'䳸'), - (0x2FA12, 'M', u'𪄅'), - (0x2FA13, 'M', u'𪈎'), - (0x2FA14, 'M', u'𪊑'), - (0x2FA15, 'M', u'麻'), - (0x2FA16, 'M', u'䵖'), - (0x2FA17, 'M', u'黹'), - (0x2FA18, 'M', u'黾'), - (0x2FA19, 'M', u'鼅'), - (0x2FA1A, 'M', u'鼏'), - (0x2FA1B, 'M', u'鼖'), - (0x2FA1C, 'M', u'鼻'), - (0x2FA1D, 'M', u'𪘀'), - (0x2FA1E, 'X'), - (0xE0100, 'I'), + (0x2F9C0, 'M', '蟡'), + (0x2F9C1, 'M', '蠁'), + (0x2F9C2, 'M', '䗹'), + (0x2F9C3, 'M', '衠'), + (0x2F9C4, 'M', '衣'), + (0x2F9C5, 'M', '𧙧'), + (0x2F9C6, 'M', '裗'), + (0x2F9C7, 'M', '裞'), + (0x2F9C8, 'M', '䘵'), + (0x2F9C9, 'M', '裺'), + (0x2F9CA, 'M', '㒻'), + (0x2F9CB, 'M', '𧢮'), + (0x2F9CC, 'M', '𧥦'), + (0x2F9CD, 'M', '䚾'), + (0x2F9CE, 'M', '䛇'), + (0x2F9CF, 'M', '誠'), + (0x2F9D0, 'M', '諭'), + (0x2F9D1, 'M', '變'), + (0x2F9D2, 'M', '豕'), + (0x2F9D3, 'M', '𧲨'), + (0x2F9D4, 'M', '貫'), + (0x2F9D5, 'M', '賁'), ] -def _seg_78(): +def _seg_81() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: return [ + (0x2F9D6, 'M', '贛'), + (0x2F9D7, 'M', '起'), + (0x2F9D8, 'M', '𧼯'), + (0x2F9D9, 'M', '𠠄'), + (0x2F9DA, 'M', '跋'), + (0x2F9DB, 'M', '趼'), + (0x2F9DC, 'M', '跰'), + (0x2F9DD, 'M', '𠣞'), + (0x2F9DE, 'M', '軔'), + (0x2F9DF, 'M', '輸'), + (0x2F9E0, 'M', '𨗒'), + (0x2F9E1, 'M', '𨗭'), + (0x2F9E2, 'M', '邔'), + (0x2F9E3, 'M', '郱'), + (0x2F9E4, 'M', '鄑'), + (0x2F9E5, 'M', '𨜮'), + (0x2F9E6, 'M', '鄛'), + (0x2F9E7, 'M', '鈸'), + (0x2F9E8, 'M', '鋗'), + (0x2F9E9, 'M', '鋘'), + (0x2F9EA, 'M', '鉼'), + (0x2F9EB, 'M', '鏹'), + (0x2F9EC, 'M', '鐕'), + (0x2F9ED, 'M', '𨯺'), + (0x2F9EE, 'M', '開'), + (0x2F9EF, 'M', '䦕'), + (0x2F9F0, 'M', '閷'), + (0x2F9F1, 'M', '𨵷'), + (0x2F9F2, 'M', '䧦'), + (0x2F9F3, 'M', '雃'), + (0x2F9F4, 'M', '嶲'), + (0x2F9F5, 'M', '霣'), + (0x2F9F6, 'M', '𩅅'), + (0x2F9F7, 'M', '𩈚'), + (0x2F9F8, 'M', '䩮'), + (0x2F9F9, 'M', '䩶'), + (0x2F9FA, 'M', '韠'), + (0x2F9FB, 'M', '𩐊'), + (0x2F9FC, 'M', '䪲'), + (0x2F9FD, 'M', '𩒖'), + (0x2F9FE, 'M', '頋'), + (0x2FA00, 'M', '頩'), + (0x2FA01, 'M', '𩖶'), + (0x2FA02, 'M', '飢'), + (0x2FA03, 'M', '䬳'), + (0x2FA04, 'M', '餩'), + (0x2FA05, 'M', '馧'), + (0x2FA06, 'M', '駂'), + (0x2FA07, 'M', '駾'), + (0x2FA08, 'M', '䯎'), + (0x2FA09, 'M', '𩬰'), + (0x2FA0A, 'M', '鬒'), + (0x2FA0B, 'M', '鱀'), + (0x2FA0C, 'M', '鳽'), + (0x2FA0D, 'M', '䳎'), + (0x2FA0E, 'M', '䳭'), + (0x2FA0F, 'M', '鵧'), + (0x2FA10, 'M', '𪃎'), + (0x2FA11, 'M', '䳸'), + (0x2FA12, 'M', '𪄅'), + (0x2FA13, 'M', '𪈎'), + (0x2FA14, 'M', '𪊑'), + (0x2FA15, 'M', '麻'), + (0x2FA16, 'M', '䵖'), + (0x2FA17, 'M', '黹'), + (0x2FA18, 'M', '黾'), + (0x2FA19, 'M', '鼅'), + (0x2FA1A, 'M', '鼏'), + (0x2FA1B, 'M', '鼖'), + (0x2FA1C, 'M', '鼻'), + (0x2FA1D, 'M', '𪘀'), + (0x2FA1E, 'X'), + (0x30000, 'V'), + (0x3134B, 'X'), + (0x31350, 'V'), + (0x323B0, 'X'), + (0xE0100, 'I'), (0xE01F0, 'X'), ] @@ -8202,4 +8594,7 @@ def _seg_78(): + _seg_76() + _seg_77() + _seg_78() -) + + _seg_79() + + _seg_80() + + _seg_81() +) # type: Tuple[Union[Tuple[int, str], Tuple[int, str, str]], ...] 
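A note on the table regenerated above: each _seg_* function returns rows of the form (codepoint_start, status) or (codepoint_start, status, mapping), and the final uts46data tuple concatenates the segments in codepoint order. The sketch below shows how such a sorted range table is typically queried; it mirrors the bisect-based lookup idna performs internally (in uts46_remap), but the table contents and names here are illustrative assumptions, not the library's public API.

import bisect
from typing import Tuple, Union

# A row is (start, status) or (start, status, mapping), sorted by start.
Row = Union[Tuple[int, str], Tuple[int, str, str]]

# Tiny illustrative slice of a uts46-style table (rows taken from the diff).
table: Tuple[Row, ...] = (
    (0x1D7F6, 'M', '0'),   # 'M' = mapped: this codepoint maps to '0'
    (0x1D800, 'V'),        # 'V' = valid: start of a range of valid codepoints
    (0x1DA8C, 'X'),        # 'X' = disallowed: start of a disallowed range
)

def lookup(code_point: int) -> Row:
    """Return the row whose range contains code_point."""
    # 'Z' sorts after every real status letter, so bisect lands just past
    # any row that starts exactly at code_point; the governing row is idx-1.
    idx = bisect.bisect_left(table, (code_point, 'Z'))
    return table[idx - 1]

print(lookup(0x1D7F6))  # -> (0x1D7F6, 'M', '0'): mapped to ASCII '0'
print(lookup(0x1D9FF))  # falls inside the 'V' range that begins at 0x1D800

The key design point is that the table stores only range starts; membership is implied by the next row's start, which keeps the generated module small despite covering the full codepoint space.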
diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_metadata-4.11.3.dist-info/INSTALLER b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_metadata-4.11.3.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_metadata-4.11.3.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_metadata-4.11.3.dist-info/LICENSE b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_metadata-4.11.3.dist-info/LICENSE new file mode 100644 index 00000000..be7e092b --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_metadata-4.11.3.dist-info/LICENSE @@ -0,0 +1,13 @@ +Copyright 2017-2019 Jason R. Coombs, Barry Warsaw + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_metadata-4.11.3.dist-info/METADATA b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_metadata-4.11.3.dist-info/METADATA new file mode 100644 index 00000000..d4c34743 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_metadata-4.11.3.dist-info/METADATA @@ -0,0 +1,118 @@ +Metadata-Version: 2.1 +Name: importlib-metadata +Version: 4.11.3 +Summary: Read metadata from Python packages +Home-page: https://github.com/python/importlib_metadata +Author: Jason R. Coombs +Author-email: jaraco@jaraco.com +License: UNKNOWN +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Apache Software License +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Requires-Python: >=3.7 +License-File: LICENSE +Requires-Dist: zipp (>=0.5) +Requires-Dist: typing-extensions (>=3.6.4) ; python_version < "3.8" +Provides-Extra: docs +Requires-Dist: sphinx ; extra == 'docs' +Requires-Dist: jaraco.packaging (>=9) ; extra == 'docs' +Requires-Dist: rst.linker (>=1.9) ; extra == 'docs' +Provides-Extra: perf +Requires-Dist: ipython ; extra == 'perf' +Provides-Extra: testing +Requires-Dist: pytest (>=6) ; extra == 'testing' +Requires-Dist: pytest-checkdocs (>=2.4) ; extra == 'testing' +Requires-Dist: pytest-flake8 ; extra == 'testing' +Requires-Dist: pytest-cov ; extra == 'testing' +Requires-Dist: pytest-enabler (>=1.0.1) ; extra == 'testing' +Requires-Dist: packaging ; extra == 'testing' +Requires-Dist: pyfakefs ; extra == 'testing' +Requires-Dist: flufl.flake8 ; extra == 'testing' +Requires-Dist: pytest-perf (>=0.9.2) ; extra == 'testing' +Requires-Dist: pytest-black (>=0.3.7) ; (platform_python_implementation != "PyPy") and extra == 'testing' +Requires-Dist: pytest-mypy (>=0.9.1) ; (platform_python_implementation != "PyPy") and extra == 'testing' +Requires-Dist: importlib-resources (>=1.3) ; (python_version < "3.9") and extra == 'testing' + +.. 
image:: https://img.shields.io/pypi/v/importlib_metadata.svg
+   :target: `PyPI link`_
+
+.. image:: https://img.shields.io/pypi/pyversions/importlib_metadata.svg
+   :target: `PyPI link`_
+
+.. _PyPI link: https://pypi.org/project/importlib_metadata
+
+.. image:: https://github.com/python/importlib_metadata/workflows/tests/badge.svg
+   :target: https://github.com/python/importlib_metadata/actions?query=workflow%3A%22tests%22
+   :alt: tests
+
+.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
+   :target: https://github.com/psf/black
+   :alt: Code style: Black
+
+.. image:: https://readthedocs.org/projects/importlib-metadata/badge/?version=latest
+   :target: https://importlib-metadata.readthedocs.io/en/latest/?badge=latest
+
+.. image:: https://img.shields.io/badge/skeleton-2022-informational
+   :target: https://blog.jaraco.com/skeleton
+
+
+Library to access the metadata for a Python package.
+
+This package supplies third-party access to the functionality of
+`importlib.metadata <https://docs.python.org/3/library/importlib.metadata.html>`_
+including improvements added to subsequent Python versions.
+
+
+Compatibility
+=============
+
+New features are introduced in this third-party library and later merged
+into CPython. The following table indicates which versions of this library
+were contributed to different versions in the standard library:
+
+.. list-table::
+   :header-rows: 1
+
+   * - importlib_metadata
+     - stdlib
+   * - 4.8
+     - 3.11
+   * - 4.4
+     - 3.10
+   * - 1.4
+     - 3.8
+
+
+Usage
+=====
+
+See the `online documentation <https://importlib_metadata.readthedocs.io/>`_
+for usage details.
+
+`Finder authors
+<https://docs.python.org/3/reference/import.html#finders-and-loaders>`_ can
+also add support for custom package installers. See the above documentation
+for details.
+
+
+Caveats
+=======
+
+This project primarily supports third-party packages installed by PyPA
+tools (or other conforming packages). It does not support:
+
+- Packages in the stdlib.
+- Packages installed without metadata.
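To accompany the Usage section of this vendored README, here is a short sketch of the library's top-level API (all names appear in the package's __all__ below). It assumes the bundled importlib_metadata is importable; the distribution name 'requests' is purely illustrative.

from importlib_metadata import (
    PackageNotFoundError,
    distribution,
    entry_points,
    version,
)

try:
    # Read the installed version string from the package's metadata.
    print(version('requests'))        # e.g. '2.28.1'
    dist = distribution('requests')
    print(dist.metadata['Summary'])   # one-line project description
except PackageNotFoundError:
    print('requests is not installed (or was installed without metadata)')

# Entry points can be filtered by group via the selectable interface.
for ep in entry_points(group='console_scripts'):
    print(ep.name, '->', ep.value)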
+ +Project details +=============== + + * Project home: https://github.com/python/importlib_metadata + * Report bugs at: https://github.com/python/importlib_metadata/issues + * Code hosting: https://github.com/python/importlib_metadata + * Documentation: https://importlib_metadata.readthedocs.io/ + + diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_metadata-4.11.3.dist-info/RECORD b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_metadata-4.11.3.dist-info/RECORD new file mode 100644 index 00000000..f32cdab7 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_metadata-4.11.3.dist-info/RECORD @@ -0,0 +1,15 @@ +importlib_metadata-4.11.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +importlib_metadata-4.11.3.dist-info/LICENSE,sha256=wNe6dAchmJ1VvVB8D9oTc-gHHadCuaSBAev36sYEM6U,571 +importlib_metadata-4.11.3.dist-info/METADATA,sha256=QDN8bGG98uILiLVIoBDBL7qf1y40Vb_Pvp1fxPJtmS0,3997 +importlib_metadata-4.11.3.dist-info/RECORD,, +importlib_metadata-4.11.3.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92 +importlib_metadata-4.11.3.dist-info/top_level.txt,sha256=CO3fD9yylANiXkrMo4qHLV_mqXL2sC5JFKgt1yWAT-A,19 +importlib_metadata/__init__.py,sha256=7VjJ9nthPlNrL_TriHRiT5Ta-ZBsBnXrZq65pRfGNIs,30889 +importlib_metadata/_adapters.py,sha256=B6fCi5-8mLVDFUZj3krI5nAo-mKp1dH_qIavyIyFrJs,1862 +importlib_metadata/_collections.py,sha256=CJ0OTCHIjWA0ZIVS4voORAsn2R4R2cQBEtPsZEJpASY,743 +importlib_metadata/_compat.py,sha256=EU2XCFBPFByuI0Of6XkAuBYbzqSyjwwwwqmsK4ccna0,1826 +importlib_metadata/_functools.py,sha256=PsY2-4rrKX4RVeRC1oGp1lB1pmC9eKN88_f-bD9uOoA,2895 +importlib_metadata/_itertools.py,sha256=cvr_2v8BRbxcIl5x5ldfqdHjhI8Yi8s8yk50G_nm6jQ,2068 +importlib_metadata/_meta.py,sha256=_F48Hu_jFxkfKWz5wcYS8vO23qEygbVdF9r-6qh-hjE,1154 +importlib_metadata/_text.py,sha256=HCsFksZpJLeTP3NEk_ngrAeXVRRtTrtyh9eOABoRP4A,2166 +importlib_metadata/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_metadata-4.11.3.dist-info/WHEEL b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_metadata-4.11.3.dist-info/WHEEL new file mode 100644 index 00000000..becc9a66 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_metadata-4.11.3.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.1) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_metadata-4.11.3.dist-info/top_level.txt b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_metadata-4.11.3.dist-info/top_level.txt new file mode 100644 index 00000000..bbb07547 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_metadata-4.11.3.dist-info/top_level.txt @@ -0,0 +1 @@ +importlib_metadata diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_metadata-5.2.0.dist-info/INSTALLER b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_metadata-5.2.0.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_metadata-5.2.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/solnlib/packages/splunklib/LICENSE b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_metadata-5.2.0.dist-info/LICENSE similarity index 100% rename from 
Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/solnlib/packages/splunklib/LICENSE rename to Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_metadata-5.2.0.dist-info/LICENSE diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_metadata-5.2.0.dist-info/METADATA b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_metadata-5.2.0.dist-info/METADATA new file mode 100644 index 00000000..125e26ae --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_metadata-5.2.0.dist-info/METADATA @@ -0,0 +1,135 @@ +Metadata-Version: 2.1 +Name: importlib-metadata +Version: 5.2.0 +Summary: Read metadata from Python packages +Home-page: https://github.com/python/importlib_metadata +Author: Jason R. Coombs +Author-email: jaraco@jaraco.com +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Apache Software License +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Requires-Python: >=3.7 +License-File: LICENSE +Requires-Dist: zipp (>=0.5) +Requires-Dist: typing-extensions (>=3.6.4) ; python_version < "3.8" +Provides-Extra: docs +Requires-Dist: sphinx (>=3.5) ; extra == 'docs' +Requires-Dist: jaraco.packaging (>=9) ; extra == 'docs' +Requires-Dist: rst.linker (>=1.9) ; extra == 'docs' +Requires-Dist: furo ; extra == 'docs' +Requires-Dist: sphinx-lint ; extra == 'docs' +Requires-Dist: jaraco.tidelift (>=1.4) ; extra == 'docs' +Provides-Extra: perf +Requires-Dist: ipython ; extra == 'perf' +Provides-Extra: testing +Requires-Dist: pytest (>=6) ; extra == 'testing' +Requires-Dist: pytest-checkdocs (>=2.4) ; extra == 'testing' +Requires-Dist: flake8 (<5) ; extra == 'testing' +Requires-Dist: pytest-cov ; extra == 'testing' +Requires-Dist: pytest-enabler (>=1.3) ; extra == 'testing' +Requires-Dist: packaging ; extra == 'testing' +Requires-Dist: pyfakefs ; extra == 'testing' +Requires-Dist: flufl.flake8 ; extra == 'testing' +Requires-Dist: pytest-perf (>=0.9.2) ; extra == 'testing' +Requires-Dist: pytest-black (>=0.3.7) ; (platform_python_implementation != "PyPy") and extra == 'testing' +Requires-Dist: pytest-mypy (>=0.9.1) ; (platform_python_implementation != "PyPy") and extra == 'testing' +Requires-Dist: pytest-flake8 ; (python_version < "3.12") and extra == 'testing' +Requires-Dist: importlib-resources (>=1.3) ; (python_version < "3.9") and extra == 'testing' + +.. image:: https://img.shields.io/pypi/v/importlib_metadata.svg + :target: https://pypi.org/project/importlib_metadata + +.. image:: https://img.shields.io/pypi/pyversions/importlib_metadata.svg + +.. image:: https://github.com/python/importlib_metadata/workflows/tests/badge.svg + :target: https://github.com/python/importlib_metadata/actions?query=workflow%3A%22tests%22 + :alt: tests + +.. image:: https://img.shields.io/badge/code%20style-black-000000.svg + :target: https://github.com/psf/black + :alt: Code style: Black + +.. image:: https://readthedocs.org/projects/importlib-metadata/badge/?version=latest + :target: https://importlib-metadata.readthedocs.io/en/latest/?badge=latest + +.. image:: https://img.shields.io/badge/skeleton-2022-informational + :target: https://blog.jaraco.com/skeleton + +.. image:: https://tidelift.com/badges/package/pypi/importlib-metadata + :target: https://tidelift.com/subscription/pkg/pypi-importlib-metadata?utm_source=pypi-importlib-metadata&utm_medium=readme + +Library to access the metadata for a Python package. 
+
+This package supplies third-party access to the functionality of
+`importlib.metadata <https://docs.python.org/3/library/importlib.metadata.html>`_
+including improvements added to subsequent Python versions.
+
+
+Compatibility
+=============
+
+New features are introduced in this third-party library and later merged
+into CPython. The following table indicates which versions of this library
+were contributed to different versions in the standard library:
+
+.. list-table::
+   :header-rows: 1
+
+   * - importlib_metadata
+     - stdlib
+   * - 5.0
+     - 3.12
+   * - 4.13
+     - 3.11
+   * - 4.6
+     - 3.10
+   * - 1.4
+     - 3.8
+
+
+Usage
+=====
+
+See the `online documentation <https://importlib_metadata.readthedocs.io/>`_
+for usage details.
+
+`Finder authors
+<https://docs.python.org/3/reference/import.html#finders-and-loaders>`_ can
+also add support for custom package installers. See the above documentation
+for details.
+
+
+Caveats
+=======
+
+This project primarily supports third-party packages installed by PyPA
+tools (or other conforming packages). It does not support:
+
+- Packages in the stdlib.
+- Packages installed without metadata.
+
+Project details
+===============
+
+ * Project home: https://github.com/python/importlib_metadata
+ * Report bugs at: https://github.com/python/importlib_metadata/issues
+ * Code hosting: https://github.com/python/importlib_metadata
+ * Documentation: https://importlib_metadata.readthedocs.io/
+
+For Enterprise
+==============
+
+Available as part of the Tidelift Subscription.
+
+This project and the maintainers of thousands of other packages are working with Tidelift to deliver one enterprise subscription that covers all of the open source you use.
+
+`Learn more <https://tidelift.com/subscription/pkg/pypi-importlib-metadata?utm_source=pypi-importlib-metadata&utm_medium=readme>`_.
+
+Security Contact
+================
+
+To report a security vulnerability, please use the
+`Tidelift security contact <https://tidelift.com/security>`_.
+Tidelift will coordinate the fix and disclosure.
diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_metadata-5.2.0.dist-info/RECORD b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_metadata-5.2.0.dist-info/RECORD
new file mode 100644
index 00000000..90f256fe
--- /dev/null
+++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_metadata-5.2.0.dist-info/RECORD
@@ -0,0 +1,16 @@
+importlib_metadata-5.2.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+importlib_metadata-5.2.0.dist-info/LICENSE,sha256=z8d0m5b2O9McPEK1xHG_dWgUBT6EfBDz6wA0F7xSPTA,11358
+importlib_metadata-5.2.0.dist-info/METADATA,sha256=1LZWIE1zQ08SyxMfRZ8KqvwAybh6yoaeg24Xy-naMHU,4958
+importlib_metadata-5.2.0.dist-info/RECORD,,
+importlib_metadata-5.2.0.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92
+importlib_metadata-5.2.0.dist-info/top_level.txt,sha256=CO3fD9yylANiXkrMo4qHLV_mqXL2sC5JFKgt1yWAT-A,19
+importlib_metadata/__init__.py,sha256=3J7V1nbWzGh1U_RUGcPToL0na6iaNTZmUmm4mjhk_GM,26467
+importlib_metadata/_adapters.py,sha256=i8S6Ib1OQjcILA-l4gkzktMZe18TaeUNI49PLRp6OBU,2454
+importlib_metadata/_collections.py,sha256=CJ0OTCHIjWA0ZIVS4voORAsn2R4R2cQBEtPsZEJpASY,743
+importlib_metadata/_compat.py,sha256=9zOKf0eDgkCMnnaEhU5kQVxHd1P8BIYV7Stso7av5h8,1857
+importlib_metadata/_functools.py,sha256=PsY2-4rrKX4RVeRC1oGp1lB1pmC9eKN88_f-bD9uOoA,2895
+importlib_metadata/_itertools.py,sha256=cvr_2v8BRbxcIl5x5ldfqdHjhI8Yi8s8yk50G_nm6jQ,2068
+importlib_metadata/_meta.py,sha256=v5e1ZDG7yZTH3h7TjbS5bM5p8AGzMPVOu8skDMv4h6k,1165
+importlib_metadata/_py39compat.py,sha256=2Tk5twb_VgLCY-1NEAQjdZp_S9OFMC-pUzP2isuaPsQ,1098
+importlib_metadata/_text.py,sha256=HCsFksZpJLeTP3NEk_ngrAeXVRRtTrtyh9eOABoRP4A,2166
+importlib_metadata/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
diff --git
a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_metadata-5.2.0.dist-info/WHEEL b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_metadata-5.2.0.dist-info/WHEEL new file mode 100644 index 00000000..57e3d840 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_metadata-5.2.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.38.4) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_metadata-5.2.0.dist-info/top_level.txt b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_metadata-5.2.0.dist-info/top_level.txt new file mode 100644 index 00000000..bbb07547 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_metadata-5.2.0.dist-info/top_level.txt @@ -0,0 +1 @@ +importlib_metadata diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_metadata/__init__.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_metadata/__init__.py new file mode 100644 index 00000000..26a1388c --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_metadata/__init__.py @@ -0,0 +1,904 @@ +import os +import re +import abc +import csv +import sys +import zipp +import email +import pathlib +import operator +import textwrap +import warnings +import functools +import itertools +import posixpath +import collections + +from . import _adapters, _meta, _py39compat +from ._collections import FreezableDefaultDict, Pair +from ._compat import ( + NullFinder, + install, + pypy_partial, +) +from ._functools import method_cache, pass_none +from ._itertools import always_iterable, unique_everseen +from ._meta import PackageMetadata, SimplePath + +from contextlib import suppress +from importlib import import_module +from importlib.abc import MetaPathFinder +from itertools import starmap +from typing import List, Mapping, Optional + + +__all__ = [ + 'Distribution', + 'DistributionFinder', + 'PackageMetadata', + 'PackageNotFoundError', + 'distribution', + 'distributions', + 'entry_points', + 'files', + 'metadata', + 'packages_distributions', + 'requires', + 'version', +] + + +class PackageNotFoundError(ModuleNotFoundError): + """The package was not found.""" + + def __str__(self): + return f"No package metadata was found for {self.name}" + + @property + def name(self): + (name,) = self.args + return name + + +class Sectioned: + """ + A simple entry point config parser for performance + + >>> for item in Sectioned.read(Sectioned._sample): + ... 
print(item) + Pair(name='sec1', value='# comments ignored') + Pair(name='sec1', value='a = 1') + Pair(name='sec1', value='b = 2') + Pair(name='sec2', value='a = 2') + + >>> res = Sectioned.section_pairs(Sectioned._sample) + >>> item = next(res) + >>> item.name + 'sec1' + >>> item.value + Pair(name='a', value='1') + >>> item = next(res) + >>> item.value + Pair(name='b', value='2') + >>> item = next(res) + >>> item.name + 'sec2' + >>> item.value + Pair(name='a', value='2') + >>> list(res) + [] + """ + + _sample = textwrap.dedent( + """ + [sec1] + # comments ignored + a = 1 + b = 2 + + [sec2] + a = 2 + """ + ).lstrip() + + @classmethod + def section_pairs(cls, text): + return ( + section._replace(value=Pair.parse(section.value)) + for section in cls.read(text, filter_=cls.valid) + if section.name is not None + ) + + @staticmethod + def read(text, filter_=None): + lines = filter(filter_, map(str.strip, text.splitlines())) + name = None + for value in lines: + section_match = value.startswith('[') and value.endswith(']') + if section_match: + name = value.strip('[]') + continue + yield Pair(name, value) + + @staticmethod + def valid(line): + return line and not line.startswith('#') + + +class DeprecatedTuple: + """ + Provide subscript item access for backward compatibility. + + >>> recwarn = getfixture('recwarn') + >>> ep = EntryPoint(name='name', value='value', group='group') + >>> ep[:] + ('name', 'value', 'group') + >>> ep[0] + 'name' + >>> len(recwarn) + 1 + """ + + # Do not remove prior to 2023-05-01 or Python 3.13 + _warn = functools.partial( + warnings.warn, + "EntryPoint tuple interface is deprecated. Access members by name.", + DeprecationWarning, + stacklevel=pypy_partial(2), + ) + + def __getitem__(self, item): + self._warn() + return self._key()[item] + + +class EntryPoint(DeprecatedTuple): + """An entry point as defined by Python packaging conventions. + + See `the packaging docs on entry points + <https://packaging.python.org/en/latest/specifications/entry-points/>`_ + for more information. + + >>> ep = EntryPoint( + ... name=None, group=None, value='package.module:attr [extra1, extra2]') + >>> ep.module + 'package.module' + >>> ep.attr + 'attr' + >>> ep.extras + ['extra1', 'extra2'] + """ + + pattern = re.compile( + r'(?P<module>[\w.]+)\s*' + r'(:\s*(?P<attr>[\w.]+)\s*)?' + r'((?P<extras>\[.*\])\s*)?$' + ) + """ + A regular expression describing the syntax for an entry point, + which might look like: + + - module + - package.module + - package.module:attribute + - package.module:object.attribute + - package.module:attr [extra1, extra2] + + Other combinations are possible as well. + + The expression is lenient about whitespace around the ':', + following the attr, and following any extras. + """ + + name: str + value: str + group: str + + dist: Optional['Distribution'] = None + + def __init__(self, name, value, group): + vars(self).update(name=name, value=value, group=group) + + def load(self): + """Load the entry point from its definition. If only a module + is indicated by the value, return that module. Otherwise, + return the named object.
+ """ + match = self.pattern.match(self.value) + module = import_module(match.group('module')) + attrs = filter(None, (match.group('attr') or '').split('.')) + return functools.reduce(getattr, attrs, module) + + @property + def module(self): + match = self.pattern.match(self.value) + return match.group('module') + + @property + def attr(self): + match = self.pattern.match(self.value) + return match.group('attr') + + @property + def extras(self): + match = self.pattern.match(self.value) + return re.findall(r'\w+', match.group('extras') or '') + + def _for(self, dist): + vars(self).update(dist=dist) + return self + + def matches(self, **params): + """ + EntryPoint matches the given parameters. + + >>> ep = EntryPoint(group='foo', name='bar', value='bing:bong [extra1, extra2]') + >>> ep.matches(group='foo') + True + >>> ep.matches(name='bar', value='bing:bong [extra1, extra2]') + True + >>> ep.matches(group='foo', name='other') + False + >>> ep.matches() + True + >>> ep.matches(extras=['extra1', 'extra2']) + True + >>> ep.matches(module='bing') + True + >>> ep.matches(attr='bong') + True + """ + attrs = (getattr(self, param) for param in params) + return all(map(operator.eq, params.values(), attrs)) + + def _key(self): + return self.name, self.value, self.group + + def __lt__(self, other): + return self._key() < other._key() + + def __eq__(self, other): + return self._key() == other._key() + + def __setattr__(self, name, value): + raise AttributeError("EntryPoint objects are immutable.") + + def __repr__(self): + return ( + f'EntryPoint(name={self.name!r}, value={self.value!r}, ' + f'group={self.group!r})' + ) + + def __hash__(self): + return hash(self._key()) + + +class EntryPoints(tuple): + """ + An immutable collection of selectable EntryPoint objects. + """ + + __slots__ = () + + def __getitem__(self, name): # -> EntryPoint: + """ + Get the EntryPoint in self matching name. + """ + try: + return next(iter(self.select(name=name))) + except StopIteration: + raise KeyError(name) + + def select(self, **params): + """ + Select entry points from self that match the + given parameters (typically group and/or name). + """ + return EntryPoints(ep for ep in self if _py39compat.ep_matches(ep, **params)) + + @property + def names(self): + """ + Return the set of all names of all entry points. + """ + return {ep.name for ep in self} + + @property + def groups(self): + """ + Return the set of all groups of all entry points. + """ + return {ep.group for ep in self} + + @classmethod + def _from_text_for(cls, text, dist): + return cls(ep._for(dist) for ep in cls._from_text(text)) + + @staticmethod + def _from_text(text): + return ( + EntryPoint(name=item.value.name, value=item.value.value, group=item.name) + for item in Sectioned.section_pairs(text or '') + ) + + +class PackagePath(pathlib.PurePosixPath): + """A reference to a path in a package""" + + def read_text(self, encoding='utf-8'): + with self.locate().open(encoding=encoding) as stream: + return stream.read() + + def read_binary(self): + with self.locate().open('rb') as stream: + return stream.read() + + def locate(self): + """Return a path-like object for this path""" + return self.dist.locate_file(self) + + +class FileHash: + def __init__(self, spec): + self.mode, _, self.value = spec.partition('=') + + def __repr__(self): + return f'' + + +class Distribution: + """A Python distribution package.""" + + @abc.abstractmethod + def read_text(self, filename): + """Attempt to load metadata file given by the name. 
+ + :param filename: The name of the file in the distribution info. + :return: The text if found, otherwise None. + """ + + @abc.abstractmethod + def locate_file(self, path): + """ + Given a path to a file in this distribution, return a path + to it. + """ + + @classmethod + def from_name(cls, name: str): + """Return the Distribution for the given package name. + + :param name: The name of the distribution package to search for. + :return: The Distribution instance (or subclass thereof) for the named + package, if found. + :raises PackageNotFoundError: When the named package's distribution + metadata cannot be found. + :raises ValueError: When an invalid value is supplied for name. + """ + if not name: + raise ValueError("A distribution name is required.") + try: + return next(cls.discover(name=name)) + except StopIteration: + raise PackageNotFoundError(name) + + @classmethod + def discover(cls, **kwargs): + """Return an iterable of Distribution objects for all packages. + + Pass a ``context`` or pass keyword arguments for constructing + a context. + + :context: A ``DistributionFinder.Context`` object. + :return: Iterable of Distribution objects for all packages. + """ + context = kwargs.pop('context', None) + if context and kwargs: + raise ValueError("cannot accept context and kwargs") + context = context or DistributionFinder.Context(**kwargs) + return itertools.chain.from_iterable( + resolver(context) for resolver in cls._discover_resolvers() + ) + + @staticmethod + def at(path): + """Return a Distribution for the indicated metadata path + + :param path: a string or path-like object + :return: a concrete Distribution instance for the path + """ + return PathDistribution(pathlib.Path(path)) + + @staticmethod + def _discover_resolvers(): + """Search the meta_path for resolvers.""" + declared = ( + getattr(finder, 'find_distributions', None) for finder in sys.meta_path + ) + return filter(None, declared) + + @property + def metadata(self) -> _meta.PackageMetadata: + """Return the parsed metadata for this Distribution. + + The returned object will have keys that name the various bits of + metadata. See PEP 566 for details. + """ + text = ( + self.read_text('METADATA') + or self.read_text('PKG-INFO') + # This last clause is here to support old egg-info files. Its + # effect is to just end up using the PathDistribution's self._path + # (which points to the egg-info file) attribute unchanged. + or self.read_text('') + ) + return _adapters.Message(email.message_from_string(text)) + + @property + def name(self): + """Return the 'Name' metadata for the distribution package.""" + return self.metadata['Name'] + + @property + def _normalized_name(self): + """Return a normalized version of the name.""" + return Prepared.normalize(self.name) + + @property + def version(self): + """Return the 'Version' metadata for the distribution package.""" + return self.metadata['Version'] + + @property + def entry_points(self): + return EntryPoints._from_text_for(self.read_text('entry_points.txt'), self) + + @property + def files(self): + """Files in this distribution. + + :return: List of PackagePath for this distribution or None + + Result is `None` if the metadata file that enumerates files + (i.e. RECORD for dist-info or SOURCES.txt for egg-info) is + missing. + Result may be empty if the metadata exists but is empty. 
+ """ + + def make_file(name, hash=None, size_str=None): + result = PackagePath(name) + result.hash = FileHash(hash) if hash else None + result.size = int(size_str) if size_str else None + result.dist = self + return result + + @pass_none + def make_files(lines): + return list(starmap(make_file, csv.reader(lines))) + + return make_files(self._read_files_distinfo() or self._read_files_egginfo()) + + def _read_files_distinfo(self): + """ + Read the lines of RECORD + """ + text = self.read_text('RECORD') + return text and text.splitlines() + + def _read_files_egginfo(self): + """ + SOURCES.txt might contain literal commas, so wrap each line + in quotes. + """ + text = self.read_text('SOURCES.txt') + return text and map('"{}"'.format, text.splitlines()) + + @property + def requires(self): + """Generated requirements specified for this Distribution""" + reqs = self._read_dist_info_reqs() or self._read_egg_info_reqs() + return reqs and list(reqs) + + def _read_dist_info_reqs(self): + return self.metadata.get_all('Requires-Dist') + + def _read_egg_info_reqs(self): + source = self.read_text('requires.txt') + return pass_none(self._deps_from_requires_text)(source) + + @classmethod + def _deps_from_requires_text(cls, source): + return cls._convert_egg_info_reqs_to_simple_reqs(Sectioned.read(source)) + + @staticmethod + def _convert_egg_info_reqs_to_simple_reqs(sections): + """ + Historically, setuptools would solicit and store 'extra' + requirements, including those with environment markers, + in separate sections. More modern tools expect each + dependency to be defined separately, with any relevant + extras and environment markers attached directly to that + requirement. This method converts the former to the + latter. See _test_deps_from_requires_text for an example. + """ + + def make_condition(name): + return name and f'extra == "{name}"' + + def quoted_marker(section): + section = section or '' + extra, sep, markers = section.partition(':') + if extra and markers: + markers = f'({markers})' + conditions = list(filter(None, [markers, make_condition(extra)])) + return '; ' + ' and '.join(conditions) if conditions else '' + + def url_req_space(req): + """ + PEP 508 requires a space between the url_spec and the quoted_marker. + Ref python/importlib_metadata#357. + """ + # '@' is uniquely indicative of a url_req. + return ' ' * ('@' in req) + + for section in sections: + space = url_req_space(section.value) + yield section.value + space + quoted_marker(section.name) + + +class DistributionFinder(MetaPathFinder): + """ + A MetaPathFinder capable of discovering installed distributions. + """ + + class Context: + """ + Keyword arguments presented by the caller to + ``distributions()`` or ``Distribution.discover()`` + to narrow the scope of a search for distributions + in all DistributionFinders. + + Each DistributionFinder may expect any parameters + and should attempt to honor the canonical + parameters defined below when appropriate. + """ + + name = None + """ + Specific name for which a distribution finder should match. + A name of ``None`` matches all distributions. + """ + + def __init__(self, **kwargs): + vars(self).update(kwargs) + + @property + def path(self): + """ + The sequence of directory path that a distribution finder + should search. + + Typically refers to Python installed package paths such as + "site-packages" directories and defaults to ``sys.path``. 
+ """ + return vars(self).get('path', sys.path) + + @abc.abstractmethod + def find_distributions(self, context=Context()): + """ + Find distributions. + + Return an iterable of all Distribution instances capable of + loading the metadata for packages matching the ``context``, + a DistributionFinder.Context instance. + """ + + +class FastPath: + """ + Micro-optimized class for searching a path for + children. + + >>> FastPath('').children() + ['...'] + """ + + @functools.lru_cache() # type: ignore + def __new__(cls, root): + return super().__new__(cls) + + def __init__(self, root): + self.root = root + + def joinpath(self, child): + return pathlib.Path(self.root, child) + + def children(self): + with suppress(Exception): + return os.listdir(self.root or '.') + with suppress(Exception): + return self.zip_children() + return [] + + def zip_children(self): + zip_path = zipp.Path(self.root) + names = zip_path.root.namelist() + self.joinpath = zip_path.joinpath + + return dict.fromkeys(child.split(posixpath.sep, 1)[0] for child in names) + + def search(self, name): + return self.lookup(self.mtime).search(name) + + @property + def mtime(self): + with suppress(OSError): + return os.stat(self.root).st_mtime + self.lookup.cache_clear() + + @method_cache + def lookup(self, mtime): + return Lookup(self) + + +class Lookup: + def __init__(self, path: FastPath): + base = os.path.basename(path.root).lower() + base_is_egg = base.endswith(".egg") + self.infos = FreezableDefaultDict(list) + self.eggs = FreezableDefaultDict(list) + + for child in path.children(): + low = child.lower() + if low.endswith((".dist-info", ".egg-info")): + # rpartition is faster than splitext and suitable for this purpose. + name = low.rpartition(".")[0].partition("-")[0] + normalized = Prepared.normalize(name) + self.infos[normalized].append(path.joinpath(child)) + elif base_is_egg and low == "egg-info": + name = base.rpartition(".")[0].partition("-")[0] + legacy_normalized = Prepared.legacy_normalize(name) + self.eggs[legacy_normalized].append(path.joinpath(child)) + + self.infos.freeze() + self.eggs.freeze() + + def search(self, prepared): + infos = ( + self.infos[prepared.normalized] + if prepared + else itertools.chain.from_iterable(self.infos.values()) + ) + eggs = ( + self.eggs[prepared.legacy_normalized] + if prepared + else itertools.chain.from_iterable(self.eggs.values()) + ) + return itertools.chain(infos, eggs) + + +class Prepared: + """ + A prepared search for metadata on a possibly-named package. + """ + + normalized = None + legacy_normalized = None + + def __init__(self, name): + self.name = name + if name is None: + return + self.normalized = self.normalize(name) + self.legacy_normalized = self.legacy_normalize(name) + + @staticmethod + def normalize(name): + """ + PEP 503 normalization plus dashes as underscores. + """ + return re.sub(r"[-_.]+", "-", name).lower().replace('-', '_') + + @staticmethod + def legacy_normalize(name): + """ + Normalize the package name as found in the convention in + older packaging tools versions and specs. + """ + return name.lower().replace('-', '_') + + def __bool__(self): + return bool(self.name) + + +@install +class MetadataPathFinder(NullFinder, DistributionFinder): + """A degenerate finder for distribution packages on the file system. + + This finder supplies only a find_distributions() method for versions + of Python that do not have a PathFinder find_distributions(). + """ + + def find_distributions(self, context=DistributionFinder.Context()): + """ + Find distributions. 
+ + Return an iterable of all Distribution instances capable of + loading the metadata for packages matching ``context.name`` + (or all names if ``None`` indicated) along the paths in the list + of directories ``context.path``. + """ + found = self._search_paths(context.name, context.path) + return map(PathDistribution, found) + + @classmethod + def _search_paths(cls, name, paths): + """Find metadata directories in paths heuristically.""" + prepared = Prepared(name) + return itertools.chain.from_iterable( + path.search(prepared) for path in map(FastPath, paths) + ) + + def invalidate_caches(cls): + FastPath.__new__.cache_clear() + + +class PathDistribution(Distribution): + def __init__(self, path: SimplePath): + """Construct a distribution. + + :param path: SimplePath indicating the metadata directory. + """ + self._path = path + + def read_text(self, filename): + with suppress( + FileNotFoundError, + IsADirectoryError, + KeyError, + NotADirectoryError, + PermissionError, + ): + return self._path.joinpath(filename).read_text(encoding='utf-8') + + read_text.__doc__ = Distribution.read_text.__doc__ + + def locate_file(self, path): + return self._path.parent / path + + @property + def _normalized_name(self): + """ + Performance optimization: where possible, resolve the + normalized name from the file system path. + """ + stem = os.path.basename(str(self._path)) + return ( + pass_none(Prepared.normalize)(self._name_from_stem(stem)) + or super()._normalized_name + ) + + @staticmethod + def _name_from_stem(stem): + """ + >>> PathDistribution._name_from_stem('foo-3.0.egg-info') + 'foo' + >>> PathDistribution._name_from_stem('CherryPy-3.0.dist-info') + 'CherryPy' + >>> PathDistribution._name_from_stem('face.egg-info') + 'face' + >>> PathDistribution._name_from_stem('foo.bar') + """ + filename, ext = os.path.splitext(stem) + if ext not in ('.dist-info', '.egg-info'): + return + name, sep, rest = filename.partition('-') + return name + + +def distribution(distribution_name): + """Get the ``Distribution`` instance for the named package. + + :param distribution_name: The name of the distribution package as a string. + :return: A ``Distribution`` instance (or subclass thereof). + """ + return Distribution.from_name(distribution_name) + + +def distributions(**kwargs): + """Get all ``Distribution`` instances in the current environment. + + :return: An iterable of ``Distribution`` instances. + """ + return Distribution.discover(**kwargs) + + +def metadata(distribution_name) -> _meta.PackageMetadata: + """Get the metadata for the named package. + + :param distribution_name: The name of the distribution package to query. + :return: A PackageMetadata containing the parsed metadata. + """ + return Distribution.from_name(distribution_name).metadata + + +def version(distribution_name): + """Get the version string for the named package. + + :param distribution_name: The name of the distribution package to query. + :return: The version string for the package as defined in the package's + "Version" metadata key. + """ + return distribution(distribution_name).version + + +_unique = functools.partial( + unique_everseen, + key=_py39compat.normalized_name, +) +""" +Wrapper for ``distributions`` to return unique distributions by name. +""" + + +def entry_points(**params) -> EntryPoints: + """Return EntryPoint objects for all installed packages. + + Pass selection parameters (group or name) to filter the + result to entry points matching those properties (see + EntryPoints.select()). 
+ + :return: EntryPoints for all installed packages. + """ + eps = itertools.chain.from_iterable( + dist.entry_points for dist in _unique(distributions()) + ) + return EntryPoints(eps).select(**params) + + +def files(distribution_name): + """Return a list of files for the named package. + + :param distribution_name: The name of the distribution package to query. + :return: List of files composing the distribution. + """ + return distribution(distribution_name).files + + +def requires(distribution_name): + """ + Return a list of requirements for the named package. + + :return: An iterator of requirements, suitable for + packaging.requirement.Requirement. + """ + return distribution(distribution_name).requires + + +def packages_distributions() -> Mapping[str, List[str]]: + """ + Return a mapping of top-level packages to their + distributions. + + >>> import collections.abc + >>> pkgs = packages_distributions() + >>> all(isinstance(dist, collections.abc.Sequence) for dist in pkgs.values()) + True + """ + pkg_to_dist = collections.defaultdict(list) + for dist in distributions(): + for pkg in _top_level_declared(dist) or _top_level_inferred(dist): + pkg_to_dist[pkg].append(dist.metadata['Name']) + return dict(pkg_to_dist) + + +def _top_level_declared(dist): + return (dist.read_text('top_level.txt') or '').split() + + +def _top_level_inferred(dist): + return { + f.parts[0] if len(f.parts) > 1 else f.with_suffix('').name + for f in always_iterable(dist.files) + if f.suffix == ".py" + } diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_metadata/_adapters.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_metadata/_adapters.py new file mode 100644 index 00000000..e33cba5e --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_metadata/_adapters.py @@ -0,0 +1,90 @@ +import functools +import warnings +import re +import textwrap +import email.message + +from ._text import FoldedCase +from ._compat import pypy_partial + + +# Do not remove prior to 2024-01-01 or Python 3.14 +_warn = functools.partial( + warnings.warn, + "Implicit None on return values is deprecated and will raise KeyErrors.", + DeprecationWarning, + stacklevel=pypy_partial(2), +) + + +class Message(email.message.Message): + multiple_use_keys = set( + map( + FoldedCase, + [ + 'Classifier', + 'Obsoletes-Dist', + 'Platform', + 'Project-URL', + 'Provides-Dist', + 'Provides-Extra', + 'Requires-Dist', + 'Requires-External', + 'Supported-Platform', + 'Dynamic', + ], + ) + ) + """ + Keys that may be indicated multiple times per PEP 566. + """ + + def __new__(cls, orig: email.message.Message): + res = super().__new__(cls) + vars(res).update(vars(orig)) + return res + + def __init__(self, *args, **kwargs): + self._headers = self._repair_headers() + + # suppress spurious error from mypy + def __iter__(self): + return super().__iter__() + + def __getitem__(self, item): + """ + Warn users that a ``KeyError`` can be expected when a + mising key is supplied. Ref python/importlib_metadata#371. 
+ """ + res = super().__getitem__(item) + if res is None: + _warn() + return res + + def _repair_headers(self): + def redent(value): + "Correct for RFC822 indentation" + if not value or '\n' not in value: + return value + return textwrap.dedent(' ' * 8 + value) + + headers = [(key, redent(value)) for key, value in vars(self)['_headers']] + if self._payload: + headers.append(('Description', self.get_payload())) + return headers + + @property + def json(self): + """ + Convert PackageMetadata to a JSON-compatible format + per PEP 0566. + """ + + def transform(key): + value = self.get_all(key) if key in self.multiple_use_keys else self[key] + if key == 'Keywords': + value = re.split(r'\s+', value) + tk = key.lower().replace('-', '_') + return tk, value + + return dict(map(transform, map(FoldedCase, self))) diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_metadata/_collections.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_metadata/_collections.py new file mode 100644 index 00000000..cf0954e1 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_metadata/_collections.py @@ -0,0 +1,30 @@ +import collections + + +# from jaraco.collections 3.3 +class FreezableDefaultDict(collections.defaultdict): + """ + Often it is desirable to prevent the mutation of + a default dict after its initial construction, such + as to prevent mutation during iteration. + + >>> dd = FreezableDefaultDict(list) + >>> dd[0].append('1') + >>> dd.freeze() + >>> dd[1] + [] + >>> len(dd) + 1 + """ + + def __missing__(self, key): + return getattr(self, '_frozen', super().__missing__)(key) + + def freeze(self): + self._frozen = lambda key: self.default_factory() + + +class Pair(collections.namedtuple('Pair', 'name value')): + @classmethod + def parse(cls, text): + return cls(*map(str.strip, text.split("=", 1))) diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_metadata/_compat.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_metadata/_compat.py new file mode 100644 index 00000000..3d78566e --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_metadata/_compat.py @@ -0,0 +1,72 @@ +import sys +import platform + + +__all__ = ['install', 'NullFinder', 'Protocol'] + + +try: + from typing import Protocol +except ImportError: # pragma: no cover + # Python 3.7 compatibility + from typing_extensions import Protocol # type: ignore + + +def install(cls): + """ + Class decorator for installation on sys.meta_path. + + Adds the backport DistributionFinder to sys.meta_path and + attempts to disable the finder functionality of the stdlib + DistributionFinder. + """ + sys.meta_path.append(cls()) + disable_stdlib_finder() + return cls + + +def disable_stdlib_finder(): + """ + Give the backport primacy for discovering path-based distributions + by monkey-patching the stdlib O_O. + + See #91 for more background for rationale on this sketchy + behavior. + """ + + def matches(finder): + return getattr( + finder, '__module__', None + ) == '_frozen_importlib_external' and hasattr(finder, 'find_distributions') + + for finder in filter(matches, sys.meta_path): # pragma: nocover + del finder.find_distributions + + +class NullFinder: + """ + A "Finder" (aka "MetaClassFinder") that never finds any modules, + but may find distributions. 
+ """ + + @staticmethod + def find_spec(*args, **kwargs): + return None + + # In Python 2, the import system requires finders + # to have a find_module() method, but this usage + # is deprecated in Python 3 in favor of find_spec(). + # For the purposes of this finder (i.e. being present + # on sys.meta_path but having no other import + # system functionality), the two methods are identical. + find_module = find_spec + + +def pypy_partial(val): + """ + Adjust for variable stacklevel on partial under PyPy. + + Workaround for #327. + """ + is_pypy = platform.python_implementation() == 'PyPy' + return val + is_pypy diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_metadata/_functools.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_metadata/_functools.py new file mode 100644 index 00000000..71f66bd0 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_metadata/_functools.py @@ -0,0 +1,104 @@ +import types +import functools + + +# from jaraco.functools 3.3 +def method_cache(method, cache_wrapper=None): + """ + Wrap lru_cache to support storing the cache data in the object instances. + + Abstracts the common paradigm where the method explicitly saves an + underscore-prefixed protected property on first call and returns that + subsequently. + + >>> class MyClass: + ... calls = 0 + ... + ... @method_cache + ... def method(self, value): + ... self.calls += 1 + ... return value + + >>> a = MyClass() + >>> a.method(3) + 3 + >>> for x in range(75): + ... res = a.method(x) + >>> a.calls + 75 + + Note that the apparent behavior will be exactly like that of lru_cache + except that the cache is stored on each instance, so values in one + instance will not flush values from another, and when an instance is + deleted, so are the cached values for that instance. + + >>> b = MyClass() + >>> for x in range(35): + ... res = b.method(x) + >>> b.calls + 35 + >>> a.method(0) + 0 + >>> a.calls + 75 + + Note that if method had been decorated with ``functools.lru_cache()``, + a.calls would have been 76 (due to the cached value of 0 having been + flushed by the 'b' instance). + + Clear the cache with ``.cache_clear()`` + + >>> a.method.cache_clear() + + Same for a method that hasn't yet been called. + + >>> c = MyClass() + >>> c.method.cache_clear() + + Another cache wrapper may be supplied: + + >>> cache = functools.lru_cache(maxsize=2) + >>> MyClass.method2 = method_cache(lambda self: 3, cache_wrapper=cache) + >>> a = MyClass() + >>> a.method2() + 3 + + Caution - do not subsequently wrap the method with another decorator, such + as ``@property``, which changes the semantics of the function. + + See also + http://code.activestate.com/recipes/577452-a-memoize-decorator-for-instance-methods/ + for another implementation and additional justification. + """ + cache_wrapper = cache_wrapper or functools.lru_cache() + + def wrapper(self, *args, **kwargs): + # it's the first call, replace the method with a cached, bound method + bound_method = types.MethodType(method, self) + cached_method = cache_wrapper(bound_method) + setattr(self, method.__name__, cached_method) + return cached_method(*args, **kwargs) + + # Support cache clear even before cache has been created. 
+ wrapper.cache_clear = lambda: None + + return wrapper + + +# From jaraco.functools 3.3 +def pass_none(func): + """ + Wrap func so it's not called if its first param is None + + >>> print_text = pass_none(print) + >>> print_text('text') + text + >>> print_text(None) + """ + + @functools.wraps(func) + def wrapper(param, *args, **kwargs): + if param is not None: + return func(param, *args, **kwargs) + + return wrapper diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_metadata/_itertools.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_metadata/_itertools.py new file mode 100644 index 00000000..d4ca9b91 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_metadata/_itertools.py @@ -0,0 +1,73 @@ +from itertools import filterfalse + + +def unique_everseen(iterable, key=None): + "List unique elements, preserving order. Remember all elements ever seen." + # unique_everseen('AAAABBBCCDAABBB') --> A B C D + # unique_everseen('ABBCcAD', str.lower) --> A B C D + seen = set() + seen_add = seen.add + if key is None: + for element in filterfalse(seen.__contains__, iterable): + seen_add(element) + yield element + else: + for element in iterable: + k = key(element) + if k not in seen: + seen_add(k) + yield element + + +# copied from more_itertools 8.8 +def always_iterable(obj, base_type=(str, bytes)): + """If *obj* is iterable, return an iterator over its items:: + + >>> obj = (1, 2, 3) + >>> list(always_iterable(obj)) + [1, 2, 3] + + If *obj* is not iterable, return a one-item iterable containing *obj*:: + + >>> obj = 1 + >>> list(always_iterable(obj)) + [1] + + If *obj* is ``None``, return an empty iterable: + + >>> obj = None + >>> list(always_iterable(None)) + [] + + By default, binary and text strings are not considered iterable:: + + >>> obj = 'foo' + >>> list(always_iterable(obj)) + ['foo'] + + If *base_type* is set, objects for which ``isinstance(obj, base_type)`` + returns ``True`` won't be considered iterable. + + >>> obj = {'a': 1} + >>> list(always_iterable(obj)) # Iterate over the dict's keys + ['a'] + >>> list(always_iterable(obj, base_type=dict)) # Treat dicts as a unit + [{'a': 1}] + + Set *base_type* to ``None`` to avoid any special handling and treat objects + Python considers iterable as iterable: + + >>> obj = 'foo' + >>> list(always_iterable(obj, base_type=None)) + ['f', 'o', 'o'] + """ + if obj is None: + return iter(()) + + if (base_type is not None) and isinstance(obj, base_type): + return iter((obj,)) + + try: + return iter(obj) + except TypeError: + return iter((obj,)) diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_metadata/_meta.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_metadata/_meta.py new file mode 100644 index 00000000..259b15ba --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_metadata/_meta.py @@ -0,0 +1,49 @@ +from ._compat import Protocol +from typing import Any, Dict, Iterator, List, TypeVar, Union + + +_T = TypeVar("_T") + + +class PackageMetadata(Protocol): + def __len__(self) -> int: + ... # pragma: no cover + + def __contains__(self, item: str) -> bool: + ... # pragma: no cover + + def __getitem__(self, key: str) -> str: + ... # pragma: no cover + + def __iter__(self) -> Iterator[str]: + ... # pragma: no cover + + def get_all(self, name: str, failobj: _T = ...) -> Union[List[Any], _T]: + """ + Return all values associated with a possibly multi-valued key. 
+ """ + + @property + def json(self) -> Dict[str, Union[str, List[str]]]: + """ + A JSON-compatible form of the metadata. + """ + + +class SimplePath(Protocol[_T]): + """ + A minimal subset of pathlib.Path required by PathDistribution. + """ + + def joinpath(self) -> _T: + ... # pragma: no cover + + def __truediv__(self, other: Union[str, _T]) -> _T: + ... # pragma: no cover + + @property + def parent(self) -> _T: + ... # pragma: no cover + + def read_text(self) -> str: + ... # pragma: no cover diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_metadata/_py39compat.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_metadata/_py39compat.py new file mode 100644 index 00000000..cde4558f --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_metadata/_py39compat.py @@ -0,0 +1,35 @@ +""" +Compatibility layer with Python 3.8/3.9 +""" +from typing import TYPE_CHECKING, Any, Optional + +if TYPE_CHECKING: # pragma: no cover + # Prevent circular imports on runtime. + from . import Distribution, EntryPoint +else: + Distribution = EntryPoint = Any + + +def normalized_name(dist: Distribution) -> Optional[str]: + """ + Honor name normalization for distributions that don't provide ``_normalized_name``. + """ + try: + return dist._normalized_name + except AttributeError: + from . import Prepared # -> delay to prevent circular imports. + + return Prepared.normalize(getattr(dist, "name", None) or dist.metadata['Name']) + + +def ep_matches(ep: EntryPoint, **params) -> bool: + """ + Workaround for ``EntryPoint`` objects without the ``matches`` method. + """ + try: + return ep.matches(**params) + except AttributeError: + from . import EntryPoint # -> delay to prevent circular imports. + + # Reconstruct the EntryPoint object to make sure it is compatible. + return EntryPoint(ep.name, ep.value, ep.group).matches(**params) diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_metadata/_text.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_metadata/_text.py new file mode 100644 index 00000000..c88cfbb2 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_metadata/_text.py @@ -0,0 +1,99 @@ +import re + +from ._functools import method_cache + + +# from jaraco.text 3.5 +class FoldedCase(str): + """ + A case insensitive string class; behaves just like str + except compares equal when the only variation is case. + + >>> s = FoldedCase('hello world') + + >>> s == 'Hello World' + True + + >>> 'Hello World' == s + True + + >>> s != 'Hello World' + False + + >>> s.index('O') + 4 + + >>> s.split('O') + ['hell', ' w', 'rld'] + + >>> sorted(map(FoldedCase, ['GAMMA', 'alpha', 'Beta'])) + ['alpha', 'Beta', 'GAMMA'] + + Sequence membership is straightforward. + + >>> "Hello World" in [s] + True + >>> s in ["Hello World"] + True + + You may test for set inclusion, but candidate and elements + must both be folded. + + >>> FoldedCase("Hello World") in {s} + True + >>> s in {FoldedCase("Hello World")} + True + + String inclusion works as long as the FoldedCase object + is on the right. 
+ + >>> "hello" in FoldedCase("Hello World") + True + + But not if the FoldedCase object is on the left: + + >>> FoldedCase('hello') in 'Hello World' + False + + In that case, use in_: + + >>> FoldedCase('hello').in_('Hello World') + True + + >>> FoldedCase('hello') > FoldedCase('Hello') + False + """ + + def __lt__(self, other): + return self.lower() < other.lower() + + def __gt__(self, other): + return self.lower() > other.lower() + + def __eq__(self, other): + return self.lower() == other.lower() + + def __ne__(self, other): + return self.lower() != other.lower() + + def __hash__(self): + return hash(self.lower()) + + def __contains__(self, other): + return super().lower().__contains__(other.lower()) + + def in_(self, other): + "Does self appear in other?" + return self in FoldedCase(other) + + # cache lower since it's likely to be called frequently. + @method_cache + def lower(self): + return super().lower() + + def index(self, sub): + return self.lower().index(sub.lower()) + + def split(self, splitter=' ', maxsplit=0): + pattern = re.compile(re.escape(splitter), re.I) + return pattern.split(self, maxsplit) diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_metadata/py.typed b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_metadata/py.typed new file mode 100644 index 00000000..e69de29b diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources-5.10.2.dist-info/INSTALLER b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources-5.10.2.dist-info/INSTALLER new file mode 100644 index 00000000..a1b589e3 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources-5.10.2.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources-5.10.2.dist-info/LICENSE b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources-5.10.2.dist-info/LICENSE new file mode 100644 index 00000000..d6456956 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources-5.10.2.dist-info/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
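Editor's note (not part of the diff): the hunks above vendor importlib_metadata 5.2.0, and the hunks below vendor importlib_resources 5.10.2, into the add-on's aob_py3 library path. As a quick orientation to the API the importlib_metadata backport exposes, the following minimal sketch shows typical calls. It is a hypothetical illustration, not code from this changeset: it assumes aob_py3 is on sys.path (as it is when the add-on's modular inputs run) and uses 'requests' purely as an example of a distribution whose dist-info metadata is importable:

    from importlib_metadata import PackageNotFoundError, distribution, version

    try:
        # distribution() resolves a Distribution via Distribution.from_name().
        dist = distribution('requests')
        print(dist.metadata['Name'], version('requests'))
        # dist.entry_points is an EntryPoints tuple; narrow it with .select().
        for ep in dist.entry_points.select(group='console_scripts'):
            print(ep.name, ep.value)
    except PackageNotFoundError:
        # Raised when no matching dist-info/egg-info metadata is found.
        print('requests metadata not found')
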
diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources-5.10.2.dist-info/METADATA b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources-5.10.2.dist-info/METADATA new file mode 100644 index 00000000..a9995f09 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources-5.10.2.dist-info/METADATA @@ -0,0 +1,104 @@ +Metadata-Version: 2.1 +Name: importlib-resources +Version: 5.10.2 +Summary: Read resources from Python packages +Home-page: https://github.com/python/importlib_resources +Author: Barry Warsaw +Author-email: barry@python.org +Project-URL: Documentation, https://importlib-resources.readthedocs.io/ +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Apache Software License +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Requires-Python: >=3.7 +License-File: LICENSE +Requires-Dist: zipp (>=3.1.0) ; python_version < "3.10" +Provides-Extra: docs +Requires-Dist: sphinx (>=3.5) ; extra == 'docs' +Requires-Dist: jaraco.packaging (>=9) ; extra == 'docs' +Requires-Dist: rst.linker (>=1.9) ; extra == 'docs' +Requires-Dist: furo ; extra == 'docs' +Requires-Dist: sphinx-lint ; extra == 'docs' +Requires-Dist: jaraco.tidelift (>=1.4) ; extra == 'docs' +Provides-Extra: testing +Requires-Dist: pytest (>=6) ; extra == 'testing' +Requires-Dist: pytest-checkdocs (>=2.4) ; extra == 'testing' +Requires-Dist: flake8 (<5) ; extra == 'testing' +Requires-Dist: pytest-cov ; extra == 'testing' +Requires-Dist: pytest-enabler (>=1.3) ; extra == 'testing' +Requires-Dist: pytest-black (>=0.3.7) ; (platform_python_implementation != "PyPy") and extra == 'testing' +Requires-Dist: pytest-mypy (>=0.9.1) ; (platform_python_implementation != "PyPy") and extra == 'testing' +Requires-Dist: pytest-flake8 ; (python_version < "3.12") and extra == 'testing' + +.. image:: https://img.shields.io/pypi/v/importlib_resources.svg + :target: https://pypi.org/project/importlib_resources + +.. image:: https://img.shields.io/pypi/pyversions/importlib_resources.svg + +.. image:: https://github.com/python/importlib_resources/workflows/tests/badge.svg + :target: https://github.com/python/importlib_resources/actions?query=workflow%3A%22tests%22 + :alt: tests + +.. image:: https://img.shields.io/badge/code%20style-black-000000.svg + :target: https://github.com/psf/black + :alt: Code style: Black + +.. image:: https://readthedocs.org/projects/importlib-resources/badge/?version=latest + :target: https://importlib-resources.readthedocs.io/en/latest/?badge=latest + +.. image:: https://img.shields.io/badge/skeleton-2022-informational + :target: https://blog.jaraco.com/skeleton + +.. image:: https://tidelift.com/badges/package/pypi/importlib-resources + :target: https://tidelift.com/subscription/pkg/pypi-importlib-resources?utm_source=pypi-importlib-resources&utm_medium=readme + +``importlib_resources`` is a backport of Python standard library +`importlib.resources +<https://docs.python.org/3/library/importlib.resources.html>`_ +module for older Pythons. + +The key goal of this module is to replace parts of `pkg_resources +<https://setuptools.readthedocs.io/en/latest/pkg_resources.html>`_ with a +solution in Python's stdlib that relies on well-defined APIs. This makes +reading resources included in packages easier, with more stable and consistent +semantics. + +Compatibility +============= + +New features are introduced in this third-party library and later merged +into CPython.
The following table indicates which versions of this library +were contributed to different versions in the standard library: + +.. list-table:: + :header-rows: 1 + + * - importlib_resources + - stdlib + * - 5.9 + - 3.12 + * - 5.7 + - 3.11 + * - 5.0 + - 3.10 + * - 1.3 + - 3.9 + * - 0.5 (?) + - 3.7 + +For Enterprise +============== + +Available as part of the Tidelift Subscription. + +This project and the maintainers of thousands of other packages are working with Tidelift to deliver one enterprise subscription that covers all of the open source you use. + +`Learn more `_. + +Security Contact +================ + +To report a security vulnerability, please use the +`Tidelift security contact <https://tidelift.com/security>`_. +Tidelift will coordinate the fix and disclosure. diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources-5.10.2.dist-info/RECORD b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources-5.10.2.dist-info/RECORD new file mode 100644 index 00000000..f62c3940 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources-5.10.2.dist-info/RECORD @@ -0,0 +1,47 @@ +importlib_resources-5.10.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +importlib_resources-5.10.2.dist-info/LICENSE,sha256=z8d0m5b2O9McPEK1xHG_dWgUBT6EfBDz6wA0F7xSPTA,11358 +importlib_resources-5.10.2.dist-info/METADATA,sha256=Xo5ntATvDYUxdmW8tr8kxtfdiOC9889mOk-LE1LtZfI,4111 +importlib_resources-5.10.2.dist-info/RECORD,, +importlib_resources-5.10.2.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92 +importlib_resources-5.10.2.dist-info/top_level.txt,sha256=fHIjHU1GZwAjvcydpmUnUrTnbvdiWjG4OEVZK8by0TQ,20 +importlib_resources/__init__.py,sha256=evPm12kLgYqTm-pbzm60bOuumumT8IpBNWFp0uMyrzE,506 +importlib_resources/_adapters.py,sha256=o51tP2hpVtohP33gSYyAkGNpLfYDBqxxYsadyiRZi1E,4504 +importlib_resources/_common.py,sha256=jSC4xfLdcMNbtbWHtpzbFkNa0W7kvf__nsYn14C_AEU,5457 +importlib_resources/_compat.py,sha256=dSadF6WPt8MwOqSm_NIOQPhw4x0iaMYTWxi-XS93p7M,2923 +importlib_resources/_itertools.py,sha256=WCdJ1Gs_kNFwKENyIG7TO0Y434IWCu0zjVVSsSbZwU8,884 +importlib_resources/_legacy.py,sha256=0TKdZixxLWA-xwtAZw4HcpqJmj4Xprx1Zkcty0gTRZY,3481 +importlib_resources/abc.py,sha256=Icr2IJ2QtH7vvAB9vC5WRJ9KBoaDyJa7KUs8McuROzo,5140 +importlib_resources/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +importlib_resources/readers.py,sha256=PZsi5qacr2Qn3KHw4qw3Gm1MzrBblPHoTdjqjH7EKWw,3581 +importlib_resources/simple.py,sha256=0__2TQBTQoqkajYmNPt1HxERcReAT6boVKJA328pr04,2576 +importlib_resources/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +importlib_resources/tests/_compat.py,sha256=YTSB0U1R9oADnh6GrQcOCgojxcF_N6H1LklymEWf9SQ,708 +importlib_resources/tests/_path.py,sha256=yZyWsQzJZQ1Z8ARAxWkjAdaVVsjlzyqxO0qjBUofJ8M,1039 +importlib_resources/tests/data01/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +importlib_resources/tests/data01/binary.file,sha256=BU7ewdAhH2JP7Qy8qdT5QAsOSRxDdCryxbCr6_DJkNg,4 +importlib_resources/tests/data01/subdirectory/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +importlib_resources/tests/data01/subdirectory/binary.file,sha256=BU7ewdAhH2JP7Qy8qdT5QAsOSRxDdCryxbCr6_DJkNg,4 +importlib_resources/tests/data01/utf-16.file,sha256=t5q9qhxX0rYqItBOM8D3ylwG-RHrnOYteTLtQr6sF7g,44 +importlib_resources/tests/data01/utf-8.file,sha256=kwWgYG4yQ-ZF2X_WA66EjYPmxJRn-w8aSOiS9e8tKYY,20
+importlib_resources/tests/data02/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +importlib_resources/tests/data02/one/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +importlib_resources/tests/data02/one/resource1.txt,sha256=10flKac7c-XXFzJ3t-AB5MJjlBy__dSZvPE_dOm2q6U,13 +importlib_resources/tests/data02/two/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +importlib_resources/tests/data02/two/resource2.txt,sha256=lt2jbN3TMn9QiFKM832X39bU_62UptDdUkoYzkvEbl0,13 +importlib_resources/tests/namespacedata01/binary.file,sha256=BU7ewdAhH2JP7Qy8qdT5QAsOSRxDdCryxbCr6_DJkNg,4 +importlib_resources/tests/namespacedata01/utf-16.file,sha256=t5q9qhxX0rYqItBOM8D3ylwG-RHrnOYteTLtQr6sF7g,44 +importlib_resources/tests/namespacedata01/utf-8.file,sha256=kwWgYG4yQ-ZF2X_WA66EjYPmxJRn-w8aSOiS9e8tKYY,20 +importlib_resources/tests/test_compatibilty_files.py,sha256=NWkbIsylI8Wz3Dwsxo1quT4ZI6ToXFA2mojCG6Dzuxw,3260 +importlib_resources/tests/test_contents.py,sha256=V1Xfk3lqTDdvUsZuV18Kndf0CT_tkM2oEIwk9Vv0rhg,968 +importlib_resources/tests/test_files.py,sha256=1Y8da-g0xOQLzuREDYUiRc_qhWlvFNeydW_mUH7l15w,3251 +importlib_resources/tests/test_open.py,sha256=pmEgdrSFdM83L6FxtR8U_RT9BfI3JZ4snGmM_ZZIegY,2565 +importlib_resources/tests/test_path.py,sha256=xvPteNA-UKavDhKgLgrQuXSxKWYH7Q4nSNDVfBX95Gs,2103 +importlib_resources/tests/test_read.py,sha256=EyYvpHJ_7F4LuX2EU_c5EerIBQfRhOFmiIR7LOc5Y5E,2408 +importlib_resources/tests/test_reader.py,sha256=nPhldbYPq3fXoQs0ZAub4atjhp2lgNyLNv2G1pg6Agw,4480 +importlib_resources/tests/test_resource.py,sha256=EMoarxTEHcrq8R41LQDsndIG8Idtm4I_LpN8DYpHtT0,8478 +importlib_resources/tests/update-zips.py,sha256=x-SrO5v87iLLUMXyefxDwAd3imAs_slI94sLWvJ6N40,1417 +importlib_resources/tests/util.py,sha256=ARAlxZ47wC-lgR7PGlmgBoi4HnhzcykD5Is2-TAwY0I,4873 +importlib_resources/tests/zipdata01/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +importlib_resources/tests/zipdata01/ziptestdata.zip,sha256=z5Of4dsv3T0t-46B0MsVhxlhsPGMz28aUhJDWpj3_oY,876 +importlib_resources/tests/zipdata02/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +importlib_resources/tests/zipdata02/ziptestdata.zip,sha256=ydI-_j-xgQ7tDxqBp9cjOqXBGxUp6ZBbwVJu6Xj-nrY,698 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources-5.10.2.dist-info/WHEEL b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources-5.10.2.dist-info/WHEEL new file mode 100644 index 00000000..57e3d840 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources-5.10.2.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.38.4) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources-5.10.2.dist-info/top_level.txt b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources-5.10.2.dist-info/top_level.txt new file mode 100644 index 00000000..58ad1bd3 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources-5.10.2.dist-info/top_level.txt @@ -0,0 +1 @@ +importlib_resources diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/__init__.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/__init__.py new file mode 100644 index 00000000..34e3a995 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/__init__.py @@ -0,0 +1,36 @@ +"""Read resources contained within a package.""" + +from ._common import ( 
+ as_file, + files, + Package, +) + +from ._legacy import ( + contents, + open_binary, + read_binary, + open_text, + read_text, + is_resource, + path, + Resource, +) + +from .abc import ResourceReader + + +__all__ = [ + 'Package', + 'Resource', + 'ResourceReader', + 'as_file', + 'contents', + 'files', + 'is_resource', + 'open_binary', + 'open_text', + 'path', + 'read_binary', + 'read_text', +] diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/_adapters.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/_adapters.py new file mode 100644 index 00000000..ea363d86 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/_adapters.py @@ -0,0 +1,170 @@ +from contextlib import suppress +from io import TextIOWrapper + +from . import abc + + +class SpecLoaderAdapter: + """ + Adapt a package spec to adapt the underlying loader. + """ + + def __init__(self, spec, adapter=lambda spec: spec.loader): + self.spec = spec + self.loader = adapter(spec) + + def __getattr__(self, name): + return getattr(self.spec, name) + + +class TraversableResourcesLoader: + """ + Adapt a loader to provide TraversableResources. + """ + + def __init__(self, spec): + self.spec = spec + + def get_resource_reader(self, name): + return CompatibilityFiles(self.spec)._native() + + +def _io_wrapper(file, mode='r', *args, **kwargs): + if mode == 'r': + return TextIOWrapper(file, *args, **kwargs) + elif mode == 'rb': + return file + raise ValueError( + "Invalid mode value '{}', only 'r' and 'rb' are supported".format(mode) + ) + + +class CompatibilityFiles: + """ + Adapter for an existing or non-existent resource reader + to provide a compatibility .files(). + """ + + class SpecPath(abc.Traversable): + """ + Path tied to a module spec. + Can be read and exposes the resource reader children. + """ + + def __init__(self, spec, reader): + self._spec = spec + self._reader = reader + + def iterdir(self): + if not self._reader: + return iter(()) + return iter( + CompatibilityFiles.ChildPath(self._reader, path) + for path in self._reader.contents() + ) + + def is_file(self): + return False + + is_dir = is_file + + def joinpath(self, other): + if not self._reader: + return CompatibilityFiles.OrphanPath(other) + return CompatibilityFiles.ChildPath(self._reader, other) + + @property + def name(self): + return self._spec.name + + def open(self, mode='r', *args, **kwargs): + return _io_wrapper(self._reader.open_resource(None), mode, *args, **kwargs) + + class ChildPath(abc.Traversable): + """ + Path tied to a resource reader child. + Can be read but doesn't expose any meaningful children. + """ + + def __init__(self, reader, name): + self._reader = reader + self._name = name + + def iterdir(self): + return iter(()) + + def is_file(self): + return self._reader.is_resource(self.name) + + def is_dir(self): + return not self.is_file() + + def joinpath(self, other): + return CompatibilityFiles.OrphanPath(self.name, other) + + @property + def name(self): + return self._name + + def open(self, mode='r', *args, **kwargs): + return _io_wrapper( + self._reader.open_resource(self.name), mode, *args, **kwargs + ) + + class OrphanPath(abc.Traversable): + """ + Orphan path, not tied to a module spec or resource reader. + Can't be read and doesn't expose any meaningful children. 
+ """ + + def __init__(self, *path_parts): + if len(path_parts) < 1: + raise ValueError('Need at least one path part to construct a path') + self._path = path_parts + + def iterdir(self): + return iter(()) + + def is_file(self): + return False + + is_dir = is_file + + def joinpath(self, other): + return CompatibilityFiles.OrphanPath(*self._path, other) + + @property + def name(self): + return self._path[-1] + + def open(self, mode='r', *args, **kwargs): + raise FileNotFoundError("Can't open orphan path") + + def __init__(self, spec): + self.spec = spec + + @property + def _reader(self): + with suppress(AttributeError): + return self.spec.loader.get_resource_reader(self.spec.name) + + def _native(self): + """ + Return the native reader if it supports files(). + """ + reader = self._reader + return reader if hasattr(reader, 'files') else self + + def __getattr__(self, attr): + return getattr(self._reader, attr) + + def files(self): + return CompatibilityFiles.SpecPath(self.spec, self._reader) + + +def wrap_spec(package): + """ + Construct a package spec with traversable compatibility + on the spec/loader/reader. + """ + return SpecLoaderAdapter(package.__spec__, TraversableResourcesLoader) diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/_common.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/_common.py new file mode 100644 index 00000000..3c6de1cf --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/_common.py @@ -0,0 +1,207 @@ +import os +import pathlib +import tempfile +import functools +import contextlib +import types +import importlib +import inspect +import warnings +import itertools + +from typing import Union, Optional, cast +from .abc import ResourceReader, Traversable + +from ._compat import wrap_spec + +Package = Union[types.ModuleType, str] +Anchor = Package + + +def package_to_anchor(func): + """ + Replace 'package' parameter as 'anchor' and warn about the change. + + Other errors should fall through. + + >>> files('a', 'b') + Traceback (most recent call last): + TypeError: files() takes from 0 to 1 positional arguments but 2 were given + """ + undefined = object() + + @functools.wraps(func) + def wrapper(anchor=undefined, package=undefined): + if package is not undefined: + if anchor is not undefined: + return func(anchor, package) + warnings.warn( + "First parameter to files is renamed to 'anchor'", + DeprecationWarning, + stacklevel=2, + ) + return func(package) + elif anchor is undefined: + return func() + return func(anchor) + + return wrapper + + +@package_to_anchor +def files(anchor: Optional[Anchor] = None) -> Traversable: + """ + Get a Traversable resource for an anchor. + """ + return from_package(resolve(anchor)) + + +def get_resource_reader(package: types.ModuleType) -> Optional[ResourceReader]: + """ + Return the package's loader if it's a ResourceReader. + """ + # We can't use + # a issubclass() check here because apparently abc.'s __subclasscheck__() + # hook wants to create a weak reference to the object, but + # zipimport.zipimporter does not support weak references, resulting in a + # TypeError. That seems terrible. 
+    spec = package.__spec__
+    reader = getattr(spec.loader, 'get_resource_reader', None)  # type: ignore
+    if reader is None:
+        return None
+    return reader(spec.name)  # type: ignore
+
+
+@functools.singledispatch
+def resolve(cand: Optional[Anchor]) -> types.ModuleType:
+    return cast(types.ModuleType, cand)
+
+
+@resolve.register
+def _(cand: str) -> types.ModuleType:
+    return importlib.import_module(cand)
+
+
+@resolve.register
+def _(cand: None) -> types.ModuleType:
+    return resolve(_infer_caller().f_globals['__name__'])
+
+
+def _infer_caller():
+    """
+    Walk the stack and find the frame of the first caller not in this module.
+    """
+
+    def is_this_file(frame_info):
+        return frame_info.filename == __file__
+
+    def is_wrapper(frame_info):
+        return frame_info.function == 'wrapper'
+
+    not_this_file = itertools.filterfalse(is_this_file, inspect.stack())
+    # also exclude 'wrapper' due to singledispatch in the call stack
+    callers = itertools.filterfalse(is_wrapper, not_this_file)
+    return next(callers).frame
+
+
+def from_package(package: types.ModuleType):
+    """
+    Return a Traversable object for the given package.
+
+    """
+    spec = wrap_spec(package)
+    reader = spec.loader.get_resource_reader(spec.name)
+    return reader.files()
+
+
+@contextlib.contextmanager
+def _tempfile(
+    reader,
+    suffix='',
+    # gh-93353: Keep a reference to call os.remove() in late Python
+    # finalization.
+    *,
+    _os_remove=os.remove,
+):
+    # Not using tempfile.NamedTemporaryFile as it leads to deeper 'try'
+    # blocks due to the need to close the temporary file to work on Windows
+    # properly.
+    fd, raw_path = tempfile.mkstemp(suffix=suffix)
+    try:
+        try:
+            os.write(fd, reader())
+        finally:
+            os.close(fd)
+        del reader
+        yield pathlib.Path(raw_path)
+    finally:
+        try:
+            _os_remove(raw_path)
+        except FileNotFoundError:
+            pass
+
+
+def _temp_file(path):
+    return _tempfile(path.read_bytes, suffix=path.name)
+
+
+def _is_present_dir(path: Traversable) -> bool:
+    """
+    Some Traversables implement ``is_dir()`` to raise an
+    exception (i.e. ``FileNotFoundError``) when the
+    directory doesn't exist. This function wraps that call
+    to always return a boolean and only return True
+    if there's a dir and it exists.
+    """
+    with contextlib.suppress(FileNotFoundError):
+        return path.is_dir()
+    return False
+
+
+@functools.singledispatch
+def as_file(path):
+    """
+    Given a Traversable object, return that object as a
+    path on the local file system in a context manager.
+    """
+    return _temp_dir(path) if _is_present_dir(path) else _temp_file(path)
+
+
+@as_file.register(pathlib.Path)
+@contextlib.contextmanager
+def _(path):
+    """
+    Degenerate behavior for pathlib.Path objects.
+    """
+    yield path
+
+
+@contextlib.contextmanager
+def _temp_path(dir: tempfile.TemporaryDirectory):
+    """
+    Wrap tempfile.TemporaryDirectory to return a pathlib object.
+    """
+    with dir as result:
+        yield pathlib.Path(result)
+
+
+@contextlib.contextmanager
+def _temp_dir(path):
+    """
+    Given a traversable dir, recursively replicate the whole tree
+    to the file system in a context manager.
+ """ + assert path.is_dir() + with _temp_path(tempfile.TemporaryDirectory()) as temp_dir: + yield _write_contents(temp_dir, path) + + +def _write_contents(target, source): + child = target.joinpath(source.name) + if source.is_dir(): + child.mkdir() + for item in source.iterdir(): + _write_contents(child, item) + else: + child.write_bytes(source.read_bytes()) + return child diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/_compat.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/_compat.py new file mode 100644 index 00000000..8d7ade08 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/_compat.py @@ -0,0 +1,108 @@ +# flake8: noqa + +import abc +import os +import sys +import pathlib +from contextlib import suppress +from typing import Union + + +if sys.version_info >= (3, 10): + from zipfile import Path as ZipPath # type: ignore +else: + from zipp import Path as ZipPath # type: ignore + + +try: + from typing import runtime_checkable # type: ignore +except ImportError: + + def runtime_checkable(cls): # type: ignore + return cls + + +try: + from typing import Protocol # type: ignore +except ImportError: + Protocol = abc.ABC # type: ignore + + +class TraversableResourcesLoader: + """ + Adapt loaders to provide TraversableResources and other + compatibility. + + Used primarily for Python 3.9 and earlier where the native + loaders do not yet implement TraversableResources. + """ + + def __init__(self, spec): + self.spec = spec + + @property + def path(self): + return self.spec.origin + + def get_resource_reader(self, name): + from . import readers, _adapters + + def _zip_reader(spec): + with suppress(AttributeError): + return readers.ZipReader(spec.loader, spec.name) + + def _namespace_reader(spec): + with suppress(AttributeError, ValueError): + return readers.NamespaceReader(spec.submodule_search_locations) + + def _available_reader(spec): + with suppress(AttributeError): + return spec.loader.get_resource_reader(spec.name) + + def _native_reader(spec): + reader = _available_reader(spec) + return reader if hasattr(reader, 'files') else None + + def _file_reader(spec): + try: + path = pathlib.Path(self.path) + except TypeError: + return None + if path.exists(): + return readers.FileReader(self) + + return ( + # native reader if it supplies 'files' + _native_reader(self.spec) + or + # local ZipReader if a zip module + _zip_reader(self.spec) + or + # local NamespaceReader if a namespace module + _namespace_reader(self.spec) + or + # local FileReader + _file_reader(self.spec) + # fallback - adapt the spec ResourceReader to TraversableReader + or _adapters.CompatibilityFiles(self.spec) + ) + + +def wrap_spec(package): + """ + Construct a package spec with traversable compatibility + on the spec/loader/reader. + + Supersedes _adapters.wrap_spec to use TraversableResourcesLoader + from above for older Python compatibility (<3.10). + """ + from . 
import _adapters + + return _adapters.SpecLoaderAdapter(package.__spec__, TraversableResourcesLoader) + + +if sys.version_info >= (3, 9): + StrPath = Union[str, os.PathLike[str]] +else: + # PathLike is only subscriptable at runtime in 3.9+ + StrPath = Union[str, "os.PathLike[str]"] diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/_itertools.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/_itertools.py new file mode 100644 index 00000000..cce05582 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/_itertools.py @@ -0,0 +1,35 @@ +from itertools import filterfalse + +from typing import ( + Callable, + Iterable, + Iterator, + Optional, + Set, + TypeVar, + Union, +) + +# Type and type variable definitions +_T = TypeVar('_T') +_U = TypeVar('_U') + + +def unique_everseen( + iterable: Iterable[_T], key: Optional[Callable[[_T], _U]] = None +) -> Iterator[_T]: + "List unique elements, preserving order. Remember all elements ever seen." + # unique_everseen('AAAABBBCCDAABBB') --> A B C D + # unique_everseen('ABBCcAD', str.lower) --> A B C D + seen: Set[Union[_T, _U]] = set() + seen_add = seen.add + if key is None: + for element in filterfalse(seen.__contains__, iterable): + seen_add(element) + yield element + else: + for element in iterable: + k = key(element) + if k not in seen: + seen_add(k) + yield element diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/_legacy.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/_legacy.py new file mode 100644 index 00000000..b1ea8105 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/_legacy.py @@ -0,0 +1,120 @@ +import functools +import os +import pathlib +import types +import warnings + +from typing import Union, Iterable, ContextManager, BinaryIO, TextIO, Any + +from . import _common + +Package = Union[types.ModuleType, str] +Resource = str + + +def deprecated(func): + @functools.wraps(func) + def wrapper(*args, **kwargs): + warnings.warn( + f"{func.__name__} is deprecated. Use files() instead. " + "Refer to https://importlib-resources.readthedocs.io" + "/en/latest/using.html#migrating-from-legacy for migration advice.", + DeprecationWarning, + stacklevel=2, + ) + return func(*args, **kwargs) + + return wrapper + + +def normalize_path(path: Any) -> str: + """Normalize a path by ensuring it is a string. + + If the resulting string contains path separators, an exception is raised. 
+ """ + str_path = str(path) + parent, file_name = os.path.split(str_path) + if parent: + raise ValueError(f'{path!r} must be only a file name') + return file_name + + +@deprecated +def open_binary(package: Package, resource: Resource) -> BinaryIO: + """Return a file-like object opened for binary reading of the resource.""" + return (_common.files(package) / normalize_path(resource)).open('rb') + + +@deprecated +def read_binary(package: Package, resource: Resource) -> bytes: + """Return the binary contents of the resource.""" + return (_common.files(package) / normalize_path(resource)).read_bytes() + + +@deprecated +def open_text( + package: Package, + resource: Resource, + encoding: str = 'utf-8', + errors: str = 'strict', +) -> TextIO: + """Return a file-like object opened for text reading of the resource.""" + return (_common.files(package) / normalize_path(resource)).open( + 'r', encoding=encoding, errors=errors + ) + + +@deprecated +def read_text( + package: Package, + resource: Resource, + encoding: str = 'utf-8', + errors: str = 'strict', +) -> str: + """Return the decoded string of the resource. + + The decoding-related arguments have the same semantics as those of + bytes.decode(). + """ + with open_text(package, resource, encoding, errors) as fp: + return fp.read() + + +@deprecated +def contents(package: Package) -> Iterable[str]: + """Return an iterable of entries in `package`. + + Note that not all entries are resources. Specifically, directories are + not considered resources. Use `is_resource()` on each entry returned here + to check if it is a resource or not. + """ + return [path.name for path in _common.files(package).iterdir()] + + +@deprecated +def is_resource(package: Package, name: str) -> bool: + """True if `name` is a resource inside `package`. + + Directories are *not* resources. + """ + resource = normalize_path(name) + return any( + traversable.name == resource and traversable.is_file() + for traversable in _common.files(package).iterdir() + ) + + +@deprecated +def path( + package: Package, + resource: Resource, +) -> ContextManager[pathlib.Path]: + """A context manager providing a file path object to the resource. + + If the resource does not already exist on its own on the file system, + a temporary file will be created. If the file was created, the file + will be deleted upon exiting the context manager (no exception is + raised if the file was deleted prior to the context manager + exiting). + """ + return _common.as_file(_common.files(package) / normalize_path(resource)) diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/abc.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/abc.py new file mode 100644 index 00000000..23b6aeaf --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/abc.py @@ -0,0 +1,170 @@ +import abc +import io +import itertools +import pathlib +from typing import Any, BinaryIO, Iterable, Iterator, NoReturn, Text, Optional + +from ._compat import runtime_checkable, Protocol, StrPath + + +__all__ = ["ResourceReader", "Traversable", "TraversableResources"] + + +class ResourceReader(metaclass=abc.ABCMeta): + """Abstract base class for loaders to provide resource reading support.""" + + @abc.abstractmethod + def open_resource(self, resource: Text) -> BinaryIO: + """Return an opened, file-like object for binary reading. + + The 'resource' argument is expected to represent only a file name. + If the resource cannot be found, FileNotFoundError is raised. 
+ """ + # This deliberately raises FileNotFoundError instead of + # NotImplementedError so that if this method is accidentally called, + # it'll still do the right thing. + raise FileNotFoundError + + @abc.abstractmethod + def resource_path(self, resource: Text) -> Text: + """Return the file system path to the specified resource. + + The 'resource' argument is expected to represent only a file name. + If the resource does not exist on the file system, raise + FileNotFoundError. + """ + # This deliberately raises FileNotFoundError instead of + # NotImplementedError so that if this method is accidentally called, + # it'll still do the right thing. + raise FileNotFoundError + + @abc.abstractmethod + def is_resource(self, path: Text) -> bool: + """Return True if the named 'path' is a resource. + + Files are resources, directories are not. + """ + raise FileNotFoundError + + @abc.abstractmethod + def contents(self) -> Iterable[str]: + """Return an iterable of entries in `package`.""" + raise FileNotFoundError + + +class TraversalError(Exception): + pass + + +@runtime_checkable +class Traversable(Protocol): + """ + An object with a subset of pathlib.Path methods suitable for + traversing directories and opening files. + + Any exceptions that occur when accessing the backing resource + may propagate unaltered. + """ + + @abc.abstractmethod + def iterdir(self) -> Iterator["Traversable"]: + """ + Yield Traversable objects in self + """ + + def read_bytes(self) -> bytes: + """ + Read contents of self as bytes + """ + with self.open('rb') as strm: + return strm.read() + + def read_text(self, encoding: Optional[str] = None) -> str: + """ + Read contents of self as text + """ + with self.open(encoding=encoding) as strm: + return strm.read() + + @abc.abstractmethod + def is_dir(self) -> bool: + """ + Return True if self is a directory + """ + + @abc.abstractmethod + def is_file(self) -> bool: + """ + Return True if self is a file + """ + + def joinpath(self, *descendants: StrPath) -> "Traversable": + """ + Return Traversable resolved with any descendants applied. + + Each descendant should be a path segment relative to self + and each may contain multiple levels separated by + ``posixpath.sep`` (``/``). + """ + if not descendants: + return self + names = itertools.chain.from_iterable( + path.parts for path in map(pathlib.PurePosixPath, descendants) + ) + target = next(names) + matches = ( + traversable for traversable in self.iterdir() if traversable.name == target + ) + try: + match = next(matches) + except StopIteration: + raise TraversalError( + "Target not found during traversal.", target, list(names) + ) + return match.joinpath(*names) + + def __truediv__(self, child: StrPath) -> "Traversable": + """ + Return Traversable child in self + """ + return self.joinpath(child) + + @abc.abstractmethod + def open(self, mode='r', *args, **kwargs): + """ + mode may be 'r' or 'rb' to open as text or binary. Return a handle + suitable for reading (same as pathlib.Path.open). + + When opening as text, accepts encoding parameters such as those + accepted by io.TextIOWrapper. + """ + + @property + @abc.abstractmethod + def name(self) -> str: + """ + The base name of this object without any parent references. + """ + + +class TraversableResources(ResourceReader): + """ + The required interface for providing traversable + resources. 
+ """ + + @abc.abstractmethod + def files(self) -> "Traversable": + """Return a Traversable object for the loaded package.""" + + def open_resource(self, resource: StrPath) -> io.BufferedReader: + return self.files().joinpath(resource).open('rb') + + def resource_path(self, resource: Any) -> NoReturn: + raise FileNotFoundError(resource) + + def is_resource(self, path: StrPath) -> bool: + return self.files().joinpath(path).is_file() + + def contents(self) -> Iterator[str]: + return (item.name for item in self.files().iterdir()) diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/py.typed b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/py.typed new file mode 100644 index 00000000..e69de29b diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/readers.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/readers.py new file mode 100644 index 00000000..ab34db74 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/readers.py @@ -0,0 +1,120 @@ +import collections +import pathlib +import operator + +from . import abc + +from ._itertools import unique_everseen +from ._compat import ZipPath + + +def remove_duplicates(items): + return iter(collections.OrderedDict.fromkeys(items)) + + +class FileReader(abc.TraversableResources): + def __init__(self, loader): + self.path = pathlib.Path(loader.path).parent + + def resource_path(self, resource): + """ + Return the file system path to prevent + `resources.path()` from creating a temporary + copy. + """ + return str(self.path.joinpath(resource)) + + def files(self): + return self.path + + +class ZipReader(abc.TraversableResources): + def __init__(self, loader, module): + _, _, name = module.rpartition('.') + self.prefix = loader.prefix.replace('\\', '/') + name + '/' + self.archive = loader.archive + + def open_resource(self, resource): + try: + return super().open_resource(resource) + except KeyError as exc: + raise FileNotFoundError(exc.args[0]) + + def is_resource(self, path): + # workaround for `zipfile.Path.is_file` returning true + # for non-existent paths. + target = self.files().joinpath(path) + return target.is_file() and target.exists() + + def files(self): + return ZipPath(self.archive, self.prefix) + + +class MultiplexedPath(abc.Traversable): + """ + Given a series of Traversable objects, implement a merged + version of the interface across all objects. Useful for + namespace packages which may be multihomed at a single + name. + """ + + def __init__(self, *paths): + self._paths = list(map(pathlib.Path, remove_duplicates(paths))) + if not self._paths: + message = 'MultiplexedPath must contain at least one path' + raise FileNotFoundError(message) + if not all(path.is_dir() for path in self._paths): + raise NotADirectoryError('MultiplexedPath only supports directories') + + def iterdir(self): + files = (file for path in self._paths for file in path.iterdir()) + return unique_everseen(files, key=operator.attrgetter('name')) + + def read_bytes(self): + raise FileNotFoundError(f'{self} is not a file') + + def read_text(self, *args, **kwargs): + raise FileNotFoundError(f'{self} is not a file') + + def is_dir(self): + return True + + def is_file(self): + return False + + def joinpath(self, *descendants): + try: + return super().joinpath(*descendants) + except abc.TraversalError: + # One of the paths did not resolve (a directory does not exist). + # Just return something that will not exist. 
+            return self._paths[0].joinpath(*descendants)
+
+    def open(self, *args, **kwargs):
+        raise FileNotFoundError(f'{self} is not a file')
+
+    @property
+    def name(self):
+        return self._paths[0].name
+
+    def __repr__(self):
+        paths = ', '.join(f"'{path}'" for path in self._paths)
+        return f'MultiplexedPath({paths})'
+
+
+class NamespaceReader(abc.TraversableResources):
+    def __init__(self, namespace_path):
+        if 'NamespacePath' not in str(namespace_path):
+            raise ValueError('Invalid path')
+        self.path = MultiplexedPath(*list(namespace_path))
+
+    def resource_path(self, resource):
+        """
+        Return the file system path to prevent
+        `resources.path()` from creating a temporary
+        copy.
+        """
+        return str(self.path.joinpath(resource))
+
+    def files(self):
+        return self.path
diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/simple.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/simple.py
new file mode 100644
index 00000000..7770c922
--- /dev/null
+++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/simple.py
@@ -0,0 +1,106 @@
+"""
+Interface adapters for low-level readers.
+"""
+
+import abc
+import io
+import itertools
+from typing import BinaryIO, List
+
+from .abc import Traversable, TraversableResources
+
+
+class SimpleReader(abc.ABC):
+    """
+    The minimum, low-level interface required from a resource
+    provider.
+    """
+
+    @property
+    @abc.abstractmethod
+    def package(self) -> str:
+        """
+        The name of the package for which this reader loads resources.
+        """
+
+    @abc.abstractmethod
+    def children(self) -> List['SimpleReader']:
+        """
+        Obtain an iterable of SimpleReader for available
+        child containers (e.g. directories).
+        """
+
+    @abc.abstractmethod
+    def resources(self) -> List[str]:
+        """
+        Obtain available named resources for this virtual package.
+        """
+
+    @abc.abstractmethod
+    def open_binary(self, resource: str) -> BinaryIO:
+        """
+        Obtain a File-like for a named resource.
+        """
+
+    @property
+    def name(self):
+        return self.package.split('.')[-1]
+
+
+class ResourceContainer(Traversable):
+    """
+    Traversable container for a package's resources via its reader.
+    """
+
+    def __init__(self, reader: SimpleReader):
+        self.reader = reader
+
+    def is_dir(self):
+        return True
+
+    def is_file(self):
+        return False
+
+    def iterdir(self):
+        # resources() is a method returning the list of names, so it
+        # must be called before iterating.
+        files = (ResourceHandle(self, name) for name in self.reader.resources())
+        dirs = map(ResourceContainer, self.reader.children())
+        return itertools.chain(files, dirs)
+
+    def open(self, *args, **kwargs):
+        raise IsADirectoryError()
+
+
+class ResourceHandle(Traversable):
+    """
+    Handle to a named resource in a ResourceReader.
+    """
+
+    def __init__(self, parent: ResourceContainer, name: str):
+        self.parent = parent
+        self.name = name  # type: ignore
+
+    def is_file(self):
+        return True
+
+    def is_dir(self):
+        return False
+
+    def open(self, mode='r', *args, **kwargs):
+        stream = self.parent.reader.open_binary(self.name)
+        if 'b' not in mode:
+            # Wrap the binary stream so text-mode callers get decoded text.
+            stream = io.TextIOWrapper(stream, *args, **kwargs)
+        return stream
+
+    def joinpath(self, name):
+        raise RuntimeError("Cannot traverse into a resource")
+
+
+class TraversableReader(TraversableResources, SimpleReader):
+    """
+    A TraversableResources based on SimpleReader. Resource providers
+    may derive from this class to provide the TraversableResources
+    interface by supplying the SimpleReader interface.
+ """ + + def files(self): + return ResourceContainer(self) diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/__init__.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/_compat.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/_compat.py new file mode 100644 index 00000000..e7bf06dd --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/_compat.py @@ -0,0 +1,32 @@ +import os + + +try: + from test.support import import_helper # type: ignore +except ImportError: + # Python 3.9 and earlier + class import_helper: # type: ignore + from test.support import ( + modules_setup, + modules_cleanup, + DirsOnSysPath, + CleanImport, + ) + + +try: + from test.support import os_helper # type: ignore +except ImportError: + # Python 3.9 compat + class os_helper: # type:ignore + from test.support import temp_dir + + +try: + # Python 3.10 + from test.support.os_helper import unlink +except ImportError: + from test.support import unlink as _unlink + + def unlink(target): + return _unlink(os.fspath(target)) diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/_path.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/_path.py new file mode 100644 index 00000000..c630e4d3 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/_path.py @@ -0,0 +1,50 @@ +import pathlib +import functools + + +#### +# from jaraco.path 3.4 + + +def build(spec, prefix=pathlib.Path()): + """ + Build a set of files/directories, as described by the spec. + + Each key represents a pathname, and the value represents + the content. Content may be a nested directory. + + >>> spec = { + ... 'README.txt': "A README file", + ... "foo": { + ... "__init__.py": "", + ... "bar": { + ... "__init__.py": "", + ... }, + ... "baz.py": "# Some code", + ... } + ... 
} + >>> tmpdir = getfixture('tmpdir') + >>> build(spec, tmpdir) + """ + for name, contents in spec.items(): + create(contents, pathlib.Path(prefix) / name) + + +@functools.singledispatch +def create(content, path): + path.mkdir(exist_ok=True) + build(content, prefix=path) # type: ignore + + +@create.register +def _(content: bytes, path): + path.write_bytes(content) + + +@create.register +def _(content: str, path): + path.write_text(content) + + +# end from jaraco.path +#### diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/data01/__init__.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/data01/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/data01/binary.file b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/data01/binary.file new file mode 100644 index 00000000..eaf36c1d Binary files /dev/null and b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/data01/binary.file differ diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/data01/subdirectory/__init__.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/data01/subdirectory/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/data01/subdirectory/binary.file b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/data01/subdirectory/binary.file new file mode 100644 index 00000000..eaf36c1d Binary files /dev/null and b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/data01/subdirectory/binary.file differ diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/data01/utf-16.file b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/data01/utf-16.file new file mode 100644 index 00000000..2cb77229 Binary files /dev/null and b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/data01/utf-16.file differ diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/data01/utf-8.file b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/data01/utf-8.file new file mode 100644 index 00000000..1c0132ad --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/data01/utf-8.file @@ -0,0 +1 @@ +Hello, UTF-8 world! 
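The data01 fixtures above back both the modern and the deprecated read paths. A minimal sketch of the migration that the ``_legacy.py`` deprecation warnings point to (illustrative only; ``pkg`` is a hypothetical package shipping a ``utf-8.file`` resource like the fixture above):

```python
import importlib_resources as resources

# Deprecated style: wrapped by _legacy.py and emits a DeprecationWarning.
text = resources.read_text('pkg', 'utf-8.file')

# Modern equivalent recommended by the warning message.
text = resources.files('pkg').joinpath('utf-8.file').read_text(encoding='utf-8')
```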
diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/data02/__init__.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/data02/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/data02/one/__init__.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/data02/one/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/data02/one/resource1.txt b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/data02/one/resource1.txt new file mode 100644 index 00000000..61a813e4 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/data02/one/resource1.txt @@ -0,0 +1 @@ +one resource diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/data02/two/__init__.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/data02/two/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/data02/two/resource2.txt b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/data02/two/resource2.txt new file mode 100644 index 00000000..a80ce46e --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/data02/two/resource2.txt @@ -0,0 +1 @@ +two resource diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/namespacedata01/binary.file b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/namespacedata01/binary.file new file mode 100644 index 00000000..eaf36c1d Binary files /dev/null and b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/namespacedata01/binary.file differ diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/namespacedata01/utf-16.file b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/namespacedata01/utf-16.file new file mode 100644 index 00000000..2cb77229 Binary files /dev/null and b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/namespacedata01/utf-16.file differ diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/namespacedata01/utf-8.file b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/namespacedata01/utf-8.file new file mode 100644 index 00000000..1c0132ad --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/namespacedata01/utf-8.file @@ -0,0 +1 @@ +Hello, UTF-8 world! diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/test_compatibilty_files.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/test_compatibilty_files.py new file mode 100644 index 00000000..d92c7c56 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/test_compatibilty_files.py @@ -0,0 +1,102 @@ +import io +import unittest + +import importlib_resources as resources + +from importlib_resources._adapters import ( + CompatibilityFiles, + wrap_spec, +) + +from . 
import util + + +class CompatibilityFilesTests(unittest.TestCase): + @property + def package(self): + bytes_data = io.BytesIO(b'Hello, world!') + return util.create_package( + file=bytes_data, + path='some_path', + contents=('a', 'b', 'c'), + ) + + @property + def files(self): + return resources.files(self.package) + + def test_spec_path_iter(self): + self.assertEqual( + sorted(path.name for path in self.files.iterdir()), + ['a', 'b', 'c'], + ) + + def test_child_path_iter(self): + self.assertEqual(list((self.files / 'a').iterdir()), []) + + def test_orphan_path_iter(self): + self.assertEqual(list((self.files / 'a' / 'a').iterdir()), []) + self.assertEqual(list((self.files / 'a' / 'a' / 'a').iterdir()), []) + + def test_spec_path_is(self): + self.assertFalse(self.files.is_file()) + self.assertFalse(self.files.is_dir()) + + def test_child_path_is(self): + self.assertTrue((self.files / 'a').is_file()) + self.assertFalse((self.files / 'a').is_dir()) + + def test_orphan_path_is(self): + self.assertFalse((self.files / 'a' / 'a').is_file()) + self.assertFalse((self.files / 'a' / 'a').is_dir()) + self.assertFalse((self.files / 'a' / 'a' / 'a').is_file()) + self.assertFalse((self.files / 'a' / 'a' / 'a').is_dir()) + + def test_spec_path_name(self): + self.assertEqual(self.files.name, 'testingpackage') + + def test_child_path_name(self): + self.assertEqual((self.files / 'a').name, 'a') + + def test_orphan_path_name(self): + self.assertEqual((self.files / 'a' / 'b').name, 'b') + self.assertEqual((self.files / 'a' / 'b' / 'c').name, 'c') + + def test_spec_path_open(self): + self.assertEqual(self.files.read_bytes(), b'Hello, world!') + self.assertEqual(self.files.read_text(), 'Hello, world!') + + def test_child_path_open(self): + self.assertEqual((self.files / 'a').read_bytes(), b'Hello, world!') + self.assertEqual((self.files / 'a').read_text(), 'Hello, world!') + + def test_orphan_path_open(self): + with self.assertRaises(FileNotFoundError): + (self.files / 'a' / 'b').read_bytes() + with self.assertRaises(FileNotFoundError): + (self.files / 'a' / 'b' / 'c').read_bytes() + + def test_open_invalid_mode(self): + with self.assertRaises(ValueError): + self.files.open('0') + + def test_orphan_path_invalid(self): + with self.assertRaises(ValueError): + CompatibilityFiles.OrphanPath() + + def test_wrap_spec(self): + spec = wrap_spec(self.package) + self.assertIsInstance(spec.loader.get_resource_reader(None), CompatibilityFiles) + + +class CompatibilityFilesNoReaderTests(unittest.TestCase): + @property + def package(self): + return util.create_package_from_loader(None) + + @property + def files(self): + return resources.files(self.package) + + def test_spec_path_joinpath(self): + self.assertIsInstance(self.files / 'a', CompatibilityFiles.OrphanPath) diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/test_contents.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/test_contents.py new file mode 100644 index 00000000..525568e8 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/test_contents.py @@ -0,0 +1,43 @@ +import unittest +import importlib_resources as resources + +from . import data01 +from . 
import util + + +class ContentsTests: + expected = { + '__init__.py', + 'binary.file', + 'subdirectory', + 'utf-16.file', + 'utf-8.file', + } + + def test_contents(self): + contents = {path.name for path in resources.files(self.data).iterdir()} + assert self.expected <= contents + + +class ContentsDiskTests(ContentsTests, unittest.TestCase): + def setUp(self): + self.data = data01 + + +class ContentsZipTests(ContentsTests, util.ZipSetup, unittest.TestCase): + pass + + +class ContentsNamespaceTests(ContentsTests, unittest.TestCase): + expected = { + # no __init__ because of namespace design + # no subdirectory as incidental difference in fixture + 'binary.file', + 'utf-16.file', + 'utf-8.file', + } + + def setUp(self): + from . import namespacedata01 + + self.data = namespacedata01 diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/test_files.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/test_files.py new file mode 100644 index 00000000..d258fb5f --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/test_files.py @@ -0,0 +1,112 @@ +import typing +import textwrap +import unittest +import warnings +import importlib +import contextlib + +import importlib_resources as resources +from ..abc import Traversable +from . import data01 +from . import util +from . import _path +from ._compat import os_helper, import_helper + + +@contextlib.contextmanager +def suppress_known_deprecation(): + with warnings.catch_warnings(record=True) as ctx: + warnings.simplefilter('default', category=DeprecationWarning) + yield ctx + + +class FilesTests: + def test_read_bytes(self): + files = resources.files(self.data) + actual = files.joinpath('utf-8.file').read_bytes() + assert actual == b'Hello, UTF-8 world!\n' + + def test_read_text(self): + files = resources.files(self.data) + actual = files.joinpath('utf-8.file').read_text(encoding='utf-8') + assert actual == 'Hello, UTF-8 world!\n' + + @unittest.skipUnless( + hasattr(typing, 'runtime_checkable'), + "Only suitable when typing supports runtime_checkable", + ) + def test_traversable(self): + assert isinstance(resources.files(self.data), Traversable) + + def test_old_parameter(self): + """ + Files used to take a 'package' parameter. Make sure anyone + passing by name is still supported. + """ + with suppress_known_deprecation(): + resources.files(package=self.data) + + +class OpenDiskTests(FilesTests, unittest.TestCase): + def setUp(self): + self.data = data01 + + +class OpenZipTests(FilesTests, util.ZipSetup, unittest.TestCase): + pass + + +class OpenNamespaceTests(FilesTests, unittest.TestCase): + def setUp(self): + from . import namespacedata01 + + self.data = namespacedata01 + + +class SiteDir: + def setUp(self): + self.fixtures = contextlib.ExitStack() + self.addCleanup(self.fixtures.close) + self.site_dir = self.fixtures.enter_context(os_helper.temp_dir()) + self.fixtures.enter_context(import_helper.DirsOnSysPath(self.site_dir)) + self.fixtures.enter_context(import_helper.CleanImport()) + + +class ModulesFilesTests(SiteDir, unittest.TestCase): + def test_module_resources(self): + """ + A module can have resources found adjacent to the module. 
+ """ + spec = { + 'mod.py': '', + 'res.txt': 'resources are the best', + } + _path.build(spec, self.site_dir) + import mod + + actual = resources.files(mod).joinpath('res.txt').read_text() + assert actual == spec['res.txt'] + + +class ImplicitContextFilesTests(SiteDir, unittest.TestCase): + def test_implicit_files(self): + """ + Without any parameter, files() will infer the location as the caller. + """ + spec = { + 'somepkg': { + '__init__.py': textwrap.dedent( + """ + import importlib_resources as res + val = res.files().joinpath('res.txt').read_text() + """ + ), + 'res.txt': 'resources are the best', + }, + } + _path.build(spec, self.site_dir) + assert importlib.import_module('somepkg').val == 'resources are the best' + + +if __name__ == '__main__': + unittest.main() diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/test_open.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/test_open.py new file mode 100644 index 00000000..87b42c3d --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/test_open.py @@ -0,0 +1,81 @@ +import unittest + +import importlib_resources as resources +from . import data01 +from . import util + + +class CommonBinaryTests(util.CommonTests, unittest.TestCase): + def execute(self, package, path): + target = resources.files(package).joinpath(path) + with target.open('rb'): + pass + + +class CommonTextTests(util.CommonTests, unittest.TestCase): + def execute(self, package, path): + target = resources.files(package).joinpath(path) + with target.open(): + pass + + +class OpenTests: + def test_open_binary(self): + target = resources.files(self.data) / 'binary.file' + with target.open('rb') as fp: + result = fp.read() + self.assertEqual(result, b'\x00\x01\x02\x03') + + def test_open_text_default_encoding(self): + target = resources.files(self.data) / 'utf-8.file' + with target.open() as fp: + result = fp.read() + self.assertEqual(result, 'Hello, UTF-8 world!\n') + + def test_open_text_given_encoding(self): + target = resources.files(self.data) / 'utf-16.file' + with target.open(encoding='utf-16', errors='strict') as fp: + result = fp.read() + self.assertEqual(result, 'Hello, UTF-16 world!\n') + + def test_open_text_with_errors(self): + # Raises UnicodeError without the 'errors' argument. + target = resources.files(self.data) / 'utf-16.file' + with target.open(encoding='utf-8', errors='strict') as fp: + self.assertRaises(UnicodeError, fp.read) + with target.open(encoding='utf-8', errors='ignore') as fp: + result = fp.read() + self.assertEqual( + result, + 'H\x00e\x00l\x00l\x00o\x00,\x00 ' + '\x00U\x00T\x00F\x00-\x001\x006\x00 ' + '\x00w\x00o\x00r\x00l\x00d\x00!\x00\n\x00', + ) + + def test_open_binary_FileNotFoundError(self): + target = resources.files(self.data) / 'does-not-exist' + self.assertRaises(FileNotFoundError, target.open, 'rb') + + def test_open_text_FileNotFoundError(self): + target = resources.files(self.data) / 'does-not-exist' + self.assertRaises(FileNotFoundError, target.open) + + +class OpenDiskTests(OpenTests, unittest.TestCase): + def setUp(self): + self.data = data01 + + +class OpenDiskNamespaceTests(OpenTests, unittest.TestCase): + def setUp(self): + from . 
import namespacedata01 + + self.data = namespacedata01 + + +class OpenZipTests(OpenTests, util.ZipSetup, unittest.TestCase): + pass + + +if __name__ == '__main__': + unittest.main() diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/test_path.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/test_path.py new file mode 100644 index 00000000..4f4d3943 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/test_path.py @@ -0,0 +1,64 @@ +import io +import unittest + +import importlib_resources as resources +from . import data01 +from . import util + + +class CommonTests(util.CommonTests, unittest.TestCase): + def execute(self, package, path): + with resources.as_file(resources.files(package).joinpath(path)): + pass + + +class PathTests: + def test_reading(self): + # Path should be readable. + # Test also implicitly verifies the returned object is a pathlib.Path + # instance. + target = resources.files(self.data) / 'utf-8.file' + with resources.as_file(target) as path: + self.assertTrue(path.name.endswith("utf-8.file"), repr(path)) + # pathlib.Path.read_text() was introduced in Python 3.5. + with path.open('r', encoding='utf-8') as file: + text = file.read() + self.assertEqual('Hello, UTF-8 world!\n', text) + + +class PathDiskTests(PathTests, unittest.TestCase): + data = data01 + + def test_natural_path(self): + """ + Guarantee the internal implementation detail that + file-system-backed resources do not get the tempdir + treatment. + """ + target = resources.files(self.data) / 'utf-8.file' + with resources.as_file(target) as path: + assert 'data' in str(path) + + +class PathMemoryTests(PathTests, unittest.TestCase): + def setUp(self): + file = io.BytesIO(b'Hello, UTF-8 world!\n') + self.addCleanup(file.close) + self.data = util.create_package( + file=file, path=FileNotFoundError("package exists only in memory") + ) + self.data.__spec__.origin = None + self.data.__spec__.has_location = False + + +class PathZipTests(PathTests, util.ZipSetup, unittest.TestCase): + def test_remove_in_context_manager(self): + # It is not an error if the file that was temporarily stashed on the + # file system is removed inside the `with` stanza. + target = resources.files(self.data) / 'utf-8.file' + with resources.as_file(target) as path: + path.unlink() + + +if __name__ == '__main__': + unittest.main() diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/test_read.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/test_read.py new file mode 100644 index 00000000..41dd6db5 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/test_read.py @@ -0,0 +1,76 @@ +import unittest +import importlib_resources as resources + +from . import data01 +from . 
import util +from importlib import import_module + + +class CommonBinaryTests(util.CommonTests, unittest.TestCase): + def execute(self, package, path): + resources.files(package).joinpath(path).read_bytes() + + +class CommonTextTests(util.CommonTests, unittest.TestCase): + def execute(self, package, path): + resources.files(package).joinpath(path).read_text() + + +class ReadTests: + def test_read_bytes(self): + result = resources.files(self.data).joinpath('binary.file').read_bytes() + self.assertEqual(result, b'\0\1\2\3') + + def test_read_text_default_encoding(self): + result = resources.files(self.data).joinpath('utf-8.file').read_text() + self.assertEqual(result, 'Hello, UTF-8 world!\n') + + def test_read_text_given_encoding(self): + result = ( + resources.files(self.data) + .joinpath('utf-16.file') + .read_text(encoding='utf-16') + ) + self.assertEqual(result, 'Hello, UTF-16 world!\n') + + def test_read_text_with_errors(self): + # Raises UnicodeError without the 'errors' argument. + target = resources.files(self.data) / 'utf-16.file' + self.assertRaises(UnicodeError, target.read_text, encoding='utf-8') + result = target.read_text(encoding='utf-8', errors='ignore') + self.assertEqual( + result, + 'H\x00e\x00l\x00l\x00o\x00,\x00 ' + '\x00U\x00T\x00F\x00-\x001\x006\x00 ' + '\x00w\x00o\x00r\x00l\x00d\x00!\x00\n\x00', + ) + + +class ReadDiskTests(ReadTests, unittest.TestCase): + data = data01 + + +class ReadZipTests(ReadTests, util.ZipSetup, unittest.TestCase): + def test_read_submodule_resource(self): + submodule = import_module('ziptestdata.subdirectory') + result = resources.files(submodule).joinpath('binary.file').read_bytes() + self.assertEqual(result, b'\0\1\2\3') + + def test_read_submodule_resource_by_name(self): + result = ( + resources.files('ziptestdata.subdirectory') + .joinpath('binary.file') + .read_bytes() + ) + self.assertEqual(result, b'\0\1\2\3') + + +class ReadNamespaceTests(ReadTests, unittest.TestCase): + def setUp(self): + from . 
import namespacedata01 + + self.data = namespacedata01 + + +if __name__ == '__main__': + unittest.main() diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/test_reader.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/test_reader.py new file mode 100644 index 00000000..1c8ebeeb --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/test_reader.py @@ -0,0 +1,133 @@ +import os.path +import sys +import pathlib +import unittest + +from importlib import import_module +from importlib_resources.readers import MultiplexedPath, NamespaceReader + + +class MultiplexedPathTest(unittest.TestCase): + @classmethod + def setUpClass(cls): + path = pathlib.Path(__file__).parent / 'namespacedata01' + cls.folder = str(path) + + def test_init_no_paths(self): + with self.assertRaises(FileNotFoundError): + MultiplexedPath() + + def test_init_file(self): + with self.assertRaises(NotADirectoryError): + MultiplexedPath(os.path.join(self.folder, 'binary.file')) + + def test_iterdir(self): + contents = {path.name for path in MultiplexedPath(self.folder).iterdir()} + try: + contents.remove('__pycache__') + except (KeyError, ValueError): + pass + self.assertEqual(contents, {'binary.file', 'utf-16.file', 'utf-8.file'}) + + def test_iterdir_duplicate(self): + data01 = os.path.abspath(os.path.join(__file__, '..', 'data01')) + contents = { + path.name for path in MultiplexedPath(self.folder, data01).iterdir() + } + for remove in ('__pycache__', '__init__.pyc'): + try: + contents.remove(remove) + except (KeyError, ValueError): + pass + self.assertEqual( + contents, + {'__init__.py', 'binary.file', 'subdirectory', 'utf-16.file', 'utf-8.file'}, + ) + + def test_is_dir(self): + self.assertEqual(MultiplexedPath(self.folder).is_dir(), True) + + def test_is_file(self): + self.assertEqual(MultiplexedPath(self.folder).is_file(), False) + + def test_open_file(self): + path = MultiplexedPath(self.folder) + with self.assertRaises(FileNotFoundError): + path.read_bytes() + with self.assertRaises(FileNotFoundError): + path.read_text() + with self.assertRaises(FileNotFoundError): + path.open() + + def test_join_path(self): + prefix = os.path.abspath(os.path.join(__file__, '..')) + data01 = os.path.join(prefix, 'data01') + path = MultiplexedPath(self.folder, data01) + self.assertEqual( + str(path.joinpath('binary.file'))[len(prefix) + 1 :], + os.path.join('namespacedata01', 'binary.file'), + ) + self.assertEqual( + str(path.joinpath('subdirectory'))[len(prefix) + 1 :], + os.path.join('data01', 'subdirectory'), + ) + self.assertEqual( + str(path.joinpath('imaginary'))[len(prefix) + 1 :], + os.path.join('namespacedata01', 'imaginary'), + ) + self.assertEqual(path.joinpath(), path) + + def test_join_path_compound(self): + path = MultiplexedPath(self.folder) + assert not path.joinpath('imaginary/foo.py').exists() + + def test_repr(self): + self.assertEqual( + repr(MultiplexedPath(self.folder)), + f"MultiplexedPath('{self.folder}')", + ) + + def test_name(self): + self.assertEqual( + MultiplexedPath(self.folder).name, + os.path.basename(self.folder), + ) + + +class NamespaceReaderTest(unittest.TestCase): + site_dir = str(pathlib.Path(__file__).parent) + + @classmethod + def setUpClass(cls): + sys.path.append(cls.site_dir) + + @classmethod + def tearDownClass(cls): + sys.path.remove(cls.site_dir) + + def test_init_error(self): + with self.assertRaises(ValueError): + NamespaceReader(['path1', 'path2']) + + def test_resource_path(self): + 
namespacedata01 = import_module('namespacedata01') + reader = NamespaceReader(namespacedata01.__spec__.submodule_search_locations) + + root = os.path.abspath(os.path.join(__file__, '..', 'namespacedata01')) + self.assertEqual( + reader.resource_path('binary.file'), os.path.join(root, 'binary.file') + ) + self.assertEqual( + reader.resource_path('imaginary'), os.path.join(root, 'imaginary') + ) + + def test_files(self): + namespacedata01 = import_module('namespacedata01') + reader = NamespaceReader(namespacedata01.__spec__.submodule_search_locations) + root = os.path.abspath(os.path.join(__file__, '..', 'namespacedata01')) + self.assertIsInstance(reader.files(), MultiplexedPath) + self.assertEqual(repr(reader.files()), f"MultiplexedPath('{root}')") + + +if __name__ == '__main__': + unittest.main() diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/test_resource.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/test_resource.py new file mode 100644 index 00000000..82390271 --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/test_resource.py @@ -0,0 +1,260 @@ +import sys +import unittest +import importlib_resources as resources +import uuid +import pathlib + +from . import data01 +from . import zipdata01, zipdata02 +from . import util +from importlib import import_module +from ._compat import import_helper, unlink + + +class ResourceTests: + # Subclasses are expected to set the `data` attribute. + + def test_is_file_exists(self): + target = resources.files(self.data) / 'binary.file' + self.assertTrue(target.is_file()) + + def test_is_file_missing(self): + target = resources.files(self.data) / 'not-a-file' + self.assertFalse(target.is_file()) + + def test_is_dir(self): + target = resources.files(self.data) / 'subdirectory' + self.assertFalse(target.is_file()) + self.assertTrue(target.is_dir()) + + +class ResourceDiskTests(ResourceTests, unittest.TestCase): + def setUp(self): + self.data = data01 + + +class ResourceZipTests(ResourceTests, util.ZipSetup, unittest.TestCase): + pass + + +def names(traversable): + return {item.name for item in traversable.iterdir()} + + +class ResourceLoaderTests(unittest.TestCase): + def test_resource_contents(self): + package = util.create_package( + file=data01, path=data01.__file__, contents=['A', 'B', 'C'] + ) + self.assertEqual(names(resources.files(package)), {'A', 'B', 'C'}) + + def test_is_file(self): + package = util.create_package( + file=data01, path=data01.__file__, contents=['A', 'B', 'C', 'D/E', 'D/F'] + ) + self.assertTrue(resources.files(package).joinpath('B').is_file()) + + def test_is_dir(self): + package = util.create_package( + file=data01, path=data01.__file__, contents=['A', 'B', 'C', 'D/E', 'D/F'] + ) + self.assertTrue(resources.files(package).joinpath('D').is_dir()) + + def test_resource_missing(self): + package = util.create_package( + file=data01, path=data01.__file__, contents=['A', 'B', 'C', 'D/E', 'D/F'] + ) + self.assertFalse(resources.files(package).joinpath('Z').is_file()) + + +class ResourceCornerCaseTests(unittest.TestCase): + def test_package_has_no_reader_fallback(self): + # Test odd ball packages which: + # 1. Do not have a ResourceReader as a loader + # 2. Are not on the file system + # 3. Are not in a zip file + module = util.create_package( + file=data01, path=data01.__file__, contents=['A', 'B', 'C'] + ) + # Give the module a dummy loader. + module.__loader__ = object() + # Give the module a dummy origin. 
+ module.__file__ = '/path/which/shall/not/be/named' + module.__spec__.loader = module.__loader__ + module.__spec__.origin = module.__file__ + self.assertFalse(resources.files(module).joinpath('A').is_file()) + + +class ResourceFromZipsTest01(util.ZipSetupBase, unittest.TestCase): + ZIP_MODULE = zipdata01 # type: ignore + + def test_is_submodule_resource(self): + submodule = import_module('ziptestdata.subdirectory') + self.assertTrue(resources.files(submodule).joinpath('binary.file').is_file()) + + def test_read_submodule_resource_by_name(self): + self.assertTrue( + resources.files('ziptestdata.subdirectory') + .joinpath('binary.file') + .is_file() + ) + + def test_submodule_contents(self): + submodule = import_module('ziptestdata.subdirectory') + self.assertEqual( + names(resources.files(submodule)), {'__init__.py', 'binary.file'} + ) + + def test_submodule_contents_by_name(self): + self.assertEqual( + names(resources.files('ziptestdata.subdirectory')), + {'__init__.py', 'binary.file'}, + ) + + def test_as_file_directory(self): + with resources.as_file(resources.files('ziptestdata')) as data: + assert data.name == 'ziptestdata' + assert data.is_dir() + assert data.joinpath('subdirectory').is_dir() + assert len(list(data.iterdir())) + assert not data.parent.exists() + + +class ResourceFromZipsTest02(util.ZipSetupBase, unittest.TestCase): + ZIP_MODULE = zipdata02 # type: ignore + + def test_unrelated_contents(self): + """ + Test that a zip with two unrelated subpackages returns + distinct resources. Ref python/importlib_resources#44. + """ + self.assertEqual( + names(resources.files('ziptestdata.one')), + {'__init__.py', 'resource1.txt'}, + ) + self.assertEqual( + names(resources.files('ziptestdata.two')), + {'__init__.py', 'resource2.txt'}, + ) + + +class DeletingZipsTest(unittest.TestCase): + """Having accessed resources in a zip file should not keep an open + reference to the zip.
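
For orientation, the access pattern these zip-deletion tests exercise looks roughly like the following sketch; the package name "mypkg" and the resource name are hypothetical, and the package is assumed importable from a zip already on sys.path:

import importlib_resources as resources

# Read a packaged resource out of a zip-backed package.
data = resources.files("mypkg").joinpath("binary.file").read_bytes()
# Once read_bytes() returns, no open handle to the backing zip should
# remain, so the zip file itself can be unlinked or replaced.
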
+ """ + + ZIP_MODULE = zipdata01 + + def setUp(self): + modules = import_helper.modules_setup() + self.addCleanup(import_helper.modules_cleanup, *modules) + + data_path = pathlib.Path(self.ZIP_MODULE.__file__) + data_dir = data_path.parent + self.source_zip_path = data_dir / 'ziptestdata.zip' + self.zip_path = pathlib.Path(f'{uuid.uuid4()}.zip').absolute() + self.zip_path.write_bytes(self.source_zip_path.read_bytes()) + sys.path.append(str(self.zip_path)) + self.data = import_module('ziptestdata') + + def tearDown(self): + try: + sys.path.remove(str(self.zip_path)) + except ValueError: + pass + + try: + del sys.path_importer_cache[str(self.zip_path)] + del sys.modules[self.data.__name__] + except KeyError: + pass + + try: + unlink(self.zip_path) + except OSError: + # If the test fails, this will probably fail too + pass + + def test_iterdir_does_not_keep_open(self): + c = [item.name for item in resources.files('ziptestdata').iterdir()] + self.zip_path.unlink() + del c + + def test_is_file_does_not_keep_open(self): + c = resources.files('ziptestdata').joinpath('binary.file').is_file() + self.zip_path.unlink() + del c + + def test_is_file_failure_does_not_keep_open(self): + c = resources.files('ziptestdata').joinpath('not-present').is_file() + self.zip_path.unlink() + del c + + @unittest.skip("Desired but not supported.") + def test_as_file_does_not_keep_open(self): # pragma: no cover + c = resources.as_file(resources.files('ziptestdata') / 'binary.file') + self.zip_path.unlink() + del c + + def test_entered_path_does_not_keep_open(self): + # This is what certifi does on import to make its bundle + # available for the process duration. + c = resources.as_file( + resources.files('ziptestdata') / 'binary.file' + ).__enter__() + self.zip_path.unlink() + del c + + def test_read_binary_does_not_keep_open(self): + c = resources.files('ziptestdata').joinpath('binary.file').read_bytes() + self.zip_path.unlink() + del c + + def test_read_text_does_not_keep_open(self): + c = resources.files('ziptestdata').joinpath('utf-8.file').read_text() + self.zip_path.unlink() + del c + + +class ResourceFromNamespaceTest01(unittest.TestCase): + site_dir = str(pathlib.Path(__file__).parent) + + @classmethod + def setUpClass(cls): + sys.path.append(cls.site_dir) + + @classmethod + def tearDownClass(cls): + sys.path.remove(cls.site_dir) + + def test_is_submodule_resource(self): + self.assertTrue( + resources.files(import_module('namespacedata01')) + .joinpath('binary.file') + .is_file() + ) + + def test_read_submodule_resource_by_name(self): + self.assertTrue( + resources.files('namespacedata01').joinpath('binary.file').is_file() + ) + + def test_submodule_contents(self): + contents = names(resources.files(import_module('namespacedata01'))) + try: + contents.remove('__pycache__') + except KeyError: + pass + self.assertEqual(contents, {'binary.file', 'utf-8.file', 'utf-16.file'}) + + def test_submodule_contents_by_name(self): + contents = names(resources.files('namespacedata01')) + try: + contents.remove('__pycache__') + except KeyError: + pass + self.assertEqual(contents, {'binary.file', 'utf-8.file', 'utf-16.file'}) + + +if __name__ == '__main__': + unittest.main() diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/update-zips.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/update-zips.py new file mode 100644 index 00000000..231334aa --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/update-zips.py @@ 
-0,0 +1,53 @@ +""" +Generate the zip test data files. + +Run to build the tests/zipdataNN/ziptestdata.zip files from +files in tests/dataNN. + +Replaces the file with the working copy, but does not commit anything +to the source repo. +""" + +import contextlib +import os +import pathlib +import zipfile + + +def main(): + """ + >>> from unittest import mock + >>> monkeypatch = getfixture('monkeypatch') + >>> monkeypatch.setattr(zipfile, 'ZipFile', mock.MagicMock()) + >>> print(); main() # print workaround for bpo-32509 + + ...data01... -> ziptestdata/... + ... + ...data02... -> ziptestdata/... + ... + """ + suffixes = '01', '02' + tuple(map(generate, suffixes)) + + +def generate(suffix): + root = pathlib.Path(__file__).parent.relative_to(os.getcwd()) + zfpath = root / f'zipdata{suffix}/ziptestdata.zip' + with zipfile.ZipFile(zfpath, 'w') as zf: + for src, rel in walk(root / f'data{suffix}'): + dst = 'ziptestdata' / pathlib.PurePosixPath(rel.as_posix()) + print(src, '->', dst) + zf.write(src, dst) + + +def walk(datapath): + for dirpath, dirnames, filenames in os.walk(datapath): + with contextlib.suppress(ValueError): + dirnames.remove('__pycache__') + for filename in filenames: + res = pathlib.Path(dirpath) / filename + rel = res.relative_to(datapath) + yield res, rel + + +__name__ == '__main__' and main() diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/util.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/util.py new file mode 100644 index 00000000..b596c0ce --- /dev/null +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/util.py @@ -0,0 +1,167 @@ +import abc +import importlib +import io +import sys +import types +import pathlib + +from . import data01 +from . import zipdata01 +from ..abc import ResourceReader +from ._compat import import_helper + + +from importlib.machinery import ModuleSpec + + +class Reader(ResourceReader): + def __init__(self, **kwargs): + vars(self).update(kwargs) + + def get_resource_reader(self, package): + return self + + def open_resource(self, path): + self._path = path + if isinstance(self.file, Exception): + raise self.file + return self.file + + def resource_path(self, path_): + self._path = path_ + if isinstance(self.path, Exception): + raise self.path + return self.path + + def is_resource(self, path_): + self._path = path_ + if isinstance(self.path, Exception): + raise self.path + + def part(entry): + return entry.split('/') + + return any( + len(parts) == 1 and parts[0] == path_ for parts in map(part, self._contents) + ) + + def contents(self): + if isinstance(self.path, Exception): + raise self.path + yield from self._contents + + +def create_package_from_loader(loader, is_package=True): + name = 'testingpackage' + module = types.ModuleType(name) + spec = ModuleSpec(name, loader, origin='does-not-exist', is_package=is_package) + module.__spec__ = spec + module.__loader__ = loader + return module + + +def create_package(file=None, path=None, is_package=True, contents=()): + return create_package_from_loader( + Reader(file=file, path=path, _contents=contents), + is_package, + ) + + +class CommonTests(metaclass=abc.ABCMeta): + """ + Tests shared by test_open, test_path, and test_read. + """ + + @abc.abstractmethod + def execute(self, package, path): + """ + Call the pertinent legacy API function (e.g. open_text, path) + on package and path. + """ + + def test_package_name(self): + # Passing in the package name should succeed.
+ self.execute(data01.__name__, 'utf-8.file') + + def test_package_object(self): + # Passing in the package itself should succeed. + self.execute(data01, 'utf-8.file') + + def test_string_path(self): + # Passing in a string for the path should succeed. + path = 'utf-8.file' + self.execute(data01, path) + + def test_pathlib_path(self): + # Passing in a pathlib.PurePath object for the path should succeed. + path = pathlib.PurePath('utf-8.file') + self.execute(data01, path) + + def test_importing_module_as_side_effect(self): + # The anchor package can already be imported. + del sys.modules[data01.__name__] + self.execute(data01.__name__, 'utf-8.file') + + def test_missing_path(self): + # Attempting to open or read or request the path for a + # non-existent path should succeed if open_resource + # can return a viable data stream. + bytes_data = io.BytesIO(b'Hello, world!') + package = create_package(file=bytes_data, path=FileNotFoundError()) + self.execute(package, 'utf-8.file') + self.assertEqual(package.__loader__._path, 'utf-8.file') + + def test_extant_path(self): + # Attempting to open or read or request the path when the + # path does exist should still succeed. Does not assert + # anything about the result. + bytes_data = io.BytesIO(b'Hello, world!') + # any path that exists + path = __file__ + package = create_package(file=bytes_data, path=path) + self.execute(package, 'utf-8.file') + self.assertEqual(package.__loader__._path, 'utf-8.file') + + def test_useless_loader(self): + package = create_package(file=FileNotFoundError(), path=FileNotFoundError()) + with self.assertRaises(FileNotFoundError): + self.execute(package, 'utf-8.file') + + +class ZipSetupBase: + ZIP_MODULE = None + + @classmethod + def setUpClass(cls): + data_path = pathlib.Path(cls.ZIP_MODULE.__file__) + data_dir = data_path.parent + cls._zip_path = str(data_dir / 'ziptestdata.zip') + sys.path.append(cls._zip_path) + cls.data = importlib.import_module('ziptestdata') + + @classmethod + def tearDownClass(cls): + try: + sys.path.remove(cls._zip_path) + except ValueError: + pass + + try: + del sys.path_importer_cache[cls._zip_path] + del sys.modules[cls.data.__name__] + except KeyError: + pass + + try: + del cls.data + del cls._zip_path + except AttributeError: + pass + + def setUp(self): + modules = import_helper.modules_setup() + self.addCleanup(import_helper.modules_cleanup, *modules) + + +class ZipSetup(ZipSetupBase): + ZIP_MODULE = zipdata01 # type: ignore diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/zipdata01/__init__.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/zipdata01/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/zipdata01/ziptestdata.zip b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/zipdata01/ziptestdata.zip new file mode 100644 index 00000000..9a3bb073 Binary files /dev/null and b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/zipdata01/ziptestdata.zip differ diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/zipdata02/__init__.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/zipdata02/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/zipdata02/ziptestdata.zip 
b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/zipdata02/ziptestdata.zip new file mode 100644 index 00000000..d63ff512 Binary files /dev/null and b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/importlib_resources/tests/zipdata02/ziptestdata.zip differ diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/jinja2/__init__.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/jinja2/__init__.py old mode 100755 new mode 100644 index 15e13b6f..f17866f6 --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/jinja2/__init__.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/jinja2/__init__.py @@ -1,83 +1,44 @@ # -*- coding: utf-8 -*- +"""Jinja is a template engine written in pure Python. It provides a +non-XML syntax that supports inline expressions and an optional +sandboxed environment. """ - jinja2 - ~~~~~~ - - Jinja2 is a template engine written in pure Python. It provides a - Django inspired non-XML syntax but supports inline expressions and - an optional sandboxed environment. - - Nutshell - -------- - - Here a small example of a Jinja2 template:: - - {% extends 'base.html' %} - {% block title %}Memberlist{% endblock %} - {% block content %} -
<ul> - {% for user in users %} - <li><a href="{{ user.url }}">{{ user.username }}</a></li> - {% endfor %} - </ul>
- {% endblock %} - - - :copyright: (c) 2017 by the Jinja Team. - :license: BSD, see LICENSE for more details. -""" -__docformat__ = 'restructuredtext en' -__version__ = '2.10.1' - -# high level interface -from jinja2.environment import Environment, Template - -# loaders -from jinja2.loaders import BaseLoader, FileSystemLoader, PackageLoader, \ - DictLoader, FunctionLoader, PrefixLoader, ChoiceLoader, \ - ModuleLoader - -# bytecode caches -from jinja2.bccache import BytecodeCache, FileSystemBytecodeCache, \ - MemcachedBytecodeCache - -# undefined types -from jinja2.runtime import Undefined, DebugUndefined, StrictUndefined, \ - make_logging_undefined - -# exceptions -from jinja2.exceptions import TemplateError, UndefinedError, \ - TemplateNotFound, TemplatesNotFound, TemplateSyntaxError, \ - TemplateAssertionError, TemplateRuntimeError - -# decorators and public utilities -from jinja2.filters import environmentfilter, contextfilter, \ - evalcontextfilter -from jinja2.utils import Markup, escape, clear_caches, \ - environmentfunction, evalcontextfunction, contextfunction, \ - is_undefined, select_autoescape - -__all__ = [ - 'Environment', 'Template', 'BaseLoader', 'FileSystemLoader', - 'PackageLoader', 'DictLoader', 'FunctionLoader', 'PrefixLoader', - 'ChoiceLoader', 'BytecodeCache', 'FileSystemBytecodeCache', - 'MemcachedBytecodeCache', 'Undefined', 'DebugUndefined', - 'StrictUndefined', 'TemplateError', 'UndefinedError', 'TemplateNotFound', - 'TemplatesNotFound', 'TemplateSyntaxError', 'TemplateAssertionError', - 'TemplateRuntimeError', - 'ModuleLoader', 'environmentfilter', 'contextfilter', 'Markup', 'escape', - 'environmentfunction', 'contextfunction', 'clear_caches', 'is_undefined', - 'evalcontextfilter', 'evalcontextfunction', 'make_logging_undefined', - 'select_autoescape', -] - - -def _patch_async(): - from jinja2.utils import have_async_gen - if have_async_gen: - from jinja2.asyncsupport import patch_all - patch_all() - - -_patch_async() -del _patch_async +from markupsafe import escape +from markupsafe import Markup + +from .bccache import BytecodeCache +from .bccache import FileSystemBytecodeCache +from .bccache import MemcachedBytecodeCache +from .environment import Environment +from .environment import Template +from .exceptions import TemplateAssertionError +from .exceptions import TemplateError +from .exceptions import TemplateNotFound +from .exceptions import TemplateRuntimeError +from .exceptions import TemplatesNotFound +from .exceptions import TemplateSyntaxError +from .exceptions import UndefinedError +from .filters import contextfilter +from .filters import environmentfilter +from .filters import evalcontextfilter +from .loaders import BaseLoader +from .loaders import ChoiceLoader +from .loaders import DictLoader +from .loaders import FileSystemLoader +from .loaders import FunctionLoader +from .loaders import ModuleLoader +from .loaders import PackageLoader +from .loaders import PrefixLoader +from .runtime import ChainableUndefined +from .runtime import DebugUndefined +from .runtime import make_logging_undefined +from .runtime import StrictUndefined +from .runtime import Undefined +from .utils import clear_caches +from .utils import contextfunction +from .utils import environmentfunction +from .utils import evalcontextfunction +from .utils import is_undefined +from .utils import select_autoescape + +__version__ = "2.11.3" diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/jinja2/_compat.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/jinja2/_compat.py 
old mode 100755 new mode 100644 index 61d85301..1f044954 --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/jinja2/_compat.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/jinja2/_compat.py @@ -1,22 +1,12 @@ # -*- coding: utf-8 -*- -""" - jinja2._compat - ~~~~~~~~~~~~~~ - - Some py2/py3 compatibility support based on a stripped down - version of six so we don't have to depend on a specific version - of it. - - :copyright: Copyright 2013 by the Jinja team, see AUTHORS. - :license: BSD, see LICENSE for details. -""" +# flake8: noqa +import marshal import sys PY2 = sys.version_info[0] == 2 -PYPY = hasattr(sys, 'pypy_translation_info') +PYPY = hasattr(sys, "pypy_translation_info") _identity = lambda x: x - if not PY2: unichr = chr range_type = range @@ -30,6 +20,7 @@ import pickle from io import BytesIO, StringIO + NativeStringIO = StringIO def reraise(tp, value, tb=None): @@ -46,6 +37,9 @@ def reraise(tp, value, tb=None): implements_to_string = _identity encode_filename = _identity + marshal_dump = marshal.dump + marshal_load = marshal.load + else: unichr = unichr text_type = unicode @@ -59,11 +53,13 @@ def reraise(tp, value, tb=None): import cPickle as pickle from cStringIO import StringIO as BytesIO, StringIO + NativeStringIO = BytesIO - exec('def reraise(tp, value, tb=None):\n raise tp, value, tb') + exec("def reraise(tp, value, tb=None):\n raise tp, value, tb") from itertools import imap, izip, ifilter + intern = intern def implements_iterator(cls): @@ -73,14 +69,25 @@ def implements_iterator(cls): def implements_to_string(cls): cls.__unicode__ = cls.__str__ - cls.__str__ = lambda x: x.__unicode__().encode('utf-8') + cls.__str__ = lambda x: x.__unicode__().encode("utf-8") return cls def encode_filename(filename): if isinstance(filename, unicode): - return filename.encode('utf-8') + return filename.encode("utf-8") return filename + def marshal_dump(code, f): + if isinstance(f, file): + marshal.dump(code, f) + else: + f.write(marshal.dumps(code)) + + def marshal_load(f): + if isinstance(f, file): + return marshal.load(f) + return marshal.loads(f.read()) + def with_metaclass(meta, *bases): """Create a base class with a metaclass.""" @@ -90,10 +97,36 @@ def with_metaclass(meta, *bases): class metaclass(type): def __new__(cls, name, this_bases, d): return meta(name, bases, d) - return type.__new__(metaclass, 'temporary_class', (), {}) + + return type.__new__(metaclass, "temporary_class", (), {}) try: from urllib.parse import quote_from_bytes as url_quote except ImportError: from urllib import quote as url_quote + + +try: + from collections import abc +except ImportError: + import collections as abc + + +try: + from os import fspath +except ImportError: + try: + from pathlib import PurePath + except ImportError: + PurePath = None + + def fspath(path): + if hasattr(path, "__fspath__"): + return path.__fspath__() + + # Python 3.5 doesn't have __fspath__ yet, use str. 
+ if PurePath is not None and isinstance(path, PurePath): + return str(path) + + return path diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/jinja2/_identifier.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/jinja2/_identifier.py old mode 100755 new mode 100644 index 2eac35d5..224d5449 --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/jinja2/_identifier.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/jinja2/_identifier.py @@ -1,2 +1,6 @@ +import re + # generated by scripts/generate_identifier_pattern.py -pattern = '·̀-ͯ·҃-֑҇-ׇֽֿׁׂׅׄؐ-ًؚ-ٰٟۖ-ۜ۟-۪ۤۧۨ-ܑۭܰ-݊ަ-ް߫-߳ࠖ-࠙ࠛ-ࠣࠥ-ࠧࠩ-࡙࠭-࡛ࣔ-ࣣ࣡-ःऺ-़ा-ॏ॑-ॗॢॣঁ-ঃ়া-ৄেৈো-্ৗৢৣਁ-ਃ਼ਾ-ੂੇੈੋ-੍ੑੰੱੵઁ-ઃ઼ા-ૅે-ૉો-્ૢૣଁ-ଃ଼ା-ୄେୈୋ-୍ୖୗୢୣஂா-ூெ-ைொ-்ௗఀ-ఃా-ౄె-ైొ-్ౕౖౢౣಁ-ಃ಼ಾ-ೄೆ-ೈೊ-್ೕೖೢೣഁ-ഃാ-ൄെ-ൈൊ-്ൗൢൣංඃ්ා-ුූෘ-ෟෲෳัิ-ฺ็-๎ັິ-ູົຼ່-ໍ༹༘༙༵༷༾༿ཱ-྄྆྇ྍ-ྗྙ-ྼ࿆ါ-ှၖ-ၙၞ-ၠၢ-ၤၧ-ၭၱ-ၴႂ-ႍႏႚ-ႝ፝-፟ᜒ-᜔ᜲ-᜴ᝒᝓᝲᝳ឴-៓៝᠋-᠍ᢅᢆᢩᤠ-ᤫᤰ-᤻ᨗ-ᨛᩕ-ᩞ᩠-᩿᩼᪰-᪽ᬀ-ᬄ᬴-᭄᭫-᭳ᮀ-ᮂᮡ-ᮭ᯦-᯳ᰤ-᰷᳐-᳔᳒-᳨᳭ᳲ-᳴᳸᳹᷀-᷵᷻-᷿‿⁀⁔⃐-⃥⃜⃡-⃰℘℮⳯-⵿⳱ⷠ-〪ⷿ-゙゚〯꙯ꙴ-꙽ꚞꚟ꛰꛱ꠂ꠆ꠋꠣ-ꠧꢀꢁꢴ-ꣅ꣠-꣱ꤦ-꤭ꥇ-꥓ꦀ-ꦃ꦳-꧀ꧥꨩ-ꨶꩃꩌꩍꩻ-ꩽꪰꪲ-ꪴꪷꪸꪾ꪿꫁ꫫ-ꫯꫵ꫶ꯣ-ꯪ꯬꯭ﬞ︀-️︠-︯︳︴﹍-﹏_𐇽𐋠𐍶-𐍺𐨁-𐨃𐨅𐨆𐨌-𐨏𐨸-𐨿𐨺𐫦𐫥𑀀-𑀂𑀸-𑁆𑁿-𑂂𑂰-𑂺𑄀-𑄂𑄧-𑅳𑄴𑆀-𑆂𑆳-𑇊𑇀-𑇌𑈬-𑈷𑈾𑋟-𑋪𑌀-𑌃𑌼𑌾-𑍄𑍇𑍈𑍋-𑍍𑍗𑍢𑍣𑍦-𑍬𑍰-𑍴𑐵-𑑆𑒰-𑓃𑖯-𑖵𑖸-𑗀𑗜𑗝𑘰-𑙀𑚫-𑚷𑜝-𑜫𑰯-𑰶𑰸-𑰿𑲒-𑲧𑲩-𑲶𖫰-𖫴𖬰-𖬶𖽑-𖽾𖾏-𖾒𛲝𛲞𝅥-𝅩𝅭-𝅲𝅻-𝆂𝆅-𝆋𝆪-𝆭𝉂-𝉄𝨀-𝨶𝨻-𝩬𝩵𝪄𝪛-𝪟𝪡-𝪯𞀀-𞀆𞀈-𞀘𞀛-𞀡𞀣𞀤𞀦-𞣐𞀪-𞣖𞥄-𞥊󠄀-󠇯' +pattern = re.compile( + r"[\w·̀-ͯ·҃-֑҇-ׇֽֿׁׂׅׄؐ-ًؚ-ٰٟۖ-ۜ۟-۪ۤۧۨ-ܑۭܰ-݊ަ-ް߫-߳ࠖ-࠙ࠛ-ࠣࠥ-ࠧࠩ-࡙࠭-࡛ࣔ-ࣣ࣡-ःऺ-़ा-ॏ॑-ॗॢॣঁ-ঃ়া-ৄেৈো-্ৗৢৣਁ-ਃ਼ਾ-ੂੇੈੋ-੍ੑੰੱੵઁ-ઃ઼ા-ૅે-ૉો-્ૢૣଁ-ଃ଼ା-ୄେୈୋ-୍ୖୗୢୣஂா-ூெ-ைொ-்ௗఀ-ఃా-ౄె-ైొ-్ౕౖౢౣಁ-ಃ಼ಾ-ೄೆ-ೈೊ-್ೕೖೢೣഁ-ഃാ-ൄെ-ൈൊ-്ൗൢൣංඃ්ා-ුූෘ-ෟෲෳัิ-ฺ็-๎ັິ-ູົຼ່-ໍ༹༘༙༵༷༾༿ཱ-྄྆྇ྍ-ྗྙ-ྼ࿆ါ-ှၖ-ၙၞ-ၠၢ-ၤၧ-ၭၱ-ၴႂ-ႍႏႚ-ႝ፝-፟ᜒ-᜔ᜲ-᜴ᝒᝓᝲᝳ឴-៓៝᠋-᠍ᢅᢆᢩᤠ-ᤫᤰ-᤻ᨗ-ᨛᩕ-ᩞ᩠-᩿᩼᪰-᪽ᬀ-ᬄ᬴-᭄᭫-᭳ᮀ-ᮂᮡ-ᮭ᯦-᯳ᰤ-᰷᳐-᳔᳒-᳨᳭ᳲ-᳴᳸᳹᷀-᷵᷻-᷿‿⁀⁔⃐-⃥⃜⃡-⃰℘℮⳯-⵿⳱ⷠ-〪ⷿ-゙゚〯꙯ꙴ-꙽ꚞꚟ꛰꛱ꠂ꠆ꠋꠣ-ꠧꢀꢁꢴ-ꣅ꣠-꣱ꤦ-꤭ꥇ-꥓ꦀ-ꦃ꦳-꧀ꧥꨩ-ꨶꩃꩌꩍꩻ-ꩽꪰꪲ-ꪴꪷꪸꪾ꪿꫁ꫫ-ꫯꫵ꫶ꯣ-ꯪ꯬꯭ﬞ︀-️︠-︯︳︴﹍-﹏_𐇽𐋠𐍶-𐍺𐨁-𐨃𐨅𐨆𐨌-𐨏𐨸-𐨿𐨺𐫦𐫥𑀀-𑀂𑀸-𑁆𑁿-𑂂𑂰-𑂺𑄀-𑄂𑄧-𑅳𑄴𑆀-𑆂𑆳-𑇊𑇀-𑇌𑈬-𑈷𑈾𑋟-𑋪𑌀-𑌃𑌼𑌾-𑍄𑍇𑍈𑍋-𑍍𑍗𑍢𑍣𑍦-𑍬𑍰-𑍴𑐵-𑑆𑒰-𑓃𑖯-𑖵𑖸-𑗀𑗜𑗝𑘰-𑙀𑚫-𑚷𑜝-𑜫𑰯-𑰶𑰸-𑰿𑲒-𑲧𑲩-𑲶𖫰-𖫴𖬰-𖬶𖽑-𖽾𖾏-𖾒𛲝𛲞𝅥-𝅩𝅭-𝅲𝅻-𝆂𝆅-𝆋𝆪-𝆭𝉂-𝉄𝨀-𝨶𝨻-𝩬𝩵𝪄𝪛-𝪟𝪡-𝪯𞀀-𞀆𞀈-𞀘𞀛-𞀡𞀣𞀤𞀦-𞣐𞀪-𞣖𞥄-𞥊󠄀-󠇯]+" # noqa: B950 +) diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/jinja2/asyncfilters.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/jinja2/asyncfilters.py old mode 100755 new mode 100644 index 5c1f46d7..3d98dbcc --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/jinja2/asyncfilters.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/jinja2/asyncfilters.py @@ -1,12 +1,13 @@ from functools import wraps -from jinja2.asyncsupport import auto_aiter -from jinja2 import filters +from . 
import filters +from .asyncsupport import auto_aiter +from .asyncsupport import auto_await async def auto_to_seq(value): seq = [] - if hasattr(value, '__aiter__'): + if hasattr(value, "__aiter__"): async for item in value: seq.append(item) else: @@ -16,8 +17,7 @@ async def auto_to_seq(value): async def async_select_or_reject(args, kwargs, modfunc, lookup_attr): - seq, func = filters.prepare_select_or_reject( - args, kwargs, modfunc, lookup_attr) + seq, func = filters.prepare_select_or_reject(args, kwargs, modfunc, lookup_attr) if seq: async for item in auto_aiter(seq): if func(item): @@ -26,14 +26,19 @@ async def async_select_or_reject(args, kwargs, modfunc, lookup_attr): def dualfilter(normal_filter, async_filter): wrap_evalctx = False - if getattr(normal_filter, 'environmentfilter', False): - is_async = lambda args: args[0].is_async + if getattr(normal_filter, "environmentfilter", False) is True: + + def is_async(args): + return args[0].is_async + wrap_evalctx = False else: - if not getattr(normal_filter, 'evalcontextfilter', False) and \ - not getattr(normal_filter, 'contextfilter', False): - wrap_evalctx = True - is_async = lambda args: args[0].environment.is_async + has_evalctxfilter = getattr(normal_filter, "evalcontextfilter", False) is True + has_ctxfilter = getattr(normal_filter, "contextfilter", False) is True + wrap_evalctx = not has_evalctxfilter and not has_ctxfilter + + def is_async(args): + return args[0].environment.is_async @wraps(normal_filter) def wrapper(*args, **kwargs): @@ -55,6 +60,7 @@ def wrapper(*args, **kwargs): def asyncfiltervariant(original): def decorator(f): return dualfilter(original, f) + return decorator @@ -63,19 +69,22 @@ async def do_first(environment, seq): try: return await auto_aiter(seq).__anext__() except StopAsyncIteration: - return environment.undefined('No first item, sequence was empty.') + return environment.undefined("No first item, sequence was empty.") @asyncfiltervariant(filters.do_groupby) async def do_groupby(environment, value, attribute): expr = filters.make_attrgetter(environment, attribute) - return [filters._GroupTuple(key, await auto_to_seq(values)) - for key, values in filters.groupby(sorted( - await auto_to_seq(value), key=expr), expr)] + return [ + filters._GroupTuple(key, await auto_to_seq(values)) + for key, values in filters.groupby( + sorted(await auto_to_seq(value), key=expr), expr + ) + ] @asyncfiltervariant(filters.do_join) -async def do_join(eval_ctx, value, d=u'', attribute=None): +async def do_join(eval_ctx, value, d=u"", attribute=None): return filters.do_join(eval_ctx, await auto_to_seq(value), d, attribute) @@ -109,7 +118,7 @@ async def do_map(*args, **kwargs): seq, func = filters.prepare_map(args, kwargs) if seq: async for item in auto_aiter(seq): - yield func(item) + yield await auto_await(func(item)) @asyncfiltervariant(filters.do_sum) @@ -118,7 +127,10 @@ async def do_sum(environment, iterable, attribute=None, start=0): if attribute is not None: func = filters.make_attrgetter(environment, attribute) else: - func = lambda x: x + + def func(x): + return x + async for item in auto_aiter(iterable): rv += func(item) return rv @@ -130,17 +142,17 @@ async def do_slice(value, slices, fill_with=None): ASYNC_FILTERS = { - 'first': do_first, - 'groupby': do_groupby, - 'join': do_join, - 'list': do_list, + "first": do_first, + "groupby": do_groupby, + "join": do_join, + "list": do_list, # we intentionally do not support do_last because that would be # ridiculous - 'reject': do_reject, - 'rejectattr': do_rejectattr, - 
'map': do_map, - 'select': do_select, - 'selectattr': do_selectattr, - 'sum': do_sum, - 'slice': do_slice, + "reject": do_reject, + "rejectattr": do_rejectattr, + "map": do_map, + "select": do_select, + "selectattr": do_selectattr, + "sum": do_sum, + "slice": do_slice, } diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/jinja2/asyncsupport.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/jinja2/asyncsupport.py old mode 100755 new mode 100644 index b1e7b5ce..78ba3739 --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/jinja2/asyncsupport.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/jinja2/asyncsupport.py @@ -1,29 +1,27 @@ # -*- coding: utf-8 -*- +"""The code for async support. Importing this patches Jinja on supported +Python versions. """ - jinja2.asyncsupport - ~~~~~~~~~~~~~~~~~~~ - - Has all the code for async support which is implemented as a patch - for supported Python versions. - - :copyright: (c) 2017 by the Jinja Team. - :license: BSD, see LICENSE for more details. -""" -import sys import asyncio import inspect from functools import update_wrapper -from jinja2.utils import concat, internalcode, Markup -from jinja2.environment import TemplateModule -from jinja2.runtime import LoopContextBase, _last_iteration +from markupsafe import Markup + +from .environment import TemplateModule +from .runtime import LoopContext +from .utils import concat +from .utils import internalcode +from .utils import missing async def concat_async(async_gen): rv = [] + async def collect(): async for event in async_gen: rv.append(event) + await collect() return concat(rv) @@ -34,10 +32,7 @@ async def generate_async(self, *args, **kwargs): async for event in self.root_render_func(self.new_context(vars)): yield event except Exception: - exc_info = sys.exc_info() - else: - return - yield self.environment.handle_exception(exc_info, True) + yield self.environment.handle_exception() def wrap_generate_func(original_generate): @@ -48,17 +43,18 @@ def _convert_generator(self, loop, args, kwargs): yield loop.run_until_complete(async_gen.__anext__()) except StopAsyncIteration: pass + def generate(self, *args, **kwargs): if not self.environment.is_async: return original_generate(self, *args, **kwargs) return _convert_generator(self, asyncio.get_event_loop(), args, kwargs) + return update_wrapper(generate, original_generate) async def render_async(self, *args, **kwargs): if not self.environment.is_async: - raise RuntimeError('The environment was not created with async mode ' - 'enabled.') + raise RuntimeError("The environment was not created with async mode enabled.") vars = dict(*args, **kwargs) ctx = self.new_context(vars) @@ -66,8 +62,7 @@ async def render_async(self, *args, **kwargs): try: return await concat_async(self.root_render_func(ctx)) except Exception: - exc_info = sys.exc_info() - return self.environment.handle_exception(exc_info, True) + return self.environment.handle_exception() def wrap_render_func(original_render): @@ -76,6 +71,7 @@ def render(self, *args, **kwargs): return original_render(self, *args, **kwargs) loop = asyncio.get_event_loop() return loop.run_until_complete(self.render_async(*args, **kwargs)) + return update_wrapper(render, original_render) @@ -109,6 +105,7 @@ def _invoke(self, arguments, autoescape): if not self._environment.is_async: return original_invoke(self, arguments, autoescape) return async_invoke(self, arguments, autoescape) + return update_wrapper(_invoke, original_invoke) @@ -124,9 +121,9 @@ def 
wrap_default_module(original_default_module): @internalcode def _get_default_module(self): if self.environment.is_async: - raise RuntimeError('Template module attribute is unavailable ' - 'in async mode') + raise RuntimeError("Template module attribute is unavailable in async mode") return original_default_module(self) + return _get_default_module @@ -139,30 +136,30 @@ async def make_module_async(self, vars=None, shared=False, locals=None): def patch_template(): - from jinja2 import Template + from . import Template + Template.generate = wrap_generate_func(Template.generate) - Template.generate_async = update_wrapper( - generate_async, Template.generate_async) - Template.render_async = update_wrapper( - render_async, Template.render_async) + Template.generate_async = update_wrapper(generate_async, Template.generate_async) + Template.render_async = update_wrapper(render_async, Template.render_async) Template.render = wrap_render_func(Template.render) - Template._get_default_module = wrap_default_module( - Template._get_default_module) + Template._get_default_module = wrap_default_module(Template._get_default_module) Template._get_default_module_async = get_default_module_async Template.make_module_async = update_wrapper( - make_module_async, Template.make_module_async) + make_module_async, Template.make_module_async + ) def patch_runtime(): - from jinja2.runtime import BlockReference, Macro - BlockReference.__call__ = wrap_block_reference_call( - BlockReference.__call__) + from .runtime import BlockReference, Macro + + BlockReference.__call__ = wrap_block_reference_call(BlockReference.__call__) Macro._invoke = wrap_macro_invoke(Macro._invoke) def patch_filters(): - from jinja2.filters import FILTERS - from jinja2.asyncfilters import ASYNC_FILTERS + from .filters import FILTERS + from .asyncfilters import ASYNC_FILTERS + FILTERS.update(ASYNC_FILTERS) @@ -179,7 +176,7 @@ async def auto_await(value): async def auto_aiter(iterable): - if hasattr(iterable, '__aiter__'): + if hasattr(iterable, "__aiter__"): async for item in iterable: yield item return @@ -187,70 +184,81 @@ async def auto_aiter(iterable): yield item -class AsyncLoopContext(LoopContextBase): - - def __init__(self, async_iterator, undefined, after, length, recurse=None, - depth0=0): - LoopContextBase.__init__(self, undefined, recurse, depth0) - self._async_iterator = async_iterator - self._after = after - self._length = length +class AsyncLoopContext(LoopContext): + _to_iterator = staticmethod(auto_aiter) @property - def length(self): - if self._length is None: - raise TypeError('Loop length for some iterators cannot be ' - 'lazily calculated in async mode') + async def length(self): + if self._length is not None: + return self._length + + try: + self._length = len(self._iterable) + except TypeError: + iterable = [x async for x in self._iterator] + self._iterator = self._to_iterator(iterable) + self._length = len(iterable) + self.index + (self._after is not missing) + return self._length - def __aiter__(self): - return AsyncLoopContextIterator(self) + @property + async def revindex0(self): + return await self.length - self.index + @property + async def revindex(self): + return await self.length - self.index0 + + async def _peek_next(self): + if self._after is not missing: + return self._after + + try: + self._after = await self._iterator.__anext__() + except StopAsyncIteration: + self._after = missing -class AsyncLoopContextIterator(object): - __slots__ = ('context',) + return self._after - def __init__(self, context): - 
self.context = context + @property + async def last(self): + return await self._peek_next() is missing + + @property + async def nextitem(self): + rv = await self._peek_next() + + if rv is missing: + return self._undefined("there is no next item") + + return rv def __aiter__(self): return self async def __anext__(self): - ctx = self.context - ctx.index0 += 1 - if ctx._after is _last_iteration: - raise StopAsyncIteration() - ctx._before = ctx._current - ctx._current = ctx._after - try: - ctx._after = await ctx._async_iterator.__anext__() - except StopAsyncIteration: - ctx._after = _last_iteration - return ctx._current, ctx + if self._after is not missing: + rv = self._after + self._after = missing + else: + rv = await self._iterator.__anext__() + + self.index0 += 1 + self._before = self._current + self._current = rv + return rv, self async def make_async_loop_context(iterable, undefined, recurse=None, depth0=0): - # Length is more complicated and less efficient in async mode. The - # reason for this is that we cannot know if length will be used - # upfront but because length is a property we cannot lazily execute it - # later. This means that we need to buffer it up and measure :( - # - # We however only do this for actual iterators, not for async - # iterators as blocking here does not seem like the best idea in the - # world. - try: - length = len(iterable) - except (TypeError, AttributeError): - if not hasattr(iterable, '__aiter__'): - iterable = tuple(iterable) - length = len(iterable) - else: - length = None - async_iterator = auto_aiter(iterable) - try: - after = await async_iterator.__anext__() - except StopAsyncIteration: - after = _last_iteration - return AsyncLoopContext(async_iterator, undefined, after, length, recurse, - depth0) + import warnings + + warnings.warn( + "This template must be recompiled with at least Jinja 2.11, or" + " it will fail in 3.0.", + DeprecationWarning, + stacklevel=2, + ) + return AsyncLoopContext(iterable, undefined, recurse, depth0) + + +patch_all() diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/jinja2/bccache.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/jinja2/bccache.py old mode 100755 new mode 100644 index 080e527c..9c066103 --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/jinja2/bccache.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/jinja2/bccache.py @@ -1,60 +1,37 @@ # -*- coding: utf-8 -*- -""" - jinja2.bccache - ~~~~~~~~~~~~~~ - - This module implements the bytecode cache system Jinja is optionally - using. This is useful if you have very complex template situations and - the compiliation of all those templates slow down your application too - much. - - Situations where this is useful are often forking web applications that - are initialized on the first request. +"""The optional bytecode cache system. This is useful if you have very +complex template situations and the compilation of all those templates +slows down your application too much. - :copyright: (c) 2017 by the Jinja Team. - :license: BSD. +Situations where this is useful are often forking web applications that +are initialized on the first request. 
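
For context, wiring up the cache this module implements looks roughly like the following sketch; the template and cache directory paths are hypothetical:

from jinja2 import Environment, FileSystemBytecodeCache, FileSystemLoader

# Persist compiled template bytecode on disk so freshly forked workers
# can load it instead of recompiling every template on first use.
env = Environment(
    loader=FileSystemLoader("templates"),
    bytecode_cache=FileSystemBytecodeCache("/tmp/jinja_cache"),
)
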
""" -from os import path, listdir +import errno +import fnmatch import os -import sys import stat -import errno -import marshal +import sys import tempfile -import fnmatch from hashlib import sha1 -from jinja2.utils import open_if_exists -from jinja2._compat import BytesIO, pickle, PY2, text_type - - -# marshal works better on 3.x, one hack less required -if not PY2: - marshal_dump = marshal.dump - marshal_load = marshal.load -else: - - def marshal_dump(code, f): - if isinstance(f, file): - marshal.dump(code, f) - else: - f.write(marshal.dumps(code)) - - def marshal_load(f): - if isinstance(f, file): - return marshal.load(f) - return marshal.loads(f.read()) - - -bc_version = 3 - -# magic version used to only change with new jinja versions. With 2.6 -# we change this to also take Python version changes into account. The -# reason for this is that Python tends to segfault if fed earlier bytecode -# versions because someone thought it would be a good idea to reuse opcodes -# or make Python incompatible with earlier versions. -bc_magic = 'j2'.encode('ascii') + \ - pickle.dumps(bc_version, 2) + \ - pickle.dumps((sys.version_info[0] << 24) | sys.version_info[1]) +from os import listdir +from os import path + +from ._compat import BytesIO +from ._compat import marshal_dump +from ._compat import marshal_load +from ._compat import pickle +from ._compat import text_type +from .utils import open_if_exists + +bc_version = 4 +# Magic bytes to identify Jinja bytecode cache files. Contains the +# Python major and minor version to avoid loading incompatible bytecode +# if a project upgrades its Python version. +bc_magic = ( + b"j2" + + pickle.dumps(bc_version, 2) + + pickle.dumps((sys.version_info[0] << 24) | sys.version_info[1], 2) +) class Bucket(object): @@ -98,7 +75,7 @@ def load_bytecode(self, f): def write_bytecode(self, f): """Dump the bytecode into the file or file like object passed.""" if self.code is None: - raise TypeError('can\'t write empty bucket') + raise TypeError("can't write empty bucket") f.write(bc_magic) pickle.dump(self.checksum, f, 2) marshal_dump(self.code, f) @@ -140,7 +117,7 @@ def dump_bytecode(self, bucket): bucket.write_bytecode(f) A more advanced version of a filesystem based bytecode cache is part of - Jinja2. + Jinja. """ def load_bytecode(self, bucket): @@ -158,24 +135,24 @@ def dump_bytecode(self, bucket): raise NotImplementedError() def clear(self): - """Clears the cache. This method is not used by Jinja2 but should be + """Clears the cache. This method is not used by Jinja but should be implemented to allow applications to clear the bytecode cache used by a particular environment. """ def get_cache_key(self, name, filename=None): """Returns the unique hash key for this template name.""" - hash = sha1(name.encode('utf-8')) + hash = sha1(name.encode("utf-8")) if filename is not None: - filename = '|' + filename + filename = "|" + filename if isinstance(filename, text_type): - filename = filename.encode('utf-8') + filename = filename.encode("utf-8") hash.update(filename) return hash.hexdigest() def get_source_checksum(self, source): """Returns a checksum for the source.""" - return sha1(source.encode('utf-8')).hexdigest() + return sha1(source.encode("utf-8")).hexdigest() def get_bucket(self, environment, name, filename, source): """Return a cache bucket for the given template. All arguments are @@ -210,7 +187,7 @@ class FileSystemBytecodeCache(BytecodeCache): This bytecode cache supports clearing of the cache using the clear method. 
""" - def __init__(self, directory=None, pattern='__jinja2_%s.cache'): + def __init__(self, directory=None, pattern="__jinja2_%s.cache"): if directory is None: directory = self._get_default_cache_dir() self.directory = directory @@ -218,19 +195,21 @@ def __init__(self, directory=None, pattern='__jinja2_%s.cache'): def _get_default_cache_dir(self): def _unsafe_dir(): - raise RuntimeError('Cannot determine safe temp directory. You ' - 'need to explicitly provide one.') + raise RuntimeError( + "Cannot determine safe temp directory. You " + "need to explicitly provide one." + ) tmpdir = tempfile.gettempdir() # On windows the temporary directory is used specific unless # explicitly forced otherwise. We can just use that. - if os.name == 'nt': + if os.name == "nt": return tmpdir - if not hasattr(os, 'getuid'): + if not hasattr(os, "getuid"): _unsafe_dir() - dirname = '_jinja2-cache-%d' % os.getuid() + dirname = "_jinja2-cache-%d" % os.getuid() actual_dir = os.path.join(tmpdir, dirname) try: @@ -241,18 +220,22 @@ def _unsafe_dir(): try: os.chmod(actual_dir, stat.S_IRWXU) actual_dir_stat = os.lstat(actual_dir) - if actual_dir_stat.st_uid != os.getuid() \ - or not stat.S_ISDIR(actual_dir_stat.st_mode) \ - or stat.S_IMODE(actual_dir_stat.st_mode) != stat.S_IRWXU: + if ( + actual_dir_stat.st_uid != os.getuid() + or not stat.S_ISDIR(actual_dir_stat.st_mode) + or stat.S_IMODE(actual_dir_stat.st_mode) != stat.S_IRWXU + ): _unsafe_dir() except OSError as e: if e.errno != errno.EEXIST: raise actual_dir_stat = os.lstat(actual_dir) - if actual_dir_stat.st_uid != os.getuid() \ - or not stat.S_ISDIR(actual_dir_stat.st_mode) \ - or stat.S_IMODE(actual_dir_stat.st_mode) != stat.S_IRWXU: + if ( + actual_dir_stat.st_uid != os.getuid() + or not stat.S_ISDIR(actual_dir_stat.st_mode) + or stat.S_IMODE(actual_dir_stat.st_mode) != stat.S_IRWXU + ): _unsafe_dir() return actual_dir @@ -261,7 +244,7 @@ def _get_cache_filename(self, bucket): return path.join(self.directory, self.pattern % bucket.key) def load_bytecode(self, bucket): - f = open_if_exists(self._get_cache_filename(bucket), 'rb') + f = open_if_exists(self._get_cache_filename(bucket), "rb") if f is not None: try: bucket.load_bytecode(f) @@ -269,7 +252,7 @@ def load_bytecode(self, bucket): f.close() def dump_bytecode(self, bucket): - f = open(self._get_cache_filename(bucket), 'wb') + f = open(self._get_cache_filename(bucket), "wb") try: bucket.write_bytecode(f) finally: @@ -280,7 +263,8 @@ def clear(self): # write access on the file system and the function does not exist # normally. from os import remove - files = fnmatch.filter(listdir(self.directory), self.pattern % '*') + + files = fnmatch.filter(listdir(self.directory), self.pattern % "*") for filename in files: try: remove(path.join(self.directory, filename)) @@ -296,9 +280,8 @@ class MemcachedBytecodeCache(BytecodeCache): Libraries compatible with this class: - - `werkzeug `_.contrib.cache - - `python-memcached `_ - - `cmemcache `_ + - `cachelib `_ + - `python-memcached `_ (Unfortunately the django cache interface is not compatible because it does not support storing binary data, only unicode. You can however pass @@ -334,8 +317,13 @@ class MemcachedBytecodeCache(BytecodeCache): `ignore_memcache_errors` parameter. 
""" - def __init__(self, client, prefix='jinja2/bytecode/', timeout=None, - ignore_memcache_errors=True): + def __init__( + self, + client, + prefix="jinja2/bytecode/", + timeout=None, + ignore_memcache_errors=True, + ): self.client = client self.prefix = prefix self.timeout = timeout diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/jinja2/compiler.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/jinja2/compiler.py old mode 100755 new mode 100644 index d534a827..63297b42 --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/jinja2/compiler.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/jinja2/compiler.py @@ -1,59 +1,62 @@ # -*- coding: utf-8 -*- -""" - jinja2.compiler - ~~~~~~~~~~~~~~~ - - Compiles nodes into python code. - - :copyright: (c) 2017 by the Jinja Team. - :license: BSD, see LICENSE for more details. -""" +"""Compiles nodes from the parser into Python code.""" +from collections import namedtuple +from functools import update_wrapper from itertools import chain -from copy import deepcopy from keyword import iskeyword as is_python_keyword -from functools import update_wrapper -from jinja2 import nodes -from jinja2.nodes import EvalContext -from jinja2.visitor import NodeVisitor -from jinja2.optimizer import Optimizer -from jinja2.exceptions import TemplateAssertionError -from jinja2.utils import Markup, concat, escape -from jinja2._compat import range_type, text_type, string_types, \ - iteritems, NativeStringIO, imap, izip -from jinja2.idtracking import Symbols, VAR_LOAD_PARAMETER, \ - VAR_LOAD_RESOLVE, VAR_LOAD_ALIAS, VAR_LOAD_UNDEFINED +from markupsafe import escape +from markupsafe import Markup + +from . import nodes +from ._compat import imap +from ._compat import iteritems +from ._compat import izip +from ._compat import NativeStringIO +from ._compat import range_type +from ._compat import string_types +from ._compat import text_type +from .exceptions import TemplateAssertionError +from .idtracking import Symbols +from .idtracking import VAR_LOAD_ALIAS +from .idtracking import VAR_LOAD_PARAMETER +from .idtracking import VAR_LOAD_RESOLVE +from .idtracking import VAR_LOAD_UNDEFINED +from .nodes import EvalContext +from .optimizer import Optimizer +from .utils import concat +from .visitor import NodeVisitor operators = { - 'eq': '==', - 'ne': '!=', - 'gt': '>', - 'gteq': '>=', - 'lt': '<', - 'lteq': '<=', - 'in': 'in', - 'notin': 'not in' + "eq": "==", + "ne": "!=", + "gt": ">", + "gteq": ">=", + "lt": "<", + "lteq": "<=", + "in": "in", + "notin": "not in", } # what method to iterate over items do we want to use for dict iteration # in generated code? on 2.x let's go with iteritems, on 3.x with items -if hasattr(dict, 'iteritems'): - dict_item_iter = 'iteritems' +if hasattr(dict, "iteritems"): + dict_item_iter = "iteritems" else: - dict_item_iter = 'items' + dict_item_iter = "items" -code_features = ['division'] +code_features = ["division"] # does this python version support generator stops? (PEP 0479) try: - exec('from __future__ import generator_stop') - code_features.append('generator_stop') + exec("from __future__ import generator_stop") + code_features.append("generator_stop") except SyntaxError: pass # does this python version support yield from? 
try: - exec('def f(): yield from x()') + exec("def f(): yield from x()") except SyntaxError: supports_yield_from = False else: @@ -68,17 +71,19 @@ def new_func(self, node, frame, **kwargs): if new_node != node: return self.visit(new_node, frame) return f(self, node, frame, **kwargs) + return update_wrapper(new_func, f) -def generate(node, environment, name, filename, stream=None, - defer_init=False, optimized=True): +def generate( + node, environment, name, filename, stream=None, defer_init=False, optimized=True +): """Generate the python source for a node tree.""" if not isinstance(node, nodes.Template): - raise TypeError('Can\'t compile non template nodes') - generator = environment.code_generator_class(environment, name, filename, - stream, defer_init, - optimized) + raise TypeError("Can't compile non template nodes") + generator = environment.code_generator_class( + environment, name, filename, stream, defer_init, optimized + ) generator.visit(node) if stream is None: return generator.stream.getvalue() @@ -119,7 +124,6 @@ def find_undeclared(nodes, names): class MacroRef(object): - def __init__(self, node): self.node = node self.accesses_caller = False @@ -132,8 +136,7 @@ class Frame(object): def __init__(self, eval_ctx, parent=None, level=None): self.eval_ctx = eval_ctx - self.symbols = Symbols(parent and parent.symbols or None, - level=level) + self.symbols = Symbols(parent and parent.symbols or None, level=level) # a toplevel frame is the root + soft frames such as if conditions. self.toplevel = False @@ -223,7 +226,7 @@ def __init__(self, names): self.undeclared = set() def visit_Name(self, node): - if node.ctx == 'load' and node.name in self.names: + if node.ctx == "load" and node.name in self.names: self.undeclared.add(node.name) if self.undeclared == self.names: raise VisitorExit() @@ -242,9 +245,9 @@ class CompilerExit(Exception): class CodeGenerator(NodeVisitor): - - def __init__(self, environment, name, filename, stream=None, - defer_init=False, optimized=True): + def __init__( + self, environment, name, filename, stream=None, defer_init=False, optimized=True + ): if stream is None: stream = NativeStringIO() self.environment = environment @@ -306,7 +309,7 @@ def __init__(self, environment, name, filename, stream=None, self._param_def_block = [] # Tracks the current context. 
- self._context_reference_stack = ['context'] + self._context_reference_stack = ["context"] # -- Various compilation helpers @@ -317,30 +320,30 @@ def fail(self, msg, lineno): def temporary_identifier(self): """Get a new unique identifier.""" self._last_identifier += 1 - return 't_%d' % self._last_identifier + return "t_%d" % self._last_identifier def buffer(self, frame): """Enable buffering for the frame from that point onwards.""" frame.buffer = self.temporary_identifier() - self.writeline('%s = []' % frame.buffer) + self.writeline("%s = []" % frame.buffer) def return_buffer_contents(self, frame, force_unescaped=False): """Return the buffer contents of the frame.""" if not force_unescaped: if frame.eval_ctx.volatile: - self.writeline('if context.eval_ctx.autoescape:') + self.writeline("if context.eval_ctx.autoescape:") self.indent() - self.writeline('return Markup(concat(%s))' % frame.buffer) + self.writeline("return Markup(concat(%s))" % frame.buffer) self.outdent() - self.writeline('else:') + self.writeline("else:") self.indent() - self.writeline('return concat(%s)' % frame.buffer) + self.writeline("return concat(%s)" % frame.buffer) self.outdent() return elif frame.eval_ctx.autoescape: - self.writeline('return Markup(concat(%s))' % frame.buffer) + self.writeline("return Markup(concat(%s))" % frame.buffer) return - self.writeline('return concat(%s)' % frame.buffer) + self.writeline("return concat(%s)" % frame.buffer) def indent(self): """Indent by one.""" @@ -353,14 +356,14 @@ def outdent(self, step=1): def start_write(self, frame, node=None): """Yield or write into the frame buffer.""" if frame.buffer is None: - self.writeline('yield ', node) + self.writeline("yield ", node) else: - self.writeline('%s.append(' % frame.buffer, node) + self.writeline("%s.append(" % frame.buffer, node) def end_write(self, frame): """End the writing process started by `start_write`.""" if frame.buffer is not None: - self.write(')') + self.write(")") def simple_write(self, s, frame, node=None): """Simple shortcut for start_write + write + end_write.""" @@ -373,7 +376,7 @@ def blockvisit(self, nodes, frame): is no buffer a dummy ``if 0: yield None`` is written automatically. """ try: - self.writeline('pass') + self.writeline("pass") for node in nodes: self.visit(node, frame) except CompilerExit: @@ -383,14 +386,13 @@ def write(self, x): """Write a string into the output stream.""" if self._new_lines: if not self._first_write: - self.stream.write('\n' * self._new_lines) + self.stream.write("\n" * self._new_lines) self.code_lineno += self._new_lines if self._write_debug_info is not None: - self.debug_info.append((self._write_debug_info, - self.code_lineno)) + self.debug_info.append((self._write_debug_info, self.code_lineno)) self._write_debug_info = None self._first_write = False - self.stream.write(' ' * self._indentation) + self.stream.write(" " * self._indentation) self._new_lines = 0 self.stream.write(x) @@ -410,7 +412,7 @@ def signature(self, node, frame, extra_kwargs=None): """Writes a function call to the stream for the current node. A leading comma is added automatically. The extra keyword arguments may not include python keywords otherwise a syntax - error could occour. The extra keyword arguments should be given + error could occur. The extra keyword arguments should be given as python dict. 
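
One way to observe the Python code this generator (including the signature() helper documented above) emits is to compile a small template to raw source; a sketch for illustration, not part of the vendored code:

from jinja2 import Environment

env = Environment()
# raw=True returns the generated Python source as a string instead of
# a code object; the keyword arguments in the dumped call expression
# are the part written by signature().
print(env.compile("{{ fn(1, key=2) }}", raw=True))
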
""" # if any of the given keyword arguments is a python keyword @@ -422,41 +424,41 @@ def signature(self, node, frame, extra_kwargs=None): break for arg in node.args: - self.write(', ') + self.write(", ") self.visit(arg, frame) if not kwarg_workaround: for kwarg in node.kwargs: - self.write(', ') + self.write(", ") self.visit(kwarg, frame) if extra_kwargs is not None: for key, value in iteritems(extra_kwargs): - self.write(', %s=%s' % (key, value)) + self.write(", %s=%s" % (key, value)) if node.dyn_args: - self.write(', *') + self.write(", *") self.visit(node.dyn_args, frame) if kwarg_workaround: if node.dyn_kwargs is not None: - self.write(', **dict({') + self.write(", **dict({") else: - self.write(', **{') + self.write(", **{") for kwarg in node.kwargs: - self.write('%r: ' % kwarg.key) + self.write("%r: " % kwarg.key) self.visit(kwarg.value, frame) - self.write(', ') + self.write(", ") if extra_kwargs is not None: for key, value in iteritems(extra_kwargs): - self.write('%r: %s, ' % (key, value)) + self.write("%r: %s, " % (key, value)) if node.dyn_kwargs is not None: - self.write('}, **') + self.write("}, **") self.visit(node.dyn_kwargs, frame) - self.write(')') + self.write(")") else: - self.write('}') + self.write("}") elif node.dyn_kwargs is not None: - self.write(', **') + self.write(", **") self.visit(node.dyn_kwargs, frame) def pull_dependencies(self, nodes): @@ -464,13 +466,14 @@ def pull_dependencies(self, nodes): visitor = DependencyFinderVisitor() for node in nodes: visitor.visit(node) - for dependency in 'filters', 'tests': + for dependency in "filters", "tests": mapping = getattr(self, dependency) for name in getattr(visitor, dependency): if name not in mapping: mapping[name] = self.temporary_identifier() - self.writeline('%s = environment.%s[%r]' % - (mapping[name], dependency, name)) + self.writeline( + "%s = environment.%s[%r]" % (mapping[name], dependency, name) + ) def enter_frame(self, frame): undefs = [] @@ -478,16 +481,15 @@ def enter_frame(self, frame): if action == VAR_LOAD_PARAMETER: pass elif action == VAR_LOAD_RESOLVE: - self.writeline('%s = %s(%r)' % - (target, self.get_resolve_func(), param)) + self.writeline("%s = %s(%r)" % (target, self.get_resolve_func(), param)) elif action == VAR_LOAD_ALIAS: - self.writeline('%s = %s' % (target, param)) + self.writeline("%s = %s" % (target, param)) elif action == VAR_LOAD_UNDEFINED: undefs.append(target) else: - raise NotImplementedError('unknown load instruction') + raise NotImplementedError("unknown load instruction") if undefs: - self.writeline('%s = missing' % ' = '.join(undefs)) + self.writeline("%s = missing" % " = ".join(undefs)) def leave_frame(self, frame, with_python_scope=False): if not with_python_scope: @@ -495,12 +497,12 @@ def leave_frame(self, frame, with_python_scope=False): for target, _ in iteritems(frame.symbols.loads): undefs.append(target) if undefs: - self.writeline('%s = missing' % ' = '.join(undefs)) + self.writeline("%s = missing" % " = ".join(undefs)) def func(self, name): if self.environment.is_async: - return 'async def %s' % name - return 'def %s' % name + return "async def %s" % name + return "def %s" % name def macro_body(self, node, frame): """Dump the function def of a macro or call block.""" @@ -512,16 +514,16 @@ def macro_body(self, node, frame): skip_special_params = set() args = [] for idx, arg in enumerate(node.args): - if arg.name == 'caller': + if arg.name == "caller": explicit_caller = idx - if arg.name in ('kwargs', 'varargs'): + if arg.name in ("kwargs", "varargs"): 
skip_special_params.add(arg.name) args.append(frame.symbols.ref(arg.name)) - undeclared = find_undeclared(node.body, ('caller', 'kwargs', 'varargs')) + undeclared = find_undeclared(node.body, ("caller", "kwargs", "varargs")) - if 'caller' in undeclared: - # In older Jinja2 versions there was a bug that allowed caller + if "caller" in undeclared: + # In older Jinja versions there was a bug that allowed caller # to retain the special behavior even if it was mentioned in # the argument list. However thankfully this was only really # working if it was the last argument. So we are explicitly @@ -531,23 +533,26 @@ def macro_body(self, node, frame): try: node.defaults[explicit_caller - len(node.args)] except IndexError: - self.fail('When defining macros or call blocks the ' - 'special "caller" argument must be omitted ' - 'or be given a default.', node.lineno) + self.fail( + "When defining macros or call blocks the " + 'special "caller" argument must be omitted ' + "or be given a default.", + node.lineno, + ) else: - args.append(frame.symbols.declare_parameter('caller')) + args.append(frame.symbols.declare_parameter("caller")) macro_ref.accesses_caller = True - if 'kwargs' in undeclared and not 'kwargs' in skip_special_params: - args.append(frame.symbols.declare_parameter('kwargs')) + if "kwargs" in undeclared and "kwargs" not in skip_special_params: + args.append(frame.symbols.declare_parameter("kwargs")) macro_ref.accesses_kwargs = True - if 'varargs' in undeclared and not 'varargs' in skip_special_params: - args.append(frame.symbols.declare_parameter('varargs')) + if "varargs" in undeclared and "varargs" not in skip_special_params: + args.append(frame.symbols.declare_parameter("varargs")) macro_ref.accesses_varargs = True # macros are delayed, they never require output checks frame.require_output_check = False frame.symbols.analyze_node(node) - self.writeline('%s(%s):' % (self.func('macro'), ', '.join(args)), node) + self.writeline("%s(%s):" % (self.func("macro"), ", ".join(args)), node) self.indent() self.buffer(frame) @@ -556,17 +561,17 @@ def macro_body(self, node, frame): self.push_parameter_definitions(frame) for idx, arg in enumerate(node.args): ref = frame.symbols.ref(arg.name) - self.writeline('if %s is missing:' % ref) + self.writeline("if %s is missing:" % ref) self.indent() try: default = node.defaults[idx - len(node.args)] except IndexError: - self.writeline('%s = undefined(%r, name=%r)' % ( - ref, - 'parameter %r was not provided' % arg.name, - arg.name)) + self.writeline( + "%s = undefined(%r, name=%r)" + % (ref, "parameter %r was not provided" % arg.name, arg.name) + ) else: - self.writeline('%s = ' % ref) + self.writeline("%s = " % ref) self.visit(default, frame) self.mark_parameter_stored(ref) self.outdent() @@ -581,35 +586,46 @@ def macro_body(self, node, frame): def macro_def(self, macro_ref, frame): """Dump the macro definition for the def created by macro_body.""" - arg_tuple = ', '.join(repr(x.name) for x in macro_ref.node.args) - name = getattr(macro_ref.node, 'name', None) + arg_tuple = ", ".join(repr(x.name) for x in macro_ref.node.args) + name = getattr(macro_ref.node, "name", None) if len(macro_ref.node.args) == 1: - arg_tuple += ',' - self.write('Macro(environment, macro, %r, (%s), %r, %r, %r, ' - 'context.eval_ctx.autoescape)' % - (name, arg_tuple, macro_ref.accesses_kwargs, - macro_ref.accesses_varargs, macro_ref.accesses_caller)) + arg_tuple += "," + self.write( + "Macro(environment, macro, %r, (%s), %r, %r, %r, " + "context.eval_ctx.autoescape)" + % ( + name, + 
arg_tuple, + macro_ref.accesses_kwargs, + macro_ref.accesses_varargs, + macro_ref.accesses_caller, + ) + ) def position(self, node): """Return a human readable position for the node.""" - rv = 'line %d' % node.lineno + rv = "line %d" % node.lineno if self.name is not None: - rv += ' in ' + repr(self.name) + rv += " in " + repr(self.name) return rv def dump_local_context(self, frame): - return '{%s}' % ', '.join( - '%r: %s' % (name, target) for name, target - in iteritems(frame.symbols.dump_stores())) + return "{%s}" % ", ".join( + "%r: %s" % (name, target) + for name, target in iteritems(frame.symbols.dump_stores()) + ) def write_commons(self): """Writes a common preamble that is used by root and block functions. Primarily this sets up common local helpers and enforces a generator through a dead branch. """ - self.writeline('resolve = context.resolve_or_missing') - self.writeline('undefined = environment.undefined') - self.writeline('if 0: yield None') + self.writeline("resolve = context.resolve_or_missing") + self.writeline("undefined = environment.undefined") + # always use the standard Undefined class for the implicit else of + # conditional expressions + self.writeline("cond_expr_undefined = Undefined") + self.writeline("if 0: yield None") def push_parameter_definitions(self, frame): """Pushes all parameter targets from the given frame into a local @@ -642,12 +658,12 @@ def get_context_ref(self): def get_resolve_func(self): target = self._context_reference_stack[-1] - if target == 'context': - return 'resolve' - return '%s.resolve' % target + if target == "context": + return "resolve" + return "%s.resolve" % target def derive_context(self, frame): - return '%s.derived(%s)' % ( + return "%s.derived(%s)" % ( self.get_context_ref(), self.dump_local_context(frame), ) @@ -669,44 +685,48 @@ def pop_assign_tracking(self, frame): vars = self._assign_stack.pop() if not frame.toplevel or not vars: return - public_names = [x for x in vars if x[:1] != '_'] + public_names = [x for x in vars if x[:1] != "_"] if len(vars) == 1: name = next(iter(vars)) ref = frame.symbols.ref(name) - self.writeline('context.vars[%r] = %s' % (name, ref)) + self.writeline("context.vars[%r] = %s" % (name, ref)) else: - self.writeline('context.vars.update({') + self.writeline("context.vars.update({") for idx, name in enumerate(vars): if idx: - self.write(', ') + self.write(", ") ref = frame.symbols.ref(name) - self.write('%r: %s' % (name, ref)) - self.write('})') + self.write("%r: %s" % (name, ref)) + self.write("})") if public_names: if len(public_names) == 1: - self.writeline('context.exported_vars.add(%r)' % - public_names[0]) + self.writeline("context.exported_vars.add(%r)" % public_names[0]) else: - self.writeline('context.exported_vars.update((%s))' % - ', '.join(imap(repr, public_names))) + self.writeline( + "context.exported_vars.update((%s))" + % ", ".join(imap(repr, public_names)) + ) # -- Statement Visitors def visit_Template(self, node, frame=None): - assert frame is None, 'no root frame allowed' + assert frame is None, "no root frame allowed" eval_ctx = EvalContext(self.environment, self.name) - from jinja2.runtime import __all__ as exported - self.writeline('from __future__ import %s' % ', '.join(code_features)) - self.writeline('from jinja2.runtime import ' + ', '.join(exported)) + from .runtime import exported + + self.writeline("from __future__ import %s" % ", ".join(code_features)) + self.writeline("from jinja2.runtime import " + ", ".join(exported)) if self.environment.is_async: - self.writeline('from 
jinja2.asyncsupport import auto_await, ' - 'auto_aiter, make_async_loop_context') + self.writeline( + "from jinja2.asyncsupport import auto_await, " + "auto_aiter, AsyncLoopContext" + ) # if we want a deferred initialization we cannot move the # environment into a local name - envenv = not self.defer_init and ', environment=environment' or '' + envenv = not self.defer_init and ", environment=environment" or "" # do we have an extends tag at all? If not, we can save some # overhead by just not processing any inheritance code. @@ -715,7 +735,7 @@ def visit_Template(self, node, frame=None): # find all blocks for block in node.find_all(nodes.Block): if block.name in self.blocks: - self.fail('block %r defined twice' % block.name, block.lineno) + self.fail("block %r defined twice" % block.name, block.lineno) self.blocks[block.name] = block # find all imports and import them @@ -723,32 +743,32 @@ def visit_Template(self, node, frame=None): if import_.importname not in self.import_aliases: imp = import_.importname self.import_aliases[imp] = alias = self.temporary_identifier() - if '.' in imp: - module, obj = imp.rsplit('.', 1) - self.writeline('from %s import %s as %s' % - (module, obj, alias)) + if "." in imp: + module, obj = imp.rsplit(".", 1) + self.writeline("from %s import %s as %s" % (module, obj, alias)) else: - self.writeline('import %s as %s' % (imp, alias)) + self.writeline("import %s as %s" % (imp, alias)) # add the load name - self.writeline('name = %r' % self.name) + self.writeline("name = %r" % self.name) # generate the root render function. - self.writeline('%s(context, missing=missing%s):' % - (self.func('root'), envenv), extra=1) + self.writeline( + "%s(context, missing=missing%s):" % (self.func("root"), envenv), extra=1 + ) self.indent() self.write_commons() # process the root frame = Frame(eval_ctx) - if 'self' in find_undeclared(node.body, ('self',)): - ref = frame.symbols.declare_parameter('self') - self.writeline('%s = TemplateReference(context)' % ref) + if "self" in find_undeclared(node.body, ("self",)): + ref = frame.symbols.declare_parameter("self") + self.writeline("%s = TemplateReference(context)" % ref) frame.symbols.analyze_node(node) frame.toplevel = frame.rootlevel = True frame.require_output_check = have_extends and not self.has_known_extends if have_extends: - self.writeline('parent_template = None') + self.writeline("parent_template = None") self.enter_frame(frame) self.pull_dependencies(node.body) self.blockvisit(node.body, frame) @@ -759,39 +779,42 @@ def visit_Template(self, node, frame=None): if have_extends: if not self.has_known_extends: self.indent() - self.writeline('if parent_template is not None:') + self.writeline("if parent_template is not None:") self.indent() if supports_yield_from and not self.environment.is_async: - self.writeline('yield from parent_template.' - 'root_render_func(context)') + self.writeline("yield from parent_template.root_render_func(context)") else: - self.writeline('%sfor event in parent_template.' - 'root_render_func(context):' % - (self.environment.is_async and 'async ' or '')) + self.writeline( + "%sfor event in parent_template." + "root_render_func(context):" + % (self.environment.is_async and "async " or "") + ) self.indent() - self.writeline('yield event') + self.writeline("yield event") self.outdent() self.outdent(1 + (not self.has_known_extends)) # at this point we now have the blocks collected and can visit them too. 
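# [Editorial aside -- not part of the diff. A minimal sketch of the module
# source that visit_Template emits: one "root" generator plus one
# "block_<name>" generator per {% block %}. Assumes a standard Jinja2 2.x
# install; the template text is invented. Environment.compile(raw=True)
# returns the generated Python source as a string.]
from jinja2 import Environment

generated = Environment().compile("{% block title %}hi{% endblock %}", raw=True)
assert "def root(context" in generated
assert "def block_title(context" in generated
assert "blocks = {" in generated  # the mapping written at the end of visit_Template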
for name, block in iteritems(self.blocks): - self.writeline('%s(context, missing=missing%s):' % - (self.func('block_' + name), envenv), - block, 1) + self.writeline( + "%s(context, missing=missing%s):" + % (self.func("block_" + name), envenv), + block, + 1, + ) self.indent() self.write_commons() # It's important that we do not make this frame a child of the # toplevel template. This would cause a variety of # interesting issues with identifier tracking. block_frame = Frame(eval_ctx) - undeclared = find_undeclared(block.body, ('self', 'super')) - if 'self' in undeclared: - ref = block_frame.symbols.declare_parameter('self') - self.writeline('%s = TemplateReference(context)' % ref) - if 'super' in undeclared: - ref = block_frame.symbols.declare_parameter('super') - self.writeline('%s = context.super(%r, ' - 'block_%s)' % (ref, name, name)) + undeclared = find_undeclared(block.body, ("self", "super")) + if "self" in undeclared: + ref = block_frame.symbols.declare_parameter("self") + self.writeline("%s = TemplateReference(context)" % ref) + if "super" in undeclared: + ref = block_frame.symbols.declare_parameter("super") + self.writeline("%s = context.super(%r, block_%s)" % (ref, name, name)) block_frame.symbols.analyze_node(block) block_frame.block = name self.enter_frame(block_frame) @@ -800,13 +823,15 @@ def visit_Template(self, node, frame=None): self.leave_frame(block_frame, with_python_scope=True) self.outdent() - self.writeline('blocks = {%s}' % ', '.join('%r: block_%s' % (x, x) - for x in self.blocks), - extra=1) + self.writeline( + "blocks = {%s}" % ", ".join("%r: block_%s" % (x, x) for x in self.blocks), + extra=1, + ) # add a function that returns the debug info - self.writeline('debug_info = %r' % '&'.join('%s=%s' % x for x - in self.debug_info)) + self.writeline( + "debug_info = %r" % "&".join("%s=%s" % x for x in self.debug_info) + ) def visit_Block(self, node, frame): """Call a block and register it for the template.""" @@ -817,7 +842,7 @@ def visit_Block(self, node, frame): if self.has_known_extends: return if self.extends_so_far > 0: - self.writeline('if parent_template is None:') + self.writeline("if parent_template is None:") self.indent() level += 1 @@ -826,16 +851,22 @@ def visit_Block(self, node, frame): else: context = self.get_context_ref() - if supports_yield_from and not self.environment.is_async and \ - frame.buffer is None: - self.writeline('yield from context.blocks[%r][0](%s)' % ( - node.name, context), node) + if ( + supports_yield_from + and not self.environment.is_async + and frame.buffer is None + ): + self.writeline( + "yield from context.blocks[%r][0](%s)" % (node.name, context), node + ) else: - loop = self.environment.is_async and 'async for' or 'for' - self.writeline('%s event in context.blocks[%r][0](%s):' % ( - loop, node.name, context), node) + loop = self.environment.is_async and "async for" or "for" + self.writeline( + "%s event in context.blocks[%r][0](%s):" % (loop, node.name, context), + node, + ) self.indent() - self.simple_write('event', frame) + self.simple_write("event", frame) self.outdent() self.outdent(level) @@ -843,8 +874,7 @@ def visit_Block(self, node, frame): def visit_Extends(self, node, frame): """Calls the extender.""" if not frame.toplevel: - self.fail('cannot use extend from a non top-level scope', - node.lineno) + self.fail("cannot use extend from a non top-level scope", node.lineno) # if the number of extends statements in general is zero so # far, we don't have to add a check if something extended @@ -856,10 +886,9 @@ def 
visit_Extends(self, node, frame): # time too, but i welcome it not to confuse users by throwing the # same error at different times just "because we can". if not self.has_known_extends: - self.writeline('if parent_template is not None:') + self.writeline("if parent_template is not None:") self.indent() - self.writeline('raise TemplateRuntimeError(%r)' % - 'extended multiple times') + self.writeline("raise TemplateRuntimeError(%r)" % "extended multiple times") # if we have a known extends already we don't need that code here # as we know that the template execution will end here. @@ -868,14 +897,14 @@ def visit_Extends(self, node, frame): else: self.outdent() - self.writeline('parent_template = environment.get_template(', node) + self.writeline("parent_template = environment.get_template(", node) self.visit(node.template, frame) - self.write(', %r)' % self.name) - self.writeline('for name, parent_block in parent_template.' - 'blocks.%s():' % dict_item_iter) + self.write(", %r)" % self.name) + self.writeline( + "for name, parent_block in parent_template.blocks.%s():" % dict_item_iter + ) self.indent() - self.writeline('context.blocks.setdefault(name, []).' - 'append(parent_block)') + self.writeline("context.blocks.setdefault(name, []).append(parent_block)") self.outdent() # if this extends statement was in the root level we can take @@ -890,52 +919,56 @@ def visit_Extends(self, node, frame): def visit_Include(self, node, frame): """Handles includes.""" if node.ignore_missing: - self.writeline('try:') + self.writeline("try:") self.indent() - func_name = 'get_or_select_template' + func_name = "get_or_select_template" if isinstance(node.template, nodes.Const): if isinstance(node.template.value, string_types): - func_name = 'get_template' + func_name = "get_template" elif isinstance(node.template.value, (tuple, list)): - func_name = 'select_template' + func_name = "select_template" elif isinstance(node.template, (nodes.Tuple, nodes.List)): - func_name = 'select_template' + func_name = "select_template" - self.writeline('template = environment.%s(' % func_name, node) + self.writeline("template = environment.%s(" % func_name, node) self.visit(node.template, frame) - self.write(', %r)' % self.name) + self.write(", %r)" % self.name) if node.ignore_missing: self.outdent() - self.writeline('except TemplateNotFound:') + self.writeline("except TemplateNotFound:") self.indent() - self.writeline('pass') + self.writeline("pass") self.outdent() - self.writeline('else:') + self.writeline("else:") self.indent() skip_event_yield = False if node.with_context: - loop = self.environment.is_async and 'async for' or 'for' - self.writeline('%s event in template.root_render_func(' - 'template.new_context(context.get_all(), True, ' - '%s)):' % (loop, self.dump_local_context(frame))) + loop = self.environment.is_async and "async for" or "for" + self.writeline( + "%s event in template.root_render_func(" + "template.new_context(context.get_all(), True, " + "%s)):" % (loop, self.dump_local_context(frame)) + ) elif self.environment.is_async: - self.writeline('for event in (await ' - 'template._get_default_module_async())' - '._body_stream:') + self.writeline( + "for event in (await " + "template._get_default_module_async())" + "._body_stream:" + ) else: if supports_yield_from: - self.writeline('yield from template._get_default_module()' - '._body_stream') + self.writeline("yield from template._get_default_module()._body_stream") skip_event_yield = True else: - self.writeline('for event in template._get_default_module()' - 
'._body_stream:') + self.writeline( + "for event in template._get_default_module()._body_stream:" + ) if not skip_event_yield: self.indent() - self.simple_write('event', frame) + self.simple_write("event", frame) self.outdent() if node.ignore_missing: @@ -943,40 +976,50 @@ def visit_Include(self, node, frame): def visit_Import(self, node, frame): """Visit regular imports.""" - self.writeline('%s = ' % frame.symbols.ref(node.target), node) + self.writeline("%s = " % frame.symbols.ref(node.target), node) if frame.toplevel: - self.write('context.vars[%r] = ' % node.target) + self.write("context.vars[%r] = " % node.target) if self.environment.is_async: - self.write('await ') - self.write('environment.get_template(') + self.write("await ") + self.write("environment.get_template(") self.visit(node.template, frame) - self.write(', %r).' % self.name) + self.write(", %r)." % self.name) if node.with_context: - self.write('make_module%s(context.get_all(), True, %s)' - % (self.environment.is_async and '_async' or '', - self.dump_local_context(frame))) + self.write( + "make_module%s(context.get_all(), True, %s)" + % ( + self.environment.is_async and "_async" or "", + self.dump_local_context(frame), + ) + ) elif self.environment.is_async: - self.write('_get_default_module_async()') + self.write("_get_default_module_async()") else: - self.write('_get_default_module()') - if frame.toplevel and not node.target.startswith('_'): - self.writeline('context.exported_vars.discard(%r)' % node.target) + self.write("_get_default_module()") + if frame.toplevel and not node.target.startswith("_"): + self.writeline("context.exported_vars.discard(%r)" % node.target) def visit_FromImport(self, node, frame): """Visit named imports.""" self.newline(node) - self.write('included_template = %senvironment.get_template(' - % (self.environment.is_async and 'await ' or '')) + self.write( + "included_template = %senvironment.get_template(" + % (self.environment.is_async and "await " or "") + ) self.visit(node.template, frame) - self.write(', %r).' % self.name) + self.write(", %r)." 
% self.name) if node.with_context: - self.write('make_module%s(context.get_all(), True, %s)' - % (self.environment.is_async and '_async' or '', - self.dump_local_context(frame))) + self.write( + "make_module%s(context.get_all(), True, %s)" + % ( + self.environment.is_async and "_async" or "", + self.dump_local_context(frame), + ) + ) elif self.environment.is_async: - self.write('_get_default_module_async()') + self.write("_get_default_module_async()") else: - self.write('_get_default_module()') + self.write("_get_default_module()") var_names = [] discarded_names = [] @@ -985,41 +1028,51 @@ def visit_FromImport(self, node, frame): name, alias = name else: alias = name - self.writeline('%s = getattr(included_template, ' - '%r, missing)' % (frame.symbols.ref(alias), name)) - self.writeline('if %s is missing:' % frame.symbols.ref(alias)) + self.writeline( + "%s = getattr(included_template, " + "%r, missing)" % (frame.symbols.ref(alias), name) + ) + self.writeline("if %s is missing:" % frame.symbols.ref(alias)) self.indent() - self.writeline('%s = undefined(%r %% ' - 'included_template.__name__, ' - 'name=%r)' % - (frame.symbols.ref(alias), - 'the template %%r (imported on %s) does ' - 'not export the requested name %s' % ( - self.position(node), - repr(name) - ), name)) + self.writeline( + "%s = undefined(%r %% " + "included_template.__name__, " + "name=%r)" + % ( + frame.symbols.ref(alias), + "the template %%r (imported on %s) does " + "not export the requested name %s" + % (self.position(node), repr(name)), + name, + ) + ) self.outdent() if frame.toplevel: var_names.append(alias) - if not alias.startswith('_'): + if not alias.startswith("_"): discarded_names.append(alias) if var_names: if len(var_names) == 1: name = var_names[0] - self.writeline('context.vars[%r] = %s' % - (name, frame.symbols.ref(name))) + self.writeline( + "context.vars[%r] = %s" % (name, frame.symbols.ref(name)) + ) else: - self.writeline('context.vars.update({%s})' % ', '.join( - '%r: %s' % (name, frame.symbols.ref(name)) for name in var_names - )) + self.writeline( + "context.vars.update({%s})" + % ", ".join( + "%r: %s" % (name, frame.symbols.ref(name)) for name in var_names + ) + ) if discarded_names: if len(discarded_names) == 1: - self.writeline('context.exported_vars.discard(%r)' % - discarded_names[0]) + self.writeline("context.exported_vars.discard(%r)" % discarded_names[0]) else: - self.writeline('context.exported_vars.difference_' - 'update((%s))' % ', '.join(imap(repr, discarded_names))) + self.writeline( + "context.exported_vars.difference_" + "update((%s))" % ", ".join(imap(repr, discarded_names)) + ) def visit_For(self, node, frame): loop_frame = frame.inner() @@ -1029,35 +1082,35 @@ def visit_For(self, node, frame): # try to figure out if we have an extended loop. An extended loop # is necessary if the loop is in recursive mode if the special loop # variable is accessed in the body. 
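# [Editorial aside -- not part of the diff. Sketch of the distinction drawn
# here: a body that never touches the special "loop" name compiles to a bare
# Python for-loop, while touching it (or using "recursive") switches on the
# (Async)LoopContext wrapper. Both templates are invented examples; assumes
# a standard Jinja2 2.x install.]
from jinja2 import Environment

env = Environment()
plain = env.compile("{% for x in xs %}{{ x }}{% endfor %}", raw=True)
extended = env.compile("{% for x in xs %}{{ loop.index }}{% endfor %}", raw=True)
assert "LoopContext" not in plain
assert "LoopContext" in extended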
- extended_loop = node.recursive or 'loop' in \ - find_undeclared(node.iter_child_nodes( - only=('body',)), ('loop',)) + extended_loop = node.recursive or "loop" in find_undeclared( + node.iter_child_nodes(only=("body",)), ("loop",) + ) loop_ref = None if extended_loop: - loop_ref = loop_frame.symbols.declare_parameter('loop') + loop_ref = loop_frame.symbols.declare_parameter("loop") - loop_frame.symbols.analyze_node(node, for_branch='body') + loop_frame.symbols.analyze_node(node, for_branch="body") if node.else_: - else_frame.symbols.analyze_node(node, for_branch='else') + else_frame.symbols.analyze_node(node, for_branch="else") if node.test: loop_filter_func = self.temporary_identifier() - test_frame.symbols.analyze_node(node, for_branch='test') - self.writeline('%s(fiter):' % self.func(loop_filter_func), node.test) + test_frame.symbols.analyze_node(node, for_branch="test") + self.writeline("%s(fiter):" % self.func(loop_filter_func), node.test) self.indent() self.enter_frame(test_frame) - self.writeline(self.environment.is_async and 'async for ' or 'for ') + self.writeline(self.environment.is_async and "async for " or "for ") self.visit(node.target, loop_frame) - self.write(' in ') - self.write(self.environment.is_async and 'auto_aiter(fiter)' or 'fiter') - self.write(':') + self.write(" in ") + self.write(self.environment.is_async and "auto_aiter(fiter)" or "fiter") + self.write(":") self.indent() - self.writeline('if ', node.test) + self.writeline("if ", node.test) self.visit(node.test, test_frame) - self.write(':') + self.write(":") self.indent() - self.writeline('yield ') + self.writeline("yield ") self.visit(node.target, loop_frame) self.outdent(3) self.leave_frame(test_frame, with_python_scope=True) @@ -1066,8 +1119,9 @@ def visit_For(self, node, frame): # variables at that point. Because loops can be nested but the loop # variable is a special one we have to enforce aliasing for it. 
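# [Editorial aside -- not part of the diff. How the recursive mode handled
# just below behaves from the template side: calling "loop(...)" re-enters
# the generated "def loop(reciter, loop_render_func, depth=0)" helper. The
# tree data is a made-up example.]
from jinja2 import Template

t = Template(
    "{% for n in tree recursive %}{{ n.name }}"
    "{% if n.kids %}({{ loop(n.kids) }}){% endif %}{% endfor %}"
)
print(t.render(tree=[{"name": "a", "kids": [{"name": "b", "kids": []}]}]))
# -> a(b)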
if node.recursive: - self.writeline('%s(reciter, loop_render_func, depth=0):' % - self.func('loop'), node) + self.writeline( + "%s(reciter, loop_render_func, depth=0):" % self.func("loop"), node + ) self.indent() self.buffer(loop_frame) @@ -1077,57 +1131,60 @@ def visit_For(self, node, frame): # make sure the loop variable is a special one and raise a template # assertion error if a loop tries to write to loop if extended_loop: - self.writeline('%s = missing' % loop_ref) + self.writeline("%s = missing" % loop_ref) for name in node.find_all(nodes.Name): - if name.ctx == 'store' and name.name == 'loop': - self.fail('Can\'t assign to special loop variable ' - 'in for-loop target', name.lineno) + if name.ctx == "store" and name.name == "loop": + self.fail( + "Can't assign to special loop variable in for-loop target", + name.lineno, + ) if node.else_: iteration_indicator = self.temporary_identifier() - self.writeline('%s = 1' % iteration_indicator) + self.writeline("%s = 1" % iteration_indicator) - self.writeline(self.environment.is_async and 'async for ' or 'for ', node) + self.writeline(self.environment.is_async and "async for " or "for ", node) self.visit(node.target, loop_frame) if extended_loop: if self.environment.is_async: - self.write(', %s in await make_async_loop_context(' % loop_ref) + self.write(", %s in AsyncLoopContext(" % loop_ref) else: - self.write(', %s in LoopContext(' % loop_ref) + self.write(", %s in LoopContext(" % loop_ref) else: - self.write(' in ') + self.write(" in ") if node.test: - self.write('%s(' % loop_filter_func) + self.write("%s(" % loop_filter_func) if node.recursive: - self.write('reciter') + self.write("reciter") else: if self.environment.is_async and not extended_loop: - self.write('auto_aiter(') + self.write("auto_aiter(") self.visit(node.iter, frame) if self.environment.is_async and not extended_loop: - self.write(')') + self.write(")") if node.test: - self.write(')') + self.write(")") if node.recursive: - self.write(', undefined, loop_render_func, depth):') + self.write(", undefined, loop_render_func, depth):") else: - self.write(extended_loop and ', undefined):' or ':') + self.write(extended_loop and ", undefined):" or ":") self.indent() self.enter_frame(loop_frame) self.blockvisit(node.body, loop_frame) if node.else_: - self.writeline('%s = 0' % iteration_indicator) + self.writeline("%s = 0" % iteration_indicator) self.outdent() - self.leave_frame(loop_frame, with_python_scope=node.recursive - and not node.else_) + self.leave_frame( + loop_frame, with_python_scope=node.recursive and not node.else_ + ) if node.else_: - self.writeline('if %s:' % iteration_indicator) + self.writeline("if %s:" % iteration_indicator) self.indent() self.enter_frame(else_frame) self.blockvisit(node.else_, else_frame) @@ -1141,33 +1198,33 @@ def visit_For(self, node, frame): self.outdent() self.start_write(frame, node) if self.environment.is_async: - self.write('await ') - self.write('loop(') + self.write("await ") + self.write("loop(") if self.environment.is_async: - self.write('auto_aiter(') + self.write("auto_aiter(") self.visit(node.iter, frame) if self.environment.is_async: - self.write(')') - self.write(', loop)') + self.write(")") + self.write(", loop)") self.end_write(frame) def visit_If(self, node, frame): if_frame = frame.soft() - self.writeline('if ', node) + self.writeline("if ", node) self.visit(node.test, if_frame) - self.write(':') + self.write(":") self.indent() self.blockvisit(node.body, if_frame) self.outdent() for elif_ in node.elif_: - self.writeline('elif 
', elif_) + self.writeline("elif ", elif_) self.visit(elif_.test, if_frame) - self.write(':') + self.write(":") self.indent() self.blockvisit(elif_.body, if_frame) self.outdent() if node.else_: - self.writeline('else:') + self.writeline("else:") self.indent() self.blockvisit(node.else_, if_frame) self.outdent() @@ -1176,16 +1233,15 @@ def visit_Macro(self, node, frame): macro_frame, macro_ref = self.macro_body(node, frame) self.newline() if frame.toplevel: - if not node.name.startswith('_'): - self.write('context.exported_vars.add(%r)' % node.name) - ref = frame.symbols.ref(node.name) - self.writeline('context.vars[%r] = ' % node.name) - self.write('%s = ' % frame.symbols.ref(node.name)) + if not node.name.startswith("_"): + self.write("context.exported_vars.add(%r)" % node.name) + self.writeline("context.vars[%r] = " % node.name) + self.write("%s = " % frame.symbols.ref(node.name)) self.macro_def(macro_ref, macro_frame) def visit_CallBlock(self, node, frame): call_frame, macro_ref = self.macro_body(node, frame) - self.writeline('caller = ') + self.writeline("caller = ") self.macro_def(macro_ref, call_frame) self.start_write(frame, node) self.visit_Call(node.call, frame, forward_caller=True) @@ -1206,10 +1262,10 @@ def visit_With(self, node, frame): with_frame = frame.inner() with_frame.symbols.analyze_node(node) self.enter_frame(with_frame) - for idx, (target, expr) in enumerate(izip(node.targets, node.values)): + for target, expr in izip(node.targets, node.values): self.newline() self.visit(target, with_frame) - self.write(' = ') + self.write(" = ") self.visit(expr, frame) self.blockvisit(node.body, with_frame) self.leave_frame(with_frame) @@ -1218,156 +1274,187 @@ def visit_ExprStmt(self, node, frame): self.newline(node) self.visit(node.node, frame) - def visit_Output(self, node, frame): - # if we have a known extends statement, we don't output anything - # if we are in a require_output_check section - if self.has_known_extends and frame.require_output_check: - return + _FinalizeInfo = namedtuple("_FinalizeInfo", ("const", "src")) + #: The default finalize function if the environment isn't configured + #: with one. Or if the environment has one, this is called on that + #: function's output for constants. + _default_finalize = text_type + _finalize = None + + def _make_finalize(self): + """Build the finalize function to be used on constants and at + runtime. Cached so it's only created once for all output nodes. + + Returns a ``namedtuple`` with the following attributes: + + ``const`` + A function to finalize constant data at compile time. + + ``src`` + Source code to output around nodes to be evaluated at + runtime. 
+ """ + if self._finalize is not None: + return self._finalize + + finalize = default = self._default_finalize + src = None - allow_constant_finalize = True if self.environment.finalize: - func = self.environment.finalize - if getattr(func, 'contextfunction', False) or \ - getattr(func, 'evalcontextfunction', False): - allow_constant_finalize = False - elif getattr(func, 'environmentfunction', False): - finalize = lambda x: text_type( - self.environment.finalize(self.environment, x)) - else: - finalize = lambda x: text_type(self.environment.finalize(x)) + src = "environment.finalize(" + env_finalize = self.environment.finalize + + def finalize(value): + return default(env_finalize(value)) + + if getattr(env_finalize, "contextfunction", False) is True: + src += "context, " + finalize = None # noqa: F811 + elif getattr(env_finalize, "evalcontextfunction", False) is True: + src += "context.eval_ctx, " + finalize = None + elif getattr(env_finalize, "environmentfunction", False) is True: + src += "environment, " + + def finalize(value): + return default(env_finalize(self.environment, value)) + + self._finalize = self._FinalizeInfo(finalize, src) + return self._finalize + + def _output_const_repr(self, group): + """Given a group of constant values converted from ``Output`` + child nodes, produce a string to write to the template module + source. + """ + return repr(concat(group)) + + def _output_child_to_const(self, node, frame, finalize): + """Try to optimize a child of an ``Output`` node by trying to + convert it to constant, finalized data at compile time. + + If :exc:`Impossible` is raised, the node is not constant and + will be evaluated at runtime. Any other exception will also be + evaluated at runtime for easier debugging. + """ + const = node.as_const(frame.eval_ctx) + + if frame.eval_ctx.autoescape: + const = escape(const) + + # Template data doesn't go through finalize. + if isinstance(node, nodes.TemplateData): + return text_type(const) + + return finalize.const(const) + + def _output_child_pre(self, node, frame, finalize): + """Output extra source code before visiting a child of an + ``Output`` node. + """ + if frame.eval_ctx.volatile: + self.write("(escape if context.eval_ctx.autoescape else to_string)(") + elif frame.eval_ctx.autoescape: + self.write("escape(") else: - finalize = text_type + self.write("to_string(") + + if finalize.src is not None: + self.write(finalize.src) + + def _output_child_post(self, node, frame, finalize): + """Output extra source code after visiting a child of an + ``Output`` node. + """ + self.write(")") + + if finalize.src is not None: + self.write(")") - # if we are inside a frame that requires output checking, we do so - outdent_later = False + def visit_Output(self, node, frame): + # If an extends is active, don't render outside a block. if frame.require_output_check: - self.writeline('if parent_template is None:') + # A top-level extends is known to exist at compile time. + if self.has_known_extends: + return + + self.writeline("if parent_template is None:") self.indent() - outdent_later = True - # try to evaluate as many chunks as possible into a static - # string at compile time. + finalize = self._make_finalize() body = [] + + # Evaluate constants at compile time if possible. Each item in + # body will be either a list of static data or a node to be + # evaluated at runtime. 
for child in node.nodes: try: - if not allow_constant_finalize: + if not ( + # If the finalize function requires runtime context, + # constants can't be evaluated at compile time. + finalize.const + # Unless it's basic template data that won't be + # finalized anyway. + or isinstance(child, nodes.TemplateData) + ): raise nodes.Impossible() - const = child.as_const(frame.eval_ctx) - except nodes.Impossible: - body.append(child) - continue - # the frame can't be volatile here, becaus otherwise the - # as_const() function would raise an Impossible exception - # at that point. - try: - if frame.eval_ctx.autoescape: - if hasattr(const, '__html__'): - const = const.__html__() - else: - const = escape(const) - const = finalize(const) - except Exception: - # if something goes wrong here we evaluate the node - # at runtime for easier debugging + + const = self._output_child_to_const(child, frame, finalize) + except (nodes.Impossible, Exception): + # The node was not constant and needs to be evaluated at + # runtime. Or another error was raised, which is easier + # to debug at runtime. body.append(child) continue + if body and isinstance(body[-1], list): body[-1].append(const) else: body.append([const]) - # if we have less than 3 nodes or a buffer we yield or extend/append - if len(body) < 3 or frame.buffer is not None: - if frame.buffer is not None: - # for one item we append, for more we extend - if len(body) == 1: - self.writeline('%s.append(' % frame.buffer) + if frame.buffer is not None: + if len(body) == 1: + self.writeline("%s.append(" % frame.buffer) + else: + self.writeline("%s.extend((" % frame.buffer) + + self.indent() + + for item in body: + if isinstance(item, list): + # A group of constant data to join and output. + val = self._output_const_repr(item) + + if frame.buffer is None: + self.writeline("yield " + val) else: - self.writeline('%s.extend((' % frame.buffer) - self.indent() - for item in body: - if isinstance(item, list): - val = repr(concat(item)) - if frame.buffer is None: - self.writeline('yield ' + val) - else: - self.writeline(val + ',') + self.writeline(val + ",") + else: + if frame.buffer is None: + self.writeline("yield ", item) else: - if frame.buffer is None: - self.writeline('yield ', item) - else: - self.newline(item) - close = 1 - if frame.eval_ctx.volatile: - self.write('(escape if context.eval_ctx.autoescape' - ' else to_string)(') - elif frame.eval_ctx.autoescape: - self.write('escape(') - else: - self.write('to_string(') - if self.environment.finalize is not None: - self.write('environment.finalize(') - if getattr(self.environment.finalize, - "contextfunction", False): - self.write('context, ') - close += 1 - self.visit(item, frame) - self.write(')' * close) - if frame.buffer is not None: - self.write(',') - if frame.buffer is not None: - # close the open parentheses - self.outdent() - self.writeline(len(body) == 1 and ')' or '))') + self.newline(item) - # otherwise we create a format string as this is faster in that case - else: - format = [] - arguments = [] - for item in body: - if isinstance(item, list): - format.append(concat(item).replace('%', '%%')) - else: - format.append('%s') - arguments.append(item) - self.writeline('yield ') - self.write(repr(concat(format)) + ' % (') - self.indent() - for argument in arguments: - self.newline(argument) - close = 0 - if frame.eval_ctx.volatile: - self.write('(escape if context.eval_ctx.autoescape else' - ' to_string)(') - close += 1 - elif frame.eval_ctx.autoescape: - self.write('escape(') - close += 1 - if 
self.environment.finalize is not None: - self.write('environment.finalize(') - if getattr(self.environment.finalize, - 'contextfunction', False): - self.write('context, ') - elif getattr(self.environment.finalize, - 'evalcontextfunction', False): - self.write('context.eval_ctx, ') - elif getattr(self.environment.finalize, - 'environmentfunction', False): - self.write('environment, ') - close += 1 - self.visit(argument, frame) - self.write(')' * close + ', ') + # A node to be evaluated at runtime. + self._output_child_pre(item, frame, finalize) + self.visit(item, frame) + self._output_child_post(item, frame, finalize) + + if frame.buffer is not None: + self.write(",") + + if frame.buffer is not None: self.outdent() - self.writeline(')') + self.writeline(")" if len(body) == 1 else "))") - if outdent_later: + if frame.require_output_check: self.outdent() def visit_Assign(self, node, frame): self.push_assign_tracking() self.newline(node) self.visit(node.target, frame) - self.write(' = ') + self.write(" = ") self.visit(node.node, frame) self.pop_assign_tracking(frame) @@ -1384,20 +1471,19 @@ def visit_AssignBlock(self, node, frame): self.blockvisit(node.body, block_frame) self.newline(node) self.visit(node.target, frame) - self.write(' = (Markup if context.eval_ctx.autoescape ' - 'else identity)(') + self.write(" = (Markup if context.eval_ctx.autoescape else identity)(") if node.filter is not None: self.visit_Filter(node.filter, block_frame) else: - self.write('concat(%s)' % block_frame.buffer) - self.write(')') + self.write("concat(%s)" % block_frame.buffer) + self.write(")") self.pop_assign_tracking(frame) self.leave_frame(block_frame) # -- Expression Visitors def visit_Name(self, node, frame): - if node.ctx == 'store' and frame.toplevel: + if node.ctx == "store" and frame.toplevel: if self._assign_stack: self._assign_stack[-1].add(node.name) ref = frame.symbols.ref(node.name) @@ -1405,12 +1491,17 @@ def visit_Name(self, node, frame): # If we are looking up a variable we might have to deal with the # case where it's undefined. We can skip that case if the load # instruction indicates a parameter which are always defined. 
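# [Editorial aside -- not part of the diff. Effect of the guard emitted
# below: a "load" of a possibly-missing name compiles to
# "(undefined(name='x') if l_0_x is missing else l_0_x)", which is why an
# unset variable renders as Undefined rather than raising a NameError.]
from jinja2 import Template

print(Template("[{{ x }}]").render())  # -> "[]"; default Undefined is empty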
- if node.ctx == 'load': + if node.ctx == "load": load = frame.symbols.find_load(ref) - if not (load is not None and load[0] == VAR_LOAD_PARAMETER and \ - not self.parameter_is_undeclared(ref)): - self.write('(undefined(name=%r) if %s is missing else %s)' % - (node.name, ref, ref)) + if not ( + load is not None + and load[0] == VAR_LOAD_PARAMETER + and not self.parameter_is_undeclared(ref) + ): + self.write( + "(undefined(name=%r) if %s is missing else %s)" + % (node.name, ref, ref) + ) return self.write(ref) @@ -1420,12 +1511,14 @@ def visit_NSRef(self, node, frame): # `foo.bar` notation they will be parsed as a normal attribute access # when used anywhere but in a `set` context ref = frame.symbols.ref(node.name) - self.writeline('if not isinstance(%s, Namespace):' % ref) + self.writeline("if not isinstance(%s, Namespace):" % ref) self.indent() - self.writeline('raise TemplateRuntimeError(%r)' % - 'cannot assign attribute on non-namespace object') + self.writeline( + "raise TemplateRuntimeError(%r)" + % "cannot assign attribute on non-namespace object" + ) self.outdent() - self.writeline('%s[%r]' % (ref, node.attr)) + self.writeline("%s[%r]" % (ref, node.attr)) def visit_Const(self, node, frame): val = node.as_const(frame.eval_ctx) @@ -1438,230 +1531,256 @@ def visit_TemplateData(self, node, frame): try: self.write(repr(node.as_const(frame.eval_ctx))) except nodes.Impossible: - self.write('(Markup if context.eval_ctx.autoescape else identity)(%r)' - % node.data) + self.write( + "(Markup if context.eval_ctx.autoescape else identity)(%r)" % node.data + ) def visit_Tuple(self, node, frame): - self.write('(') + self.write("(") idx = -1 for idx, item in enumerate(node.items): if idx: - self.write(', ') + self.write(", ") self.visit(item, frame) - self.write(idx == 0 and ',)' or ')') + self.write(idx == 0 and ",)" or ")") def visit_List(self, node, frame): - self.write('[') + self.write("[") for idx, item in enumerate(node.items): if idx: - self.write(', ') + self.write(", ") self.visit(item, frame) - self.write(']') + self.write("]") def visit_Dict(self, node, frame): - self.write('{') + self.write("{") for idx, item in enumerate(node.items): if idx: - self.write(', ') + self.write(", ") self.visit(item.key, frame) - self.write(': ') + self.write(": ") self.visit(item.value, frame) - self.write('}') + self.write("}") - def binop(operator, interceptable=True): + def binop(operator, interceptable=True): # noqa: B902 @optimizeconst def visitor(self, node, frame): - if self.environment.sandboxed and \ - operator in self.environment.intercepted_binops: - self.write('environment.call_binop(context, %r, ' % operator) + if ( + self.environment.sandboxed + and operator in self.environment.intercepted_binops + ): + self.write("environment.call_binop(context, %r, " % operator) self.visit(node.left, frame) - self.write(', ') + self.write(", ") self.visit(node.right, frame) else: - self.write('(') + self.write("(") self.visit(node.left, frame) - self.write(' %s ' % operator) + self.write(" %s " % operator) self.visit(node.right, frame) - self.write(')') + self.write(")") + return visitor - def uaop(operator, interceptable=True): + def uaop(operator, interceptable=True): # noqa: B902 @optimizeconst def visitor(self, node, frame): - if self.environment.sandboxed and \ - operator in self.environment.intercepted_unops: - self.write('environment.call_unop(context, %r, ' % operator) + if ( + self.environment.sandboxed + and operator in self.environment.intercepted_unops + ): + 
self.write("environment.call_unop(context, %r, " % operator) self.visit(node.node, frame) else: - self.write('(' + operator) + self.write("(" + operator) self.visit(node.node, frame) - self.write(')') + self.write(")") + return visitor - visit_Add = binop('+') - visit_Sub = binop('-') - visit_Mul = binop('*') - visit_Div = binop('/') - visit_FloorDiv = binop('//') - visit_Pow = binop('**') - visit_Mod = binop('%') - visit_And = binop('and', interceptable=False) - visit_Or = binop('or', interceptable=False) - visit_Pos = uaop('+') - visit_Neg = uaop('-') - visit_Not = uaop('not ', interceptable=False) + visit_Add = binop("+") + visit_Sub = binop("-") + visit_Mul = binop("*") + visit_Div = binop("/") + visit_FloorDiv = binop("//") + visit_Pow = binop("**") + visit_Mod = binop("%") + visit_And = binop("and", interceptable=False) + visit_Or = binop("or", interceptable=False) + visit_Pos = uaop("+") + visit_Neg = uaop("-") + visit_Not = uaop("not ", interceptable=False) del binop, uaop @optimizeconst def visit_Concat(self, node, frame): if frame.eval_ctx.volatile: - func_name = '(context.eval_ctx.volatile and' \ - ' markup_join or unicode_join)' + func_name = "(context.eval_ctx.volatile and markup_join or unicode_join)" elif frame.eval_ctx.autoescape: - func_name = 'markup_join' + func_name = "markup_join" else: - func_name = 'unicode_join' - self.write('%s((' % func_name) + func_name = "unicode_join" + self.write("%s((" % func_name) for arg in node.nodes: self.visit(arg, frame) - self.write(', ') - self.write('))') + self.write(", ") + self.write("))") @optimizeconst def visit_Compare(self, node, frame): + self.write("(") self.visit(node.expr, frame) for op in node.ops: self.visit(op, frame) + self.write(")") def visit_Operand(self, node, frame): - self.write(' %s ' % operators[node.op]) + self.write(" %s " % operators[node.op]) self.visit(node.expr, frame) @optimizeconst def visit_Getattr(self, node, frame): - self.write('environment.getattr(') + if self.environment.is_async: + self.write("(await auto_await(") + + self.write("environment.getattr(") self.visit(node.node, frame) - self.write(', %r)' % node.attr) + self.write(", %r)" % node.attr) + + if self.environment.is_async: + self.write("))") @optimizeconst def visit_Getitem(self, node, frame): # slices bypass the environment getitem method. 
if isinstance(node.arg, nodes.Slice): self.visit(node.node, frame) - self.write('[') + self.write("[") self.visit(node.arg, frame) - self.write(']') + self.write("]") else: - self.write('environment.getitem(') + if self.environment.is_async: + self.write("(await auto_await(") + + self.write("environment.getitem(") self.visit(node.node, frame) - self.write(', ') + self.write(", ") self.visit(node.arg, frame) - self.write(')') + self.write(")") + + if self.environment.is_async: + self.write("))") def visit_Slice(self, node, frame): if node.start is not None: self.visit(node.start, frame) - self.write(':') + self.write(":") if node.stop is not None: self.visit(node.stop, frame) if node.step is not None: - self.write(':') + self.write(":") self.visit(node.step, frame) @optimizeconst def visit_Filter(self, node, frame): if self.environment.is_async: - self.write('await auto_await(') - self.write(self.filters[node.name] + '(') + self.write("await auto_await(") + self.write(self.filters[node.name] + "(") func = self.environment.filters.get(node.name) if func is None: - self.fail('no filter named %r' % node.name, node.lineno) - if getattr(func, 'contextfilter', False): - self.write('context, ') - elif getattr(func, 'evalcontextfilter', False): - self.write('context.eval_ctx, ') - elif getattr(func, 'environmentfilter', False): - self.write('environment, ') + self.fail("no filter named %r" % node.name, node.lineno) + if getattr(func, "contextfilter", False) is True: + self.write("context, ") + elif getattr(func, "evalcontextfilter", False) is True: + self.write("context.eval_ctx, ") + elif getattr(func, "environmentfilter", False) is True: + self.write("environment, ") # if the filter node is None we are inside a filter block # and want to write to the current buffer if node.node is not None: self.visit(node.node, frame) elif frame.eval_ctx.volatile: - self.write('(context.eval_ctx.autoescape and' - ' Markup(concat(%s)) or concat(%s))' % - (frame.buffer, frame.buffer)) + self.write( + "(context.eval_ctx.autoescape and" + " Markup(concat(%s)) or concat(%s))" % (frame.buffer, frame.buffer) + ) elif frame.eval_ctx.autoescape: - self.write('Markup(concat(%s))' % frame.buffer) + self.write("Markup(concat(%s))" % frame.buffer) else: - self.write('concat(%s)' % frame.buffer) + self.write("concat(%s)" % frame.buffer) self.signature(node, frame) - self.write(')') + self.write(")") if self.environment.is_async: - self.write(')') + self.write(")") @optimizeconst def visit_Test(self, node, frame): - self.write(self.tests[node.name] + '(') + self.write(self.tests[node.name] + "(") if node.name not in self.environment.tests: - self.fail('no test named %r' % node.name, node.lineno) + self.fail("no test named %r" % node.name, node.lineno) self.visit(node.node, frame) self.signature(node, frame) - self.write(')') + self.write(")") @optimizeconst def visit_CondExpr(self, node, frame): def write_expr2(): if node.expr2 is not None: return self.visit(node.expr2, frame) - self.write('undefined(%r)' % ('the inline if-' - 'expression on %s evaluated to false and ' - 'no else section was defined.' % self.position(node))) - - self.write('(') + self.write( + "cond_expr_undefined(%r)" + % ( + "the inline if-" + "expression on %s evaluated to false and " + "no else section was defined." 
% self.position(node) + ) + ) + + self.write("(") self.visit(node.expr1, frame) - self.write(' if ') + self.write(" if ") self.visit(node.test, frame) - self.write(' else ') + self.write(" else ") write_expr2() - self.write(')') + self.write(")") @optimizeconst def visit_Call(self, node, frame, forward_caller=False): if self.environment.is_async: - self.write('await auto_await(') + self.write("await auto_await(") if self.environment.sandboxed: - self.write('environment.call(context, ') + self.write("environment.call(context, ") else: - self.write('context.call(') + self.write("context.call(") self.visit(node.node, frame) - extra_kwargs = forward_caller and {'caller': 'caller'} or None + extra_kwargs = forward_caller and {"caller": "caller"} or None self.signature(node, frame, extra_kwargs) - self.write(')') + self.write(")") if self.environment.is_async: - self.write(')') + self.write(")") def visit_Keyword(self, node, frame): - self.write(node.key + '=') + self.write(node.key + "=") self.visit(node.value, frame) # -- Unused nodes for extensions def visit_MarkSafe(self, node, frame): - self.write('Markup(') + self.write("Markup(") self.visit(node.expr, frame) - self.write(')') + self.write(")") def visit_MarkSafeIfAutoescape(self, node, frame): - self.write('(context.eval_ctx.autoescape and Markup or identity)(') + self.write("(context.eval_ctx.autoescape and Markup or identity)(") self.visit(node.expr, frame) - self.write(')') + self.write(")") def visit_EnvironmentAttribute(self, node, frame): - self.write('environment.' + node.name) + self.write("environment." + node.name) def visit_ExtensionAttribute(self, node, frame): - self.write('environment.extensions[%r].%s' % (node.identifier, node.name)) + self.write("environment.extensions[%r].%s" % (node.identifier, node.name)) def visit_ImportedName(self, node, frame): self.write(self.import_aliases[node.importname]) @@ -1670,13 +1789,16 @@ def visit_InternalName(self, node, frame): self.write(node.name) def visit_ContextReference(self, node, frame): - self.write('context') + self.write("context") + + def visit_DerivedContextReference(self, node, frame): + self.write(self.derive_context(frame)) def visit_Continue(self, node, frame): - self.writeline('continue', node) + self.writeline("continue", node) def visit_Break(self, node, frame): - self.writeline('break', node) + self.writeline("break", node) def visit_Scope(self, node, frame): scope_frame = frame.inner() @@ -1687,8 +1809,8 @@ def visit_Scope(self, node, frame): def visit_OverlayScope(self, node, frame): ctx = self.temporary_identifier() - self.writeline('%s = %s' % (ctx, self.derive_context(frame))) - self.writeline('%s.vars = ' % ctx) + self.writeline("%s = %s" % (ctx, self.derive_context(frame))) + self.writeline("%s.vars = " % ctx) self.visit(node.context, frame) self.push_context_reference(ctx) @@ -1701,7 +1823,7 @@ def visit_OverlayScope(self, node, frame): def visit_EvalContextModifier(self, node, frame): for keyword in node.options: - self.writeline('context.eval_ctx.%s = ' % keyword.key) + self.writeline("context.eval_ctx.%s = " % keyword.key) self.visit(keyword.value, frame) try: val = keyword.value.as_const(frame.eval_ctx) @@ -1713,9 +1835,9 @@ def visit_EvalContextModifier(self, node, frame): def visit_ScopedEvalContextModifier(self, node, frame): old_ctx_name = self.temporary_identifier() saved_ctx = frame.eval_ctx.save() - self.writeline('%s = context.eval_ctx.save()' % old_ctx_name) + self.writeline("%s = context.eval_ctx.save()" % old_ctx_name) 
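# [Editorial aside -- not part of the diff. The save()/revert() pair being
# emitted here is what scopes {% autoescape %} blocks. Sketch assuming the
# Jinja2 2.x autoescape extension; the template is invented.]
from jinja2 import Environment

env = Environment(autoescape=False, extensions=["jinja2.ext.autoescape"])
tmpl = env.from_string("{% autoescape true %}{{ v }}{% endautoescape %}{{ v }}")
print(tmpl.render(v="<b>"))  # -> &lt;b&gt;<b>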
self.visit_EvalContextModifier(node, frame) for child in node.body: self.visit(child, frame) frame.eval_ctx.revert(saved_ctx) - self.writeline('context.eval_ctx.revert(%s)' % old_ctx_name) + self.writeline("context.eval_ctx.revert(%s)" % old_ctx_name) diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/jinja2/constants.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/jinja2/constants.py old mode 100755 new mode 100644 index 11efd1ed..bf7f2ca7 --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/jinja2/constants.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/jinja2/constants.py @@ -1,17 +1,6 @@ # -*- coding: utf-8 -*- -""" - jinja.constants - ~~~~~~~~~~~~~~~ - - Various constants. - - :copyright: (c) 2017 by the Jinja Team. - :license: BSD, see LICENSE for more details. -""" - - #: list of lorem ipsum words used by the lipsum() helper function -LOREM_IPSUM_WORDS = u'''\ +LOREM_IPSUM_WORDS = u"""\ a ac accumsan ad adipiscing aenean aliquam aliquet amet ante aptent arcu at auctor augue bibendum blandit class commodo condimentum congue consectetuer consequat conubia convallis cras cubilia cum curabitur curae cursus dapibus @@ -29,4 +18,4 @@ sociis sociosqu sodales sollicitudin suscipit suspendisse taciti tellus tempor tempus tincidunt torquent tortor tristique turpis ullamcorper ultrices ultricies urna ut varius vehicula vel velit venenatis vestibulum vitae vivamus -viverra volutpat vulputate''' +viverra volutpat vulputate""" diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/jinja2/debug.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/jinja2/debug.py old mode 100755 new mode 100644 index b61139f0..5d8aec31 --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/jinja2/debug.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/jinja2/debug.py @@ -1,372 +1,268 @@ -# -*- coding: utf-8 -*- -""" - jinja2.debug - ~~~~~~~~~~~~ - - Implements the debug interface for Jinja. This module does some pretty - ugly stuff with the Python traceback system in order to achieve tracebacks - with correct line numbers, locals and contents. - - :copyright: (c) 2017 by the Jinja Team. - :license: BSD, see LICENSE for more details. -""" import sys -import traceback -from types import TracebackType, CodeType -from jinja2.utils import missing, internal_code -from jinja2.exceptions import TemplateSyntaxError -from jinja2._compat import iteritems, reraise, PY2 +from types import CodeType -# on pypy we can take advantage of transparent proxies -try: - from __pypy__ import tproxy -except ImportError: - tproxy = None +from . import TemplateSyntaxError +from ._compat import PYPY +from .utils import internal_code +from .utils import missing -# how does the raise helper look like? -try: - exec("raise TypeError, 'foo'") -except SyntaxError: - raise_helper = 'raise __jinja_exception__[1]' -except TypeError: - raise_helper = 'raise __jinja_exception__[0], __jinja_exception__[1]' +def rewrite_traceback_stack(source=None): + """Rewrite the current exception to replace any tracebacks from + within compiled template code with tracebacks that look like they + came from the template source. + This must be called within an ``except`` block. -class TracebackFrameProxy(object): - """Proxies a traceback frame.""" + :param exc_info: A :meth:`sys.exc_info` tuple. If not provided, + the current ``exc_info`` is used. + :param source: For ``TemplateSyntaxError``, the original source if + known. + :return: A :meth:`sys.exc_info` tuple that can be re-raised. 
+ """ + exc_type, exc_value, tb = sys.exc_info() - def __init__(self, tb): - self.tb = tb - self._tb_next = None + if isinstance(exc_value, TemplateSyntaxError) and not exc_value.translated: + exc_value.translated = True + exc_value.source = source - @property - def tb_next(self): - return self._tb_next + try: + # Remove the old traceback on Python 3, otherwise the frames + # from the compiler still show up. + exc_value.with_traceback(None) + except AttributeError: + pass - def set_next(self, next): - if tb_set_next is not None: - try: - tb_set_next(self.tb, next and next.tb or None) - except Exception: - # this function can fail due to all the hackery it does - # on various python implementations. We just catch errors - # down and ignore them if necessary. - pass - self._tb_next = next - - @property - def is_jinja_frame(self): - return '__jinja_template__' in self.tb.tb_frame.f_globals - - def __getattr__(self, name): - return getattr(self.tb, name) - - -def make_frame_proxy(frame): - proxy = TracebackFrameProxy(frame) - if tproxy is None: - return proxy - def operation_handler(operation, *args, **kwargs): - if operation in ('__getattribute__', '__getattr__'): - return getattr(proxy, args[0]) - elif operation == '__setattr__': - proxy.__setattr__(*args, **kwargs) - else: - return getattr(proxy, operation)(*args, **kwargs) - return tproxy(TracebackType, operation_handler) - - -class ProcessedTraceback(object): - """Holds a Jinja preprocessed traceback for printing or reraising.""" - - def __init__(self, exc_type, exc_value, frames): - assert frames, 'no frames for this traceback?' - self.exc_type = exc_type - self.exc_value = exc_value - self.frames = frames - - # newly concatenate the frames (which are proxies) - prev_tb = None - for tb in self.frames: - if prev_tb is not None: - prev_tb.set_next(tb) - prev_tb = tb - prev_tb.set_next(None) - - def render_as_text(self, limit=None): - """Return a string with the traceback.""" - lines = traceback.format_exception(self.exc_type, self.exc_value, - self.frames[0], limit=limit) - return ''.join(lines).rstrip() - - def render_as_html(self, full=False): - """Return a unicode string with the traceback as rendered HTML.""" - from jinja2.debugrenderer import render_traceback - return u'%s\n\n' % ( - render_traceback(self, full=full), - self.render_as_text().decode('utf-8', 'replace') + # Outside of runtime, so the frame isn't executing template + # code, but it still needs to point at the template. 
-    def set_next(self, next):
-        if tb_set_next is not None:
-            try:
-                tb_set_next(self.tb, next and next.tb or None)
-            except Exception:
-                # this function can fail due to all the hackery it does
-                # on various python implementations. We just catch errors
-                # down and ignore them if necessary.
-                pass
-        self._tb_next = next
-
-    @property
-    def is_jinja_frame(self):
-        return '__jinja_template__' in self.tb.tb_frame.f_globals
-
-    def __getattr__(self, name):
-        return getattr(self.tb, name)
-
-
-def make_frame_proxy(frame):
-    proxy = TracebackFrameProxy(frame)
-    if tproxy is None:
-        return proxy
-    def operation_handler(operation, *args, **kwargs):
-        if operation in ('__getattribute__', '__getattr__'):
-            return getattr(proxy, args[0])
-        elif operation == '__setattr__':
-            proxy.__setattr__(*args, **kwargs)
-        else:
-            return getattr(proxy, operation)(*args, **kwargs)
-    return tproxy(TracebackType, operation_handler)
-
-
-class ProcessedTraceback(object):
-    """Holds a Jinja preprocessed traceback for printing or reraising."""
-
-    def __init__(self, exc_type, exc_value, frames):
-        assert frames, 'no frames for this traceback?'
-        self.exc_type = exc_type
-        self.exc_value = exc_value
-        self.frames = frames
-
-        # newly concatenate the frames (which are proxies)
-        prev_tb = None
-        for tb in self.frames:
-            if prev_tb is not None:
-                prev_tb.set_next(tb)
-            prev_tb = tb
-        prev_tb.set_next(None)
-
-    def render_as_text(self, limit=None):
-        """Return a string with the traceback."""
-        lines = traceback.format_exception(self.exc_type, self.exc_value,
-                                           self.frames[0], limit=limit)
-        return ''.join(lines).rstrip()
-
-    def render_as_html(self, full=False):
-        """Return a unicode string with the traceback as rendered HTML."""
-        from jinja2.debugrenderer import render_traceback
-        return u'%s\n\n<!--\n%s\n-->' % (
-            render_traceback(self, full=full),
-            self.render_as_text().decode('utf-8', 'replace')
+        # Outside of runtime, so the frame isn't executing template
+        # code, but it still needs to point at the template.
+        tb = fake_traceback(
+            exc_value, None, exc_value.filename or "<unknown>", exc_value.lineno
         )
-
-    @property
-    def is_template_syntax_error(self):
-        """`True` if this is a template syntax error."""
-        return isinstance(self.exc_value, TemplateSyntaxError)
-
-    @property
-    def exc_info(self):
-        """Exception info tuple with a proxy around the frame objects."""
-        return self.exc_type, self.exc_value, self.frames[0]
-
-    @property
-    def standard_exc_info(self):
-        """Standard python exc_info for re-raising"""
-        tb = self.frames[0]
-        # the frame will be an actual traceback (or transparent proxy) if
-        # we are on pypy or a python implementation with support for tproxy
-        if type(tb) is not TracebackType:
-            tb = tb.tb
-        return self.exc_type, self.exc_value, tb
-
-
-def make_traceback(exc_info, source_hint=None):
-    """Creates a processed traceback object from the exc_info."""
-    exc_type, exc_value, tb = exc_info
-    if isinstance(exc_value, TemplateSyntaxError):
-        exc_info = translate_syntax_error(exc_value, source_hint)
-        initial_skip = 0
     else:
-        initial_skip = 1
-    return translate_exception(exc_info, initial_skip)
-
-
-def translate_syntax_error(error, source=None):
-    """Rewrites a syntax error to please traceback systems."""
-    error.source = source
-    error.translated = True
-    exc_info = (error.__class__, error, None)
-    filename = error.filename
-    if filename is None:
-        filename = '<unknown>'
-    return fake_exc_info(exc_info, filename, error.lineno)
+        # Skip the frame for the render function.
+        tb = tb.tb_next

+    stack = []

-def translate_exception(exc_info, initial_skip=0):
-    """If passed an exc_info it will automatically rewrite the exceptions
-    all the way down to the correct line numbers and frames.
-    """
-    tb = exc_info[2]
-    frames = []
-
-    # skip some internal frames if wanted
-    for x in range(initial_skip):
-        if tb is not None:
-            tb = tb.tb_next
-    initial_tb = tb
-
+    # Build the stack of traceback objects, replacing any in template
+    # code with the source file and line information.
     while tb is not None:
-        # skip frames decorated with @internalcode.  These are internal
-        # calls we can't avoid and that are useless in template debugging
-        # output.
+        # Skip frames decorated with @internalcode. These are internal
+        # calls that aren't useful in template debugging output.
         if tb.tb_frame.f_code in internal_code:
             tb = tb.tb_next
             continue

-        # save a reference to the next frame if we override the current
-        # one with a faked one.
-        next = tb.tb_next
+        template = tb.tb_frame.f_globals.get("__jinja_template__")

-        # fake template exceptions
-        template = tb.tb_frame.f_globals.get('__jinja_template__')
         if template is not None:
             lineno = template.get_corresponding_lineno(tb.tb_lineno)
-            tb = fake_exc_info(exc_info[:2] + (tb,), template.filename,
-                               lineno)[2]
+            fake_tb = fake_traceback(exc_value, tb, template.filename, lineno)
+            stack.append(fake_tb)
+        else:
+            stack.append(tb)

-        frames.append(make_frame_proxy(tb))
-        tb = next
+        tb = tb.tb_next

-    # if we don't have any exceptions in the frames left, we have to
-    # reraise it unchanged.
-    # XXX: can we backup here?  when could this happen?
-    if not frames:
-        reraise(exc_info[0], exc_info[1], exc_info[2])
+    tb_next = None

-    return ProcessedTraceback(exc_info[0], exc_info[1], frames)
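The reversed relinking that follows exists because tb_set_next may return a new object rather than mutating in place (the PyPy proxy case), so each link has to be created before anything points at it. The same idea with a plain linked list, as an analogy only:

    class Node:
        def __init__(self, name):
            self.name = name
            self.next = None

    def set_next(node, nxt):
        node.next = nxt
        return node  # like the Python 3.7+ tb_set_next variant

    nodes = [Node("a"), Node("b"), Node("c")]
    head = None
    for node in reversed(nodes):
        head = set_next(node, head)
    # head is "a" -> "b" -> "c"; no node was ever linked to an object
    # that might later be replaced.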
+    # Assign tb_next in reverse to avoid circular references.
+    for tb in reversed(stack):
+        tb_next = tb_set_next(tb, tb_next)

+    return exc_type, exc_value, tb_next

-def get_jinja_locals(real_locals):
-    ctx = real_locals.get('context')
-    if ctx:
-        locals = ctx.get_all().copy()
+
+def fake_traceback(exc_value, tb, filename, lineno):
+    """Produce a new traceback object that looks like it came from the
+    template source instead of the compiled code. The filename, line
+    number, and location name will point to the template, and the local
+    variables will be the current template context.
+
+    :param exc_value: The original exception to be re-raised to create
+        the new traceback.
+    :param tb: The original traceback to get the local variables and
+        code info from.
+    :param filename: The template filename.
+    :param lineno: The line number in the template source.
+    """
+    if tb is not None:
+        # Replace the real locals with the context that would be
+        # available at that point in the template.
+        locals = get_template_locals(tb.tb_frame.f_locals)
+        locals.pop("__jinja_exception__", None)
     else:
         locals = {}

+    globals = {
+        "__name__": filename,
+        "__file__": filename,
+        "__jinja_exception__": exc_value,
+    }
+    # Raise an exception at the correct line number.
+    code = compile("\n" * (lineno - 1) + "raise __jinja_exception__", filename, "exec")
+
+    # Build a new code object that points to the template file and
+    # replaces the location with a block name.
+    try:
+        location = "template"
+
+        if tb is not None:
+            function = tb.tb_frame.f_code.co_name
+
+            if function == "root":
+                location = "top-level template code"
+            elif function.startswith("block_"):
+                location = 'block "%s"' % function[6:]
+
+        # Collect arguments for the new code object. CodeType only
+        # accepts positional arguments, and arguments were inserted in
+        # new Python versions.
+        code_args = []
+
+        for attr in (
+            "argcount",
+            "posonlyargcount",  # Python 3.8
+            "kwonlyargcount",  # Python 3
+            "nlocals",
+            "stacksize",
+            "flags",
+            "code",  # codestring
+            "consts",  # constants
+            "names",
+            "varnames",
+            ("filename", filename),
+            ("name", location),
+            "firstlineno",
+            "lnotab",
+            "freevars",
+            "cellvars",
+        ):
+            if isinstance(attr, tuple):
+                # Replace with given value.
+                code_args.append(attr[1])
+                continue
+
+            try:
+                # Copy original value if it exists.
+                code_args.append(getattr(code, "co_" + attr))
+            except AttributeError:
+                # Some arguments were added later.
+                continue
+
+        code = CodeType(*code_args)
+    except Exception:
+        # Some environments such as Google App Engine don't support
+        # modifying code objects.
+        pass
+
+    # Execute the new code, which is guaranteed to raise, and return
+    # the new traceback without this frame.
+    try:
+        exec(code, globals, locals)
+    except BaseException:
+        return sys.exc_info()[2].tb_next
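The compile() call in fake_traceback leans on a simple trick: padding the source with newlines makes the lone raise statement land on the desired line, so the traceback that exec produces reports that line under the template's filename. A self-contained sketch of just that trick (the helper name and __exc__ global are hypothetical, not part of the patch):

    import sys

    def traceback_at(filename, lineno, exc):
        """Raise exc with a frame that reports filename:lineno."""
        code = compile("\n" * (lineno - 1) + "raise __exc__", filename, "exec")
        try:
            exec(code, {"__exc__": exc})
        except BaseException:
            return sys.exc_info()[2].tb_next  # drop this helper's frame

    tb = traceback_at("example.html", 42, RuntimeError("boom"))
    assert tb.tb_lineno == 42
    assert tb.tb_frame.f_code.co_filename == "example.html"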
+
+
+def get_template_locals(real_locals):
+    """Based on the runtime locals, get the context that would be
+    available at that point in the template.
+    """
+    # Start with the current template context.
+    ctx = real_locals.get("context")
+
+    if ctx:
+        data = ctx.get_all().copy()
+    else:
+        data = {}
+
+    # Might be in a derived context that only sets local variables
+    # rather than pushing a context. Local variables follow the scheme
+    # l_depth_name. Find the highest-depth local that has a value for
+    # each name.
     local_overrides = {}

-    for name, value in iteritems(real_locals):
-        if not name.startswith('l_') or value is missing:
+    for name, value in real_locals.items():
+        if not name.startswith("l_") or value is missing:
+            # Not a template variable, or no longer relevant.
             continue
+
         try:
-            _, depth, name = name.split('_', 2)
+            _, depth, name = name.split("_", 2)
             depth = int(depth)
         except ValueError:
             continue
+
+        cur_depth = local_overrides.get(name, (-1,))[0]
+
+        if cur_depth < depth:
             local_overrides[name] = (depth, value)

-    for name, (_, value) in iteritems(local_overrides):
+    # Modify the context with any derived context.
+    for name, (_, value) in local_overrides.items():
         if value is missing:
-            locals.pop(name, None)
+            data.pop(name, None)
         else:
-            locals[name] = value
+            data[name] = value

-    return locals
+    return data


-def fake_exc_info(exc_info, filename, lineno):
-    """Helper for `translate_exception`."""
-    exc_type, exc_value, tb = exc_info
+if sys.version_info >= (3, 7):
+    # tb_next is directly assignable as of Python 3.7
+    def tb_set_next(tb, tb_next):
+        tb.tb_next = tb_next
+        return tb

-    # figure the real context out
-    if tb is not None:
-        locals = get_jinja_locals(tb.tb_frame.f_locals)
-        # if there is a local called __jinja_exception__, we get
-        # rid of it to not break the debug functionality.
-        locals.pop('__jinja_exception__', None)
-    else:
-        locals = {}
-
-    # assamble fake globals we need
-    globals = {
-        '__name__': filename,
-        '__file__': filename,
-        '__jinja_exception__': exc_info[:2],
-
-        # we don't want to keep the reference to the template around
-        # to not cause circular dependencies, but we mark it as Jinja
-        # frame for the ProcessedTraceback
-        '__jinja_template__': None
-    }
-
-    # and fake the exception
-    code = compile('\n' * (lineno - 1) + raise_helper, filename, 'exec')
-
-    # if it's possible, change the name of the code.  This won't work
-    # on some python environments such as google appengine
+elif PYPY:
+    # PyPy might have special support, and won't work with ctypes.
     try:
-        if tb is None:
-            location = 'template'
-        else:
-            function = tb.tb_frame.f_code.co_name
-            if function == 'root':
-                location = 'top-level template code'
-            elif function.startswith('block_'):
-                location = 'block "%s"' % function[6:]
-            else:
-                location = 'template'
-
-        if PY2:
-            code = CodeType(0, code.co_nlocals, code.co_stacksize,
-                            code.co_flags, code.co_code, code.co_consts,
-                            code.co_names, code.co_varnames, filename,
-                            location, code.co_firstlineno,
-                            code.co_lnotab, (), ())
-        else:
-            code = CodeType(0, code.co_kwonlyargcount,
-                            code.co_nlocals, code.co_stacksize,
-                            code.co_flags, code.co_code, code.co_consts,
-                            code.co_names, code.co_varnames, filename,
-                            location, code.co_firstlineno,
-                            code.co_lnotab, (), ())
-    except Exception as e:
-        pass
+        import tputil
+    except ImportError:
+        # Without tproxy support, use the original traceback.
+        def tb_set_next(tb, tb_next):
+            return tb

-    # execute the code and catch the new traceback
-    try:
-        exec(code, globals, locals)
-    except:
-        exc_info = sys.exc_info()
-        new_tb = exc_info[2].tb_next
+    else:
+        # With tproxy support, create a proxy around the traceback that
+        # returns the new tb_next.
+        def tb_set_next(tb, tb_next):
+            def controller(op):
+                if op.opname == "__getattribute__" and op.args[0] == "tb_next":
+                    return tb_next

-    # return without this frame
-    return exc_info[:2] + (new_tb,)
+                return op.delegate()

+            return tputil.make_proxy(controller, obj=tb)

-def _init_ugly_crap():
-    """This function implements a few ugly things so that we can patch the
-    traceback objects.  The function returned allows resetting `tb_next` on
-    any python traceback object.  Do not attempt to use this on non cpython
-    interpreters
-    """
-    import ctypes
-    from types import TracebackType

-    if PY2:
-        # figure out size of _Py_ssize_t for Python 2:
-        if hasattr(ctypes.pythonapi, 'Py_InitModule4_64'):
-            _Py_ssize_t = ctypes.c_int64
-        else:
-            _Py_ssize_t = ctypes.c_int
-    else:
-        # platform ssize_t on Python 3
-        _Py_ssize_t = ctypes.c_ssize_t
+else:
+    # Use ctypes to assign tb_next at the C level since it's read-only
+    # from Python.
+    import ctypes

-    # regular python
-    class _PyObject(ctypes.Structure):
-        pass
-    _PyObject._fields_ = [
-        ('ob_refcnt', _Py_ssize_t),
-        ('ob_type', ctypes.POINTER(_PyObject))
-    ]
-
-    # python with trace
-    if hasattr(sys, 'getobjects'):
-        class _PyObject(ctypes.Structure):
-            pass
-        _PyObject._fields_ = [
-            ('_ob_next', ctypes.POINTER(_PyObject)),
-            ('_ob_prev', ctypes.POINTER(_PyObject)),
-            ('ob_refcnt', _Py_ssize_t),
-            ('ob_type', ctypes.POINTER(_PyObject))
+    class _CTraceback(ctypes.Structure):
+        _fields_ = [
+            # Extra PyObject slots when compiled with Py_TRACE_REFS.
+            ("PyObject_HEAD", ctypes.c_byte * object().__sizeof__()),
+            # Only care about tb_next as an object, not a traceback.
+            ("tb_next", ctypes.py_object),
         ]

-    class _Traceback(_PyObject):
-        pass
-    _Traceback._fields_ = [
-        ('tb_next', ctypes.POINTER(_Traceback)),
-        ('tb_frame', ctypes.POINTER(_PyObject)),
-        ('tb_lasti', ctypes.c_int),
-        ('tb_lineno', ctypes.c_int)
-    ]
-
-    def tb_set_next(tb, next):
-        """Set the tb_next attribute of a traceback object."""
-        if not (isinstance(tb, TracebackType) and
-                (next is None or isinstance(next, TracebackType))):
-            raise TypeError('tb_set_next arguments must be traceback objects')
-        obj = _Traceback.from_address(id(tb))
-        if tb.tb_next is not None:
-            old = _Traceback.from_address(id(tb.tb_next))
-            old.ob_refcnt -= 1
-        if next is None:
-            obj.tb_next = ctypes.POINTER(_Traceback)()
-        else:
-            next = _Traceback.from_address(id(next))
-            next.ob_refcnt += 1
-            obj.tb_next = ctypes.pointer(next)
+    def tb_set_next(tb, tb_next):
+        c_tb = _CTraceback.from_address(id(tb))

-    return tb_set_next
+        # Clear out the old tb_next.
+        if tb.tb_next is not None:
+            c_tb_next = ctypes.py_object(tb.tb_next)
+            c_tb.tb_next = ctypes.py_object()
+            ctypes.pythonapi.Py_DecRef(c_tb_next)

+        # Assign the new tb_next.
+        if tb_next is not None:
+            c_tb_next = ctypes.py_object(tb_next)
+            ctypes.pythonapi.Py_IncRef(c_tb_next)
+            c_tb.tb_next = c_tb_next

-# try to get a tb_set_next implementation if we don't have transparent
-# proxies.
-tb_set_next = None
-if tproxy is None:
-    try:
-        tb_set_next = _init_ugly_crap()
-    except:
-        pass
-    del _init_ugly_crap
+        return tb
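The three tb_set_next variants above are selected once at import time. On CPython 3.7+ the attribute is simply writable, which is what makes the first branch trivial; a quick sanity check of that behavior (an illustrative snippet, CPython 3.7+ only):

    import sys

    def capture(exc_type, msg):
        try:
            raise exc_type(msg)
        except exc_type:
            return sys.exc_info()[2]

    tb_a = capture(ValueError, "outer")
    tb_b = capture(KeyError, "inner")
    tb_a.tb_next = tb_b   # writable since Python 3.7
    assert tb_a.tb_next is tb_b
    tb_a.tb_next = None   # detaching is allowed too

Older interpreters fall back to the ctypes struct poking shown in the last branch, which is why the patch keeps the reference-count bookkeeping explicit.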
-""" -from jinja2._compat import range_type -from jinja2.utils import generate_lorem_ipsum, Cycler, Joiner, Namespace - +from ._compat import range_type +from .filters import FILTERS as DEFAULT_FILTERS # noqa: F401 +from .tests import TESTS as DEFAULT_TESTS # noqa: F401 +from .utils import Cycler +from .utils import generate_lorem_ipsum +from .utils import Joiner +from .utils import Namespace # defaults for the parser / lexer -BLOCK_START_STRING = '{%' -BLOCK_END_STRING = '%}' -VARIABLE_START_STRING = '{{' -VARIABLE_END_STRING = '}}' -COMMENT_START_STRING = '{#' -COMMENT_END_STRING = '#}' +BLOCK_START_STRING = "{%" +BLOCK_END_STRING = "%}" +VARIABLE_START_STRING = "{{" +VARIABLE_END_STRING = "}}" +COMMENT_START_STRING = "{#" +COMMENT_END_STRING = "#}" LINE_STATEMENT_PREFIX = None LINE_COMMENT_PREFIX = None TRIM_BLOCKS = False LSTRIP_BLOCKS = False -NEWLINE_SEQUENCE = '\n' +NEWLINE_SEQUENCE = "\n" KEEP_TRAILING_NEWLINE = False - # default filters, tests and namespace -from jinja2.filters import FILTERS as DEFAULT_FILTERS -from jinja2.tests import TESTS as DEFAULT_TESTS + DEFAULT_NAMESPACE = { - 'range': range_type, - 'dict': dict, - 'lipsum': generate_lorem_ipsum, - 'cycler': Cycler, - 'joiner': Joiner, - 'namespace': Namespace + "range": range_type, + "dict": dict, + "lipsum": generate_lorem_ipsum, + "cycler": Cycler, + "joiner": Joiner, + "namespace": Namespace, } - # default policies DEFAULT_POLICIES = { - 'compiler.ascii_str': True, - 'urlize.rel': 'noopener', - 'urlize.target': None, - 'truncate.leeway': 5, - 'json.dumps_function': None, - 'json.dumps_kwargs': {'sort_keys': True}, - 'ext.i18n.trimmed': False, + "compiler.ascii_str": True, + "urlize.rel": "noopener", + "urlize.target": None, + "truncate.leeway": 5, + "json.dumps_function": None, + "json.dumps_kwargs": {"sort_keys": True}, + "ext.i18n.trimmed": False, } - - -# export all constants -__all__ = tuple(x for x in locals().keys() if x.isupper()) diff --git a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/jinja2/environment.py b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/jinja2/environment.py old mode 100755 new mode 100644 index 549d9afa..8430390e --- a/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/jinja2/environment.py +++ b/Splunk_TA_paloalto/bin/splunk_ta_paloalto/aob_py3/jinja2/environment.py @@ -1,60 +1,83 @@ # -*- coding: utf-8 -*- -""" - jinja2.environment - ~~~~~~~~~~~~~~~~~~ - - Provides a class that holds runtime and parsing time options. - - :copyright: (c) 2017 by the Jinja Team. - :license: BSD, see LICENSE for more details. +"""Classes for managing templates and their runtime and compile time +options. 
""" import os import sys import weakref -from functools import reduce, partial -from jinja2 import nodes -from jinja2.defaults import BLOCK_START_STRING, \ - BLOCK_END_STRING, VARIABLE_START_STRING, VARIABLE_END_STRING, \ - COMMENT_START_STRING, COMMENT_END_STRING, LINE_STATEMENT_PREFIX, \ - LINE_COMMENT_PREFIX, TRIM_BLOCKS, NEWLINE_SEQUENCE, \ - DEFAULT_FILTERS, DEFAULT_TESTS, DEFAULT_NAMESPACE, \ - DEFAULT_POLICIES, KEEP_TRAILING_NEWLINE, LSTRIP_BLOCKS -from jinja2.lexer import get_lexer, TokenStream -from jinja2.parser import Parser -from jinja2.nodes import EvalContext -from jinja2.compiler import generate, CodeGenerator -from jinja2.runtime import Undefined, new_context, Context -from jinja2.exceptions import TemplateSyntaxError, TemplateNotFound, \ - TemplatesNotFound, TemplateRuntimeError -from jinja2.utils import import_string, LRUCache, Markup, missing, \ - concat, consume, internalcode, have_async_gen -from jinja2._compat import imap, ifilter, string_types, iteritems, \ - text_type, reraise, implements_iterator, implements_to_string, \ - encode_filename, PY2, PYPY - +from functools import partial +from functools import reduce + +from markupsafe import Markup + +from . import nodes +from ._compat import encode_filename +from ._compat import implements_iterator +from ._compat import implements_to_string +from ._compat import iteritems +from ._compat import PY2 +from ._compat import PYPY +from ._compat import reraise +from ._compat import string_types +from ._compat import text_type +from .compiler import CodeGenerator +from .compiler import generate +from .defaults import BLOCK_END_STRING +from .defaults import BLOCK_START_STRING +from .defaults import COMMENT_END_STRING +from .defaults import COMMENT_START_STRING +from .defaults import DEFAULT_FILTERS +from .defaults import DEFAULT_NAMESPACE +from .defaults import DEFAULT_POLICIES +from .defaults import DEFAULT_TESTS +from .defaults import KEEP_TRAILING_NEWLINE +from .defaults import LINE_COMMENT_PREFIX +from .defaults import LINE_STATEMENT_PREFIX +from .defaults import LSTRIP_BLOCKS +from .defaults import NEWLINE_SEQUENCE +from .defaults import TRIM_BLOCKS +from .defaults import VARIABLE_END_STRING +from .defaults import VARIABLE_START_STRING +from .exceptions import TemplateNotFound +from .exceptions import TemplateRuntimeError +from .exceptions import TemplatesNotFound +from .exceptions import TemplateSyntaxError +from .exceptions import UndefinedError +from .lexer import get_lexer +from .lexer import TokenStream +from .nodes import EvalContext +from .parser import Parser +from .runtime import Context +from .runtime import new_context +from .runtime import Undefined +from .utils import concat +from .utils import consume +from .utils import have_async_gen +from .utils import import_string +from .utils import internalcode +from .utils import LRUCache +from .utils import missing # for direct template usage we have up to ten living environments _spontaneous_environments = LRUCache(10) -# the function to create jinja traceback objects. This is dynamically -# imported on the first exception in the exception handler. -_make_traceback = None +def get_spontaneous_environment(cls, *args): + """Return a new spontaneous environment. A spontaneous environment + is used for templates created directly rather than through an + existing environment. -def get_spontaneous_environment(*args): - """Return a new spontaneous environment. 
A spontaneous environment is an - unnamed and unaccessible (in theory) environment that is used for - templates generated from a string and not from the file system. + :param cls: Environment class to create. + :param args: Positional arguments passed to environment. """ + key = (cls, args) + try: - env = _spontaneous_environments.get(args) - except TypeError: - return Environment(*args) - if env is not None: + return _spontaneous_environments[key] + except KeyError: + _spontaneous_environments[key] = env = cls(*args) + env.shared = True return env - _spontaneous_environments[args] = env = Environment(*args) - env.shared = True - return env def create_cache(size): @@ -93,20 +116,25 @@ def fail_for_missing_callable(string, name): try: name._fail_with_undefined_error() except Exception as e: - msg = '%s (%s; did you forget to quote the callable name?)' % (msg, e) + msg = "%s (%s; did you forget to quote the callable name?)" % (msg, e) raise TemplateRuntimeError(msg) def _environment_sanity_check(environment): """Perform a sanity check on the environment.""" - assert issubclass(environment.undefined, Undefined), 'undefined must ' \ - 'be a subclass of undefined because filters depend on it.' - assert environment.block_start_string != \ - environment.variable_start_string != \ - environment.comment_start_string, 'block, variable and comment ' \ - 'start strings must be different' - assert environment.newline_sequence in ('\r', '\r\n', '\n'), \ - 'newline_sequence set to unknown line ending string.' + assert issubclass( + environment.undefined, Undefined + ), "undefined must be a subclass of undefined because filters depend on it." + assert ( + environment.block_start_string + != environment.variable_start_string + != environment.comment_start_string + ), "block, variable and comment start strings must be different" + assert environment.newline_sequence in ( + "\r", + "\r\n", + "\n", + ), "newline_sequence set to unknown line ending string." return environment @@ -191,7 +219,7 @@ class Environment(object): `autoescape` If set to ``True`` the XML/HTML autoescaping feature is enabled by default. For more details about autoescaping see - :class:`~jinja2.utils.Markup`. As of Jinja 2.4 this can also + :class:`~markupsafe.Markup`. As of Jinja 2.4 this can also be a callable that is passed the template name and has to return ``True`` or ``False`` depending on autoescape should be enabled by default. @@ -249,10 +277,6 @@ class Environment(object): #: must not be modified shared = False - #: these are currently EXPERIMENTAL undocumented features. - exception_handler = None - exception_formatter = None - #: the class that is used for code generation. See #: :class:`~jinja2.compiler.CodeGenerator` for more information. code_generator_class = CodeGenerator @@ -261,29 +285,31 @@ class Environment(object): #: :class:`~jinja2.runtime.Context` for more information. 
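The get_spontaneous_environment rework above backs the plain Template(...) constructor: templates created without an explicit environment share one cached Environment per distinct (cls, args) key, with up to ten configurations kept alive by the LRU cache. Observable behavior, assuming this vendored Jinja 2.11 build:

    from jinja2 import Template

    t1 = Template("Hello {{ name }}!")
    t2 = Template("Bye {{ name }}!")
    # Same construction arguments -> same shared Environment instance.
    assert t1.environment is t2.environment
    assert t1.environment.shared is True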
@@ -261,29 +285,31 @@
     #: :class:`~jinja2.runtime.Context` for more information.
     context_class = Context

-    def __init__(self,
-                 block_start_string=BLOCK_START_STRING,
-                 block_end_string=BLOCK_END_STRING,
-                 variable_start_string=VARIABLE_START_STRING,
-                 variable_end_string=VARIABLE_END_STRING,
-                 comment_start_string=COMMENT_START_STRING,
-                 comment_end_string=COMMENT_END_STRING,
-                 line_statement_prefix=LINE_STATEMENT_PREFIX,
-                 line_comment_prefix=LINE_COMMENT_PREFIX,
-                 trim_blocks=TRIM_BLOCKS,
-                 lstrip_blocks=LSTRIP_BLOCKS,
-                 newline_sequence=NEWLINE_SEQUENCE,
-                 keep_trailing_newline=KEEP_TRAILING_NEWLINE,
-                 extensions=(),
-                 optimized=True,
-                 undefined=Undefined,
-                 finalize=None,
-                 autoescape=False,
-                 loader=None,
-                 cache_size=400,
-                 auto_reload=True,
-                 bytecode_cache=None,
-                 enable_async=False):
+    def __init__(
+        self,
+        block_start_string=BLOCK_START_STRING,
+        block_end_string=BLOCK_END_STRING,
+        variable_start_string=VARIABLE_START_STRING,
+        variable_end_string=VARIABLE_END_STRING,
+        comment_start_string=COMMENT_START_STRING,
+        comment_end_string=COMMENT_END_STRING,
+        line_statement_prefix=LINE_STATEMENT_PREFIX,
+        line_comment_prefix=LINE_COMMENT_PREFIX,
+        trim_blocks=TRIM_BLOCKS,
+        lstrip_blocks=LSTRIP_BLOCKS,
+        newline_sequence=NEWLINE_SEQUENCE,
+        keep_trailing_newline=KEEP_TRAILING_NEWLINE,
+        extensions=(),
+        optimized=True,
+        undefined=Undefined,
+        finalize=None,
+        autoescape=False,
+        loader=None,
+        cache_size=400,
+        auto_reload=True,
+        bytecode_cache=None,
+        enable_async=False,
+    ):
         # !!Important notice!!
         #   The constructor accepts quite a few arguments that should be
         #   passed by keyword rather than position.  However it's important to
@@ -334,6 +360,9 @@ def __init__(self,
         self.enable_async = enable_async
         self.is_async = self.enable_async and have_async_gen
+        if self.is_async:
+            # runs patch_all() to enable async support
+            from . import asyncsupport  # noqa: F401

         _environment_sanity_check(self)
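The enable_async flag is the one constructor option with an import side effect: the first async-capable environment pulls in asyncsupport, which patches the runtime so render_async becomes available. Typical usage, sketched under the assumption of Python 3.7+ with this vendored Jinja version:

    import asyncio
    from jinja2 import Environment

    env = Environment(enable_async=True)
    template = env.from_string("Hello {{ name }}!")

    async def main():
        # render_async exists because enable_async triggered the patch
        return await template.render_async(name="world")

    print(asyncio.run(main()))  # -> Hello world!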
""" args = dict(locals()) - del args['self'], args['cache_size'], args['extensions'] + del args["self"], args["cache_size"], args["extensions"] rv = object.__new__(self.__class__) rv.__dict__.update(self.__dict__) @@ -402,8 +444,7 @@ def overlay(self, block_start_string=missing, block_end_string=missing, def iter_extensions(self): """Iterates over the extensions by priority.""" - return iter(sorted(self.extensions.values(), - key=lambda x: x.priority)) + return iter(sorted(self.extensions.values(), key=lambda x: x.priority)) def getitem(self, obj, argument): """Get an item or attribute of an object but prefer the item.""" @@ -435,8 +476,9 @@ def getattr(self, obj, attribute): except (TypeError, LookupError, AttributeError): return self.undefined(obj=obj, name=attribute) - def call_filter(self, name, value, args=None, kwargs=None, - context=None, eval_ctx=None): + def call_filter( + self, name, value, args=None, kwargs=None, context=None, eval_ctx=None + ): """Invokes a filter on a value the same way the compiler does it. Note that on Python 3 this might return a coroutine in case the @@ -448,21 +490,22 @@ def call_filter(self, name, value, args=None, kwargs=None, """ func = self.filters.get(name) if func is None: - fail_for_missing_callable('no filter named %r', name) + fail_for_missing_callable("no filter named %r", name) args = [value] + list(args or ()) - if getattr(func, 'contextfilter', False): + if getattr(func, "contextfilter", False) is True: if context is None: - raise TemplateRuntimeError('Attempted to invoke context ' - 'filter without context') + raise TemplateRuntimeError( + "Attempted to invoke context filter without context" + ) args.insert(0, context) - elif getattr(func, 'evalcontextfilter', False): + elif getattr(func, "evalcontextfilter", False) is True: if eval_ctx is None: if context is not None: eval_ctx = context.eval_ctx else: eval_ctx = EvalContext(self) args.insert(0, eval_ctx) - elif getattr(func, 'environmentfilter', False): + elif getattr(func, "environmentfilter", False) is True: args.insert(0, self) return func(*args, **(kwargs or {})) @@ -473,7 +516,7 @@ def call_test(self, name, value, args=None, kwargs=None): """ func = self.tests.get(name) if func is None: - fail_for_missing_callable('no test named %r', name) + fail_for_missing_callable("no test named %r", name) return func(value, *(args or ()), **(kwargs or {})) @internalcode @@ -483,14 +526,13 @@ def parse(self, source, name=None, filename=None): executable source- or bytecode. This is useful for debugging or to extract information from templates. - If you are :ref:`developing Jinja2 extensions ` + If you are :ref:`developing Jinja extensions ` this gives you a good overview of the node tree generated. """ try: return self._parse(source, name, filename) except TemplateSyntaxError: - exc_info = sys.exc_info() - self.handle_exception(exc_info, source_hint=source) + self.handle_exception(source=source) def _parse(self, source, name, filename): """Internal parsing function used by `parse` and `compile`.""" @@ -510,16 +552,18 @@ def lex(self, source, name=None, filename=None): try: return self.lexer.tokeniter(source, name, filename) except TemplateSyntaxError: - exc_info = sys.exc_info() - self.handle_exception(exc_info, source_hint=source) + self.handle_exception(source=source) def preprocess(self, source, name=None, filename=None): """Preprocesses the source with all extensions. 
This is automatically called for all parsing and compiling methods but *not* for :meth:`lex` because there you usually only want the actual source tokenized. """ - return reduce(lambda s, e: e.preprocess(s, name, filename), - self.iter_extensions(), text_type(source)) + return reduce( + lambda s, e: e.preprocess(s, name, filename), + self.iter_extensions(), + text_type(source), + ) def _tokenize(self, source, name, filename=None, state=None): """Called by the parser to do the preprocessing and filtering @@ -539,8 +583,14 @@ def _generate(self, source, name, filename, defer_init=False): .. versionadded:: 2.5 """ - return generate(source, self, name, filename, defer_init=defer_init, - optimized=self.optimized) + return generate( + source, + self, + name, + filename, + defer_init=defer_init, + optimized=self.optimized, + ) def _compile(self, source, filename): """Internal hook that can be overridden to hook a different compile @@ -548,11 +598,10 @@ def _compile(self, source, filename): .. versionadded:: 2.5 """ - return compile(source, filename, 'exec') + return compile(source, filename, "exec") @internalcode - def compile(self, source, name=None, filename=None, raw=False, - defer_init=False): + def compile(self, source, name=None, filename=None, raw=False, defer_init=False): """Compile a node or template source code. The `name` parameter is the load name of the template after it was joined using :meth:`join_path` if necessary, not the filename on the file system. @@ -577,18 +626,16 @@ def compile(self, source, name=None, filename=None, raw=False, if isinstance(source, string_types): source_hint = source source = self._parse(source, name, filename) - source = self._generate(source, name, filename, - defer_init=defer_init) + source = self._generate(source, name, filename, defer_init=defer_init) if raw: return source if filename is None: - filename = '