From 5d4ed4b65296cc4fb4ec37e708e220c065d3c0fd Mon Sep 17 00:00:00 2001 From: Izan Date: Thu, 21 Oct 2021 20:54:58 +0100 Subject: [PATCH 1/3] Migrate to `og_blurple` --- bot/exts/backend/branding/_cog.py | 4 ++-- bot/exts/events/code_jams/_cog.py | 2 +- bot/exts/info/information.py | 6 +++--- bot/exts/info/site.py | 12 ++++++------ bot/exts/moderation/defcon.py | 4 ++-- bot/exts/moderation/infraction/management.py | 2 +- bot/exts/moderation/modlog.py | 20 ++++++++++---------- bot/exts/utils/clean.py | 2 +- bot/exts/utils/extensions.py | 2 +- bot/exts/utils/internal.py | 2 +- bot/exts/utils/reminders.py | 6 +++--- bot/exts/utils/utils.py | 2 +- tests/bot/exts/info/test_information.py | 12 ++++++------ 13 files changed, 38 insertions(+), 38 deletions(-) diff --git a/bot/exts/backend/branding/_cog.py b/bot/exts/backend/branding/_cog.py index 9c5bdbb4ec..0c5839a7a5 100644 --- a/bot/exts/backend/branding/_cog.py +++ b/bot/exts/backend/branding/_cog.py @@ -294,7 +294,7 @@ async def send_info_embed(self, channel_id: int, *, is_notification: bool) -> No else: content = "Python Discord is entering a new event!" if is_notification else None - embed = discord.Embed(description=description[:4096], colour=discord.Colour.blurple()) + embed = discord.Embed(description=description[:4096], colour=discord.Colour.og_blurple()) embed.set_footer(text=duration[:4096]) await channel.send(content=content, embed=embed) @@ -573,7 +573,7 @@ async def branding_calendar_group(self, ctx: commands.Context) -> None: await ctx.send(embed=resp) return - embed = discord.Embed(title="Current event calendar", colour=discord.Colour.blurple()) + embed = discord.Embed(title="Current event calendar", colour=discord.Colour.og_blurple()) # Because Discord embeds can only contain up to 25 fields, we only show the first 25. 
first_25 = list(available_events.items())[:25] diff --git a/bot/exts/events/code_jams/_cog.py b/bot/exts/events/code_jams/_cog.py index b31d628d56..452199f5fd 100644 --- a/bot/exts/events/code_jams/_cog.py +++ b/bot/exts/events/code_jams/_cog.py @@ -160,7 +160,7 @@ async def info(self, ctx: commands.Context, member: Member) -> None: embed = Embed( title=str(member), - colour=Colour.blurple() + colour=Colour.og_blurple() ) embed.add_field(name="Team", value=self.team_name(channel), inline=True) diff --git a/bot/exts/info/information.py b/bot/exts/info/information.py index 0dcb8de116..7f4811a43f 100644 --- a/bot/exts/info/information.py +++ b/bot/exts/info/information.py @@ -112,7 +112,7 @@ async def roles_info(self, ctx: Context) -> None: # Build an embed embed = Embed( title=f"Role information (Total {len(roles)} role{'s' * (len(role_list) > 1)})", - colour=Colour.blurple() + colour=Colour.og_blurple() ) await LinePaginator.paginate(role_list, ctx, embed, empty=False) @@ -170,7 +170,7 @@ async def role_info(self, ctx: Context, *roles: Union[Role, str]) -> None: @command(name="server", aliases=["server_info", "guild", "guild_info"]) async def server_info(self, ctx: Context) -> None: """Returns an embed full of server information.""" - embed = Embed(colour=Colour.blurple(), title="Server Information") + embed = Embed(colour=Colour.og_blurple(), title="Server Information") created = discord_timestamp(ctx.guild.created_at, TimestampFormats.RELATIVE) region = ctx.guild.region @@ -316,7 +316,7 @@ async def create_user_embed(self, ctx: Context, user: MemberOrUser) -> Embed: embed.add_field(name=field_name, value=field_content, inline=False) embed.set_thumbnail(url=user.display_avatar.url) - embed.colour = user.colour if user.colour != Colour.default() else Colour.blurple() + embed.colour = user.colour if user.colour != Colour.default() else Colour.og_blurple() return embed diff --git a/bot/exts/info/site.py b/bot/exts/info/site.py index e1f2f5153a..e8e71558b6 100644 --- a/bot/exts/info/site.py +++ b/bot/exts/info/site.py @@ -29,7 +29,7 @@ async def site_main(self, ctx: Context) -> None: embed = Embed(title="Python Discord website") embed.set_footer(text=url) - embed.colour = Colour.blurple() + embed.colour = Colour.og_blurple() embed.description = ( f"[Our official website]({url}) is an open-source community project " "created with Python and Django. It contains information about the server " @@ -46,7 +46,7 @@ async def site_resources(self, ctx: Context) -> None: embed = Embed(title="Resources") embed.set_footer(text=f"{learning_url}") - embed.colour = Colour.blurple() + embed.colour = Colour.og_blurple() embed.description = ( f"The [Resources page]({learning_url}) on our website contains a " "list of hand-selected learning resources that we regularly recommend " @@ -62,7 +62,7 @@ async def site_tools(self, ctx: Context) -> None: embed = Embed(title="Tools") embed.set_footer(text=f"{tools_url}") - embed.colour = Colour.blurple() + embed.colour = Colour.og_blurple() embed.description = ( f"The [Tools page]({tools_url}) on our website contains a " f"couple of the most popular tools for programming in Python." @@ -77,7 +77,7 @@ async def site_help(self, ctx: Context) -> None: embed = Embed(title="Asking Good Questions") embed.set_footer(text=url) - embed.colour = Colour.blurple() + embed.colour = Colour.og_blurple() embed.description = ( "Asking the right question about something that's new to you can sometimes be tricky. 
" f"To help with this, we've created a [guide to asking good questions]({url}) on our website. " @@ -93,7 +93,7 @@ async def site_faq(self, ctx: Context) -> None: embed = Embed(title="FAQ") embed.set_footer(text=url) - embed.colour = Colour.blurple() + embed.colour = Colour.og_blurple() embed.description = ( "As the largest Python community on Discord, we get hundreds of questions every day. " "Many of these questions have been asked before. We've compiled a list of the most " @@ -106,7 +106,7 @@ async def site_faq(self, ctx: Context) -> None: @site_group.command(name="rules", aliases=("r", "rule"), root_aliases=("rules", "rule")) async def site_rules(self, ctx: Context, rules: Greedy[int]) -> None: """Provides a link to all rules or, if specified, displays specific rule(s).""" - rules_embed = Embed(title='Rules', color=Colour.blurple(), url=f'{BASE_URL}/pages/rules') + rules_embed = Embed(title='Rules', color=Colour.og_blurple(), url=f'{BASE_URL}/pages/rules') if not rules: # Rules were not submitted. Return the default description. diff --git a/bot/exts/moderation/defcon.py b/bot/exts/moderation/defcon.py index 80ba101129..e38bfd75d3 100644 --- a/bot/exts/moderation/defcon.py +++ b/bot/exts/moderation/defcon.py @@ -49,7 +49,7 @@ class Action(Enum): SERVER_OPEN = ActionInfo(Icons.defcon_unshutdown, Emojis.defcon_unshutdown, Colours.soft_green, "") SERVER_SHUTDOWN = ActionInfo(Icons.defcon_shutdown, Emojis.defcon_shutdown, Colours.soft_red, "") DURATION_UPDATE = ActionInfo( - Icons.defcon_update, Emojis.defcon_update, Colour.blurple(), "**Threshold:** {threshold}\n\n" + Icons.defcon_update, Emojis.defcon_update, Colour.og_blurple(), "**Threshold:** {threshold}\n\n" ) @@ -151,7 +151,7 @@ async def defcon_group(self, ctx: Context) -> None: async def status(self, ctx: Context) -> None: """Check the current status of DEFCON mode.""" embed = Embed( - colour=Colour.blurple(), title="DEFCON Status", + colour=Colour.og_blurple(), title="DEFCON Status", description=f""" **Threshold:** {humanize_delta(self.threshold) if self.threshold else "-"} **Expires:** {discord_timestamp(self.expiry, TimestampFormats.RELATIVE) if self.expiry else "-"} diff --git a/bot/exts/moderation/infraction/management.py b/bot/exts/moderation/infraction/management.py index b1c8b64dcc..1f15d9950a 100644 --- a/bot/exts/moderation/infraction/management.py +++ b/bot/exts/moderation/infraction/management.py @@ -203,7 +203,7 @@ async def infraction_edit( await self.mod_log.send_log_message( icon_url=constants.Icons.pencil, - colour=discord.Colour.blurple(), + colour=discord.Colour.og_blurple(), title="Infraction edited", thumbnail=thumbnail, text=textwrap.dedent(f""" diff --git a/bot/exts/moderation/modlog.py b/bot/exts/moderation/modlog.py index 9d1ae6853f..fa3925f7cf 100644 --- a/bot/exts/moderation/modlog.py +++ b/bot/exts/moderation/modlog.py @@ -251,7 +251,7 @@ async def on_guild_channel_update(self, before: GUILD_CHANNEL, after: GuildChann message = f"**#{after.name}** (`{after.id}`)\n{message}" await self.send_log_message( - Icons.hash_blurple, Colour.blurple(), + Icons.hash_blurple, Colour.og_blurple(), "Channel updated", message ) @@ -326,7 +326,7 @@ async def on_guild_role_update(self, before: discord.Role, after: discord.Role) message = f"**{after.name}** (`{after.id}`)\n{message}" await self.send_log_message( - Icons.crown_blurple, Colour.blurple(), + Icons.crown_blurple, Colour.og_blurple(), "Role updated", message ) @@ -376,7 +376,7 @@ async def on_guild_update(self, before: discord.Guild, after: discord.Guild) -> 
message = f"**{after.name}** (`{after.id}`)\n{message}" await self.send_log_message( - Icons.guild_update, Colour.blurple(), + Icons.guild_update, Colour.og_blurple(), "Guild updated", message, thumbnail=after.icon.with_static_format("png") ) @@ -447,7 +447,7 @@ async def on_member_unban(self, guild: discord.Guild, member: discord.User) -> N return await self.send_log_message( - Icons.user_unban, Colour.blurple(), + Icons.user_unban, Colour.og_blurple(), "User unbanned", format_user(member), thumbnail=member.display_avatar.url, channel_id=Channels.mod_log @@ -512,7 +512,7 @@ async def on_member_update(self, before: discord.Member, after: discord.Member) await self.send_log_message( icon_url=Icons.user_update, - colour=Colour.blurple(), + colour=Colour.og_blurple(), title="Member updated", text=message, thumbnail=after.display_avatar.url, @@ -718,7 +718,7 @@ async def on_message_edit(self, msg_before: discord.Message, msg_after: discord. footer = None await self.send_log_message( - Icons.message_edit, Colour.blurple(), "Message edited", response, + Icons.message_edit, Colour.og_blurple(), "Message edited", response, channel_id=Channels.message_log, timestamp_override=timestamp, footer=footer ) @@ -761,12 +761,12 @@ async def on_raw_message_edit(self, event: discord.RawMessageUpdateEvent) -> Non ) await self.send_log_message( - Icons.message_edit, Colour.blurple(), "Message edited (Before)", + Icons.message_edit, Colour.og_blurple(), "Message edited (Before)", before_response, channel_id=Channels.message_log ) await self.send_log_message( - Icons.message_edit, Colour.blurple(), "Message edited (After)", + Icons.message_edit, Colour.og_blurple(), "Message edited (After)", after_response, channel_id=Channels.message_log ) @@ -776,7 +776,7 @@ async def on_thread_update(self, before: Thread, after: Thread) -> None: if before.name != after.name: await self.send_log_message( Icons.hash_blurple, - Colour.blurple(), + Colour.og_blurple(), "Thread name edited", ( f"Thread {after.mention} (`{after.id}`) from {after.parent.mention} (`{after.parent.id}`): " @@ -861,7 +861,7 @@ async def on_voice_state_update( diff_values = {**diff.get("values_changed", {}), **diff.get("type_changes", {})} icon = Icons.voice_state_blue - colour = Colour.blurple() + colour = Colour.og_blurple() changes = [] for attr, values in diff_values.items(): diff --git a/bot/exts/utils/clean.py b/bot/exts/utils/clean.py index a2e2d3eed1..e59eea6d5b 100644 --- a/bot/exts/utils/clean.py +++ b/bot/exts/utils/clean.py @@ -263,7 +263,7 @@ async def clean_cancel(self, ctx: Context) -> None: self.cleaning = False embed = Embed( - color=Colour.blurple(), + color=Colour.og_blurple(), description="Clean interrupted." ) await ctx.send(embed=embed, delete_after=10) diff --git a/bot/exts/utils/extensions.py b/bot/exts/utils/extensions.py index fa5d38917c..fda1e49e25 100644 --- a/bot/exts/utils/extensions.py +++ b/bot/exts/utils/extensions.py @@ -113,7 +113,7 @@ async def list_command(self, ctx: Context) -> None: Grey indicates that the extension is unloaded. Green indicates that the extension is currently loaded. 
""" - embed = Embed(colour=Colour.blurple()) + embed = Embed(colour=Colour.og_blurple()) embed.set_author( name="Extensions List", url=URLs.github_bot_repo, diff --git a/bot/exts/utils/internal.py b/bot/exts/utils/internal.py index 96664929b6..f54d692b66 100644 --- a/bot/exts/utils/internal.py +++ b/bot/exts/utils/internal.py @@ -243,7 +243,7 @@ async def socketstats(self, ctx: Context) -> None: stats_embed = discord.Embed( title="WebSocket statistics", description=f"Receiving {per_s:0.2f} events per second.", - color=discord.Color.blurple() + color=discord.Color.og_blurple() ) for event_type, count in self.socket_events.most_common(25): diff --git a/bot/exts/utils/reminders.py b/bot/exts/utils/reminders.py index 3cb9307a9e..c12932f854 100644 --- a/bot/exts/utils/reminders.py +++ b/bot/exts/utils/reminders.py @@ -183,7 +183,7 @@ async def send_reminder(self, reminder: dict, expected_time: datetime = None) -> name="Sorry, your reminder should have arrived earlier!" ) else: - embed.colour = discord.Colour.blurple() + embed.colour = discord.Colour.og_blurple() embed.set_author( icon_url=Icons.remind_blurple, name="It has arrived!" @@ -350,7 +350,7 @@ async def list_reminders(self, ctx: Context) -> None: lines.append(text) embed = discord.Embed() - embed.colour = discord.Colour.blurple() + embed.colour = discord.Colour.og_blurple() embed.title = f"Reminders for {ctx.author}" # Remind the user that they have no reminders :^) @@ -360,7 +360,7 @@ async def list_reminders(self, ctx: Context) -> None: return # Construct the embed and paginate it. - embed.colour = discord.Colour.blurple() + embed.colour = discord.Colour.og_blurple() await LinePaginator.paginate( lines, diff --git a/bot/exts/utils/utils.py b/bot/exts/utils/utils.py index f69bab781c..821cebd8cd 100644 --- a/bot/exts/utils/utils.py +++ b/bot/exts/utils/utils.py @@ -96,7 +96,7 @@ async def zen(self, ctx: Context, *, search_value: Union[int, str, None] = None) If a string is provided, the line which matches best will be produced. 
""" embed = Embed( - colour=Colour.blurple(), + colour=Colour.og_blurple(), title="The Zen of Python", description=ZEN_OF_PYTHON ) diff --git a/tests/bot/exts/info/test_information.py b/tests/bot/exts/info/test_information.py index 4b50c3fd9e..1f3914bd4b 100644 --- a/tests/bot/exts/info/test_information.py +++ b/tests/bot/exts/info/test_information.py @@ -42,7 +42,7 @@ async def test_roles_command_command(self): embed = kwargs.pop('embed') self.assertEqual(embed.title, "Role information (Total 1 role)") - self.assertEqual(embed.colour, discord.Colour.blurple()) + self.assertEqual(embed.colour, discord.Colour.og_blurple()) self.assertEqual(embed.description, f"\n`{self.moderator_role.id}` - {self.moderator_role.mention}\n") async def test_role_info_command(self): @@ -50,7 +50,7 @@ async def test_role_info_command(self): dummy_role = helpers.MockRole( name="Dummy", id=112233445566778899, - colour=discord.Colour.blurple(), + colour=discord.Colour.og_blurple(), position=10, members=[self.ctx.author], permissions=discord.Permissions(0) @@ -80,7 +80,7 @@ async def test_role_info_command(self): admin_embed = admin_kwargs["embed"] self.assertEqual(dummy_embed.title, "Dummy info") - self.assertEqual(dummy_embed.colour, discord.Colour.blurple()) + self.assertEqual(dummy_embed.colour, discord.Colour.og_blurple()) self.assertEqual(dummy_embed.fields[0].value, str(dummy_role.id)) self.assertEqual(dummy_embed.fields[1].value, f"#{dummy_role.colour.value:0>6x}") @@ -417,14 +417,14 @@ async def test_create_user_embed_uses_top_role_colour_when_user_has_roles(self): f"{COG_PATH}.basic_user_infraction_counts", new=unittest.mock.AsyncMock(return_value=("Infractions", "basic infractions")) ) - async def test_create_user_embed_uses_blurple_colour_when_user_has_no_roles(self): - """The embed should be created with a blurple colour if the user has no assigned roles.""" + async def test_create_user_embed_uses_og_blurple_colour_when_user_has_no_roles(self): + """The embed should be created with the og blurple colour if the user has no assigned roles.""" ctx = helpers.MockContext() user = helpers.MockMember(id=217, colour=discord.Colour.default()) embed = await self.cog.create_user_embed(ctx, user) - self.assertEqual(embed.colour, discord.Colour.blurple()) + self.assertEqual(embed.colour, discord.Colour.og_blurple()) @unittest.mock.patch( f"{COG_PATH}.basic_user_infraction_counts", From 91d08d0dd04c0380b37a21bd0731a27bb4bb49ec Mon Sep 17 00:00:00 2001 From: Izan Date: Sun, 31 Oct 2021 16:09:16 +0000 Subject: [PATCH 2/3] Fix tests --- tests/bot/exts/info/test_information.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/bot/exts/info/test_information.py b/tests/bot/exts/info/test_information.py index 1f3914bd4b..632287322e 100644 --- a/tests/bot/exts/info/test_information.py +++ b/tests/bot/exts/info/test_information.py @@ -84,7 +84,7 @@ async def test_role_info_command(self): self.assertEqual(dummy_embed.fields[0].value, str(dummy_role.id)) self.assertEqual(dummy_embed.fields[1].value, f"#{dummy_role.colour.value:0>6x}") - self.assertEqual(dummy_embed.fields[2].value, "0.65 0.64 242") + self.assertEqual(dummy_embed.fields[2].value, "0.63 0.48 218") self.assertEqual(dummy_embed.fields[3].value, "1") self.assertEqual(dummy_embed.fields[4].value, "10") self.assertEqual(dummy_embed.fields[5].value, "0") From 3d429274b8c11a0bf49155a3dae0ce48dd50702a Mon Sep 17 00:00:00 2001 From: Izan Date: Mon, 1 Nov 2021 15:23:12 +0000 Subject: [PATCH 3/3] Merge branch 'main' into og_blurple-migration --- 
bot/constants.py | 1 + bot/converters.py | 38 +- bot/exts/filters/antispam.py | 15 +- bot/exts/filters/filter_lists.py | 7 + bot/exts/filters/filtering.py | 58 +- bot/exts/fun/off_topic_names.py | 7 +- bot/exts/help_channels/_cog.py | 6 + bot/exts/help_channels/_message.py | 6 +- bot/exts/moderation/clean.py | 595 ++++++++++++++++++ bot/exts/moderation/defcon.py | 8 +- bot/exts/moderation/infraction/_scheduler.py | 26 +- bot/exts/moderation/infraction/management.py | 9 +- bot/exts/moderation/modlog.py | 23 +- bot/exts/moderation/modpings.py | 5 +- bot/exts/moderation/voice_gate.py | 6 +- .../moderation/watchchannels/_watchchannel.py | 3 +- bot/exts/recruitment/talentpool/_review.py | 7 +- bot/exts/utils/internal.py | 6 +- bot/exts/utils/ping.py | 5 +- bot/exts/utils/reminders.py | 10 +- bot/monkey_patches.py | 7 +- bot/utils/channel.py | 9 +- bot/utils/checks.py | 3 +- bot/utils/time.py | 19 +- config-default.yml | 1 + poetry.lock | 181 +++--- pyproject.toml | 2 +- tests/bot/test_converters.py | 50 +- tests/bot/utils/test_time.py | 27 +- 29 files changed, 920 insertions(+), 220 deletions(-) create mode 100644 bot/exts/moderation/clean.py diff --git a/bot/constants.py b/bot/constants.py index f704c9e6a2..e3846fb3de 100644 --- a/bot/constants.py +++ b/bot/constants.py @@ -444,6 +444,7 @@ class Channels(metaclass=YAMLGetter): incidents: int incidents_archive: int mod_alerts: int + mod_meta: int nominations: int nomination_voting: int organisation: int diff --git a/bot/converters.py b/bot/converters.py index dd02f6ae61..0984fa0a34 100644 --- a/bot/converters.py +++ b/bot/converters.py @@ -2,7 +2,7 @@ import re import typing as t -from datetime import datetime +from datetime import datetime, timezone from ssl import CertificateError import dateutil.parser @@ -11,7 +11,7 @@ from aiohttp import ClientConnectorError from dateutil.relativedelta import relativedelta from discord.ext.commands import BadArgument, Bot, Context, Converter, IDConverter, MemberConverter, UserConverter -from discord.utils import DISCORD_EPOCH, escape_markdown, snowflake_time +from discord.utils import escape_markdown, snowflake_time from bot import exts from bot.api import ResponseCodeError @@ -28,7 +28,7 @@ log = get_logger(__name__) -DISCORD_EPOCH_DT = datetime.utcfromtimestamp(DISCORD_EPOCH / 1000) +DISCORD_EPOCH_DT = snowflake_time(0) RE_USER_MENTION = re.compile(r"<@!?([0-9]+)>$") @@ -273,14 +273,14 @@ async def convert(self, ctx: Context, arg: str) -> int: snowflake = int(arg) try: - time = snowflake_time(snowflake).replace(tzinfo=None) + time = snowflake_time(snowflake) except (OverflowError, OSError) as e: # Not sure if this can ever even happen, but let's be safe. raise BadArgument(f"{error}: {e}") if time < DISCORD_EPOCH_DT: raise BadArgument(f"{error}: timestamp is before the Discord epoch.") - elif (datetime.utcnow() - time).days < -1: + elif (datetime.now(timezone.utc) - time).days < -1: raise BadArgument(f"{error}: timestamp is too far into the future.") return snowflake @@ -387,7 +387,7 @@ async def convert(self, ctx: Context, duration: str) -> datetime: The converter supports the same symbols for each unit of time as its parent class. 
""" delta = await super().convert(ctx, duration) - now = datetime.utcnow() + now = datetime.now(timezone.utc) try: return now + delta @@ -395,6 +395,24 @@ async def convert(self, ctx: Context, duration: str) -> datetime: raise BadArgument(f"`{duration}` results in a datetime outside the supported range.") +class Age(DurationDelta): + """Convert duration strings into UTC datetime.datetime objects.""" + + async def convert(self, ctx: Context, duration: str) -> datetime: + """ + Converts a `duration` string to a datetime object that's `duration` in the past. + + The converter supports the same symbols for each unit of time as its parent class. + """ + delta = await super().convert(ctx, duration) + now = datetime.now(timezone.utc) + + try: + return now - delta + except (ValueError, OverflowError): + raise BadArgument(f"`{duration}` results in a datetime outside the supported range.") + + class OffTopicName(Converter): """A converter that ensures an added off-topic name is valid.""" @@ -443,8 +461,8 @@ async def convert(self, ctx: Context, datetime_string: str) -> datetime: The converter is flexible in the formats it accepts, as it uses the `isoparse` method of `dateutil.parser`. In general, it accepts datetime strings that start with a date, optionally followed by a time. Specifying a timezone offset in the datetime string is - supported, but the `datetime` object will be converted to UTC and will be returned without - `tzinfo` as a timezone-unaware `datetime` object. + supported, but the `datetime` object will be converted to UTC. If no timezone is specified, the datetime will + be assumed to be in UTC already. In all cases, the returned object will have the UTC timezone. See: https://dateutil.readthedocs.io/en/stable/parser.html#dateutil.parser.isoparse @@ -470,7 +488,8 @@ async def convert(self, ctx: Context, datetime_string: str) -> datetime: if dt.tzinfo: dt = dt.astimezone(dateutil.tz.UTC) - dt = dt.replace(tzinfo=None) + else: # Without a timezone, assume it represents UTC. 
+ dt = dt.replace(tzinfo=dateutil.tz.UTC) return dt @@ -600,6 +619,7 @@ async def convert(self, ctx: Context, arg: str) -> t.Optional[dict]: SourceConverter = SourceType # noqa: F811 DurationDelta = relativedelta # noqa: F811 Duration = datetime # noqa: F811 + Age = datetime # noqa: F811 OffTopicName = str # noqa: F811 ISODateTime = datetime # noqa: F811 HushDurationConverter = int # noqa: F811 diff --git a/bot/exts/filters/antispam.py b/bot/exts/filters/antispam.py index 37ac705086..ddfd112318 100644 --- a/bot/exts/filters/antispam.py +++ b/bot/exts/filters/antispam.py @@ -2,11 +2,12 @@ from collections import defaultdict from collections.abc import Mapping from dataclasses import dataclass, field -from datetime import datetime, timedelta +from datetime import timedelta from itertools import takewhile from operator import attrgetter, itemgetter from typing import Dict, Iterable, List, Set +import arrow from discord import Colour, Member, Message, NotFound, Object, TextChannel from discord.ext.commands import Cog @@ -177,21 +178,17 @@ async def on_message(self, message: Message) -> None: self.cache.append(message) - earliest_relevant_at = datetime.utcnow() - timedelta(seconds=self.max_interval) - relevant_messages = list( - takewhile(lambda msg: msg.created_at.replace(tzinfo=None) > earliest_relevant_at, self.cache) - ) + earliest_relevant_at = arrow.utcnow() - timedelta(seconds=self.max_interval) + relevant_messages = list(takewhile(lambda msg: msg.created_at > earliest_relevant_at, self.cache)) for rule_name in AntiSpamConfig.rules: rule_config = AntiSpamConfig.rules[rule_name] rule_function = RULE_FUNCTION_MAPPING[rule_name] # Create a list of messages that were sent in the interval that the rule cares about. - latest_interesting_stamp = datetime.utcnow() - timedelta(seconds=rule_config['interval']) + latest_interesting_stamp = arrow.utcnow() - timedelta(seconds=rule_config['interval']) messages_for_rule = list( - takewhile( - lambda msg: msg.created_at.replace(tzinfo=None) > latest_interesting_stamp, relevant_messages - ) + takewhile(lambda msg: msg.created_at > latest_interesting_stamp, relevant_messages) ) result = await rule_function(message, messages_for_rule, rule_config) diff --git a/bot/exts/filters/filter_lists.py b/bot/exts/filters/filter_lists.py index 4b5200684b..ee5bd89f34 100644 --- a/bot/exts/filters/filter_lists.py +++ b/bot/exts/filters/filter_lists.py @@ -6,6 +6,7 @@ from bot import constants from bot.api import ResponseCodeError from bot.bot import Bot +from bot.constants import Channels from bot.converters import ValidDiscordServerInvite, ValidFilterListType from bot.log import get_logger from bot.pagination import LinePaginator @@ -100,6 +101,12 @@ async def _add_data( ) raise + # If it is an autoban trigger we send a warning in #mod-meta + if comment and "[autoban]" in comment: + await self.bot.get_channel(Channels.mod_meta).send( + f":warning: Heads-up! The new filter `{content}` (`{comment}`) will automatically ban users." 
+ ) + # Insert the item into the cache self.bot.insert_item_into_filter_list_cache(item) await ctx.message.add_reaction("✅") diff --git a/bot/exts/filters/filtering.py b/bot/exts/filters/filtering.py index a151db1f0c..022b4ab025 100644 --- a/bot/exts/filters/filtering.py +++ b/bot/exts/filters/filtering.py @@ -1,9 +1,10 @@ import asyncio import re -from datetime import datetime, timedelta +from datetime import timedelta from typing import Any, Dict, List, Mapping, NamedTuple, Optional, Tuple, Union -import dateutil +import arrow +import dateutil.parser import discord.errors import regex from async_rediscache import RedisCache @@ -43,6 +44,23 @@ DAYS_BETWEEN_ALERTS = 3 OFFENSIVE_MSG_DELETE_TIME = timedelta(days=Filter.offensive_msg_delete_days) +# Autoban +LINK_PASSWORD = "https://support.discord.com/hc/en-us/articles/218410947-I-forgot-my-Password-Where-can-I-set-a-new-one" +LINK_2FA = "https://support.discord.com/hc/en-us/articles/219576828-Setting-up-Two-Factor-Authentication" +AUTO_BAN_REASON = ( + "Your account has been used to send links to a phishing website. You have been automatically banned. " + "If you are not aware of sending them, that means your account has been compromised.\n\n" + + f"Here is a guide from Discord on [how to change your password]({LINK_PASSWORD}).\n\n" + + f"We also highly recommend that you [enable 2 factor authentication on your account]({LINK_2FA}), " + "for heightened security.\n\n" + + "Once you have changed your password, feel free to follow the instructions at the bottom of " + "this message to appeal your ban." +) +AUTO_BAN_DURATION = timedelta(days=4) + FilterMatch = Union[re.Match, dict, bool, List[discord.Embed]] @@ -192,8 +210,8 @@ def get_name_matches(self, name: str) -> List[re.Match]: async def check_send_alert(self, member: Member) -> bool: """When there is less than 3 days after last alert, return `False`, otherwise `True`.""" if last_alert := await self.name_alerts.get(member.id): - last_alert = datetime.utcfromtimestamp(last_alert) - if datetime.utcnow() - timedelta(days=DAYS_BETWEEN_ALERTS) < last_alert: + last_alert = arrow.get(last_alert) + if arrow.utcnow() - timedelta(days=DAYS_BETWEEN_ALERTS) < last_alert: log.trace(f"Last alert was too recent for {member}'s nickname.") return False @@ -227,7 +245,7 @@ async def check_bad_words_in_name(self, member: Member) -> None: ) # Update time when alert sent - await self.name_alerts.set(member.id, datetime.utcnow().timestamp()) + await self.name_alerts.set(member.id, arrow.utcnow().timestamp()) async def filter_eval(self, result: str, msg: Message) -> bool: """ @@ -346,6 +364,24 @@ async def _filter_message(self, msg: Message, delta: Optional[int] = None) -> No stats = self._add_stats(filter_name, match, msg.content) await self._send_log(filter_name, _filter, msg, stats, reason) + # If the filter reason contains `[autoban]`, we want to auto-ban the user + if reason and "[autoban]" in reason.lower(): + # Create a new context, with the author as is the bot, and the channel as #mod-alerts. + # This sends the ban confirmation directly under watchlist trigger embed, to inform + # mods that the user was auto-banned for the message. 
+ context = await self.bot.get_context(msg) + context.guild = self.bot.get_guild(Guild.id) + context.author = context.guild.get_member(self.bot.user.id) + context.channel = self.bot.get_channel(Channels.mod_alerts) + context.command = self.bot.get_command("tempban") + + await context.invoke( + context.command, + msg.author, + arrow.utcnow() + AUTO_BAN_DURATION, + reason=AUTO_BAN_REASON + ) + break # We don't want multiple filters to trigger async def _send_log( @@ -367,6 +403,10 @@ async def _send_log( # Allow specific filters to override ping_everyone ping_everyone = Filter.ping_everyone and _filter.get("ping_everyone", True) + # If we are going to autoban, we don't want to ping + if reason and "[autoban]" in reason: + ping_everyone = False + eval_msg = "using !eval " if is_eval else "" footer = f"Reason: {reason}" if reason else None message = ( @@ -603,7 +643,7 @@ async def notify_member(self, filtered_member: Member, reason: str, channel: Tex def schedule_msg_delete(self, msg: dict) -> None: """Delete an offensive message once its deletion date is reached.""" - delete_at = dateutil.parser.isoparse(msg['delete_date']).replace(tzinfo=None) + delete_at = dateutil.parser.isoparse(msg['delete_date']) self.scheduler.schedule_at(delete_at, msg['id'], self.delete_offensive_msg(msg)) async def reschedule_offensive_msg_deletion(self) -> None: @@ -611,17 +651,17 @@ async def reschedule_offensive_msg_deletion(self) -> None: await self.bot.wait_until_ready() response = await self.bot.api_client.get('bot/offensive-messages',) - now = datetime.utcnow() + now = arrow.utcnow() for msg in response: - delete_at = dateutil.parser.isoparse(msg['delete_date']).replace(tzinfo=None) + delete_at = dateutil.parser.isoparse(msg['delete_date']) if delete_at < now: await self.delete_offensive_msg(msg) else: self.schedule_msg_delete(msg) - async def delete_offensive_msg(self, msg: Mapping[str, str]) -> None: + async def delete_offensive_msg(self, msg: Mapping[str, int]) -> None: """Delete an offensive message, and then delete it from the db.""" try: channel = self.bot.get_channel(msg['channel_id']) diff --git a/bot/exts/fun/off_topic_names.py b/bot/exts/fun/off_topic_names.py index 427667c665..7df1d172d0 100644 --- a/bot/exts/fun/off_topic_names.py +++ b/bot/exts/fun/off_topic_names.py @@ -1,6 +1,7 @@ import difflib -from datetime import datetime, timedelta +from datetime import timedelta +import arrow from discord import Colour, Embed from discord.ext.commands import Cog, Context, group, has_any_role from discord.utils import sleep_until @@ -22,9 +23,9 @@ async def update_names(bot: Bot) -> None: while True: # Since we truncate the compute timedelta to seconds, we add one second to ensure # we go past midnight in the `seconds_to_sleep` set below. 
- today_at_midnight = datetime.utcnow().replace(microsecond=0, second=0, minute=0, hour=0) + today_at_midnight = arrow.utcnow().replace(microsecond=0, second=0, minute=0, hour=0) next_midnight = today_at_midnight + timedelta(days=1) - await sleep_until(next_midnight) + await sleep_until(next_midnight.datetime) try: channel_0_name, channel_1_name, channel_2_name = await bot.api_client.get( diff --git a/bot/exts/help_channels/_cog.py b/bot/exts/help_channels/_cog.py index 3c6cf7f262..0905cb23d0 100644 --- a/bot/exts/help_channels/_cog.py +++ b/bot/exts/help_channels/_cog.py @@ -376,6 +376,12 @@ async def move_to_available(self) -> None: log.trace(f"Moving #{channel} ({channel.id}) to the Available category.") + # Unpin any previously stuck pins + log.trace(f"Looking for pins stuck in #{channel} ({channel.id}).") + for message in await channel.pins(): + await _message.pin_wrapper(message.id, channel, pin=False) + log.debug(f"Removed a stuck pin from #{channel} ({channel.id}). ID: {message.id}") + await _channel.move_to_bottom( channel=channel, category_id=constants.Categories.help_available, diff --git a/bot/exts/help_channels/_message.py b/bot/exts/help_channels/_message.py index a52c675709..241dd606c2 100644 --- a/bot/exts/help_channels/_message.py +++ b/bot/exts/help_channels/_message.py @@ -174,7 +174,7 @@ async def notify(channel: discord.TextChannel, last_notification: t.Optional[Arr async def pin(message: discord.Message) -> None: """Pin an initial question `message` and store it in a cache.""" - if await _pin_wrapper(message.id, message.channel, pin=True): + if await pin_wrapper(message.id, message.channel, pin=True): await _caches.question_messages.set(message.channel.id, message.id) @@ -205,7 +205,7 @@ async def unpin(channel: discord.TextChannel) -> None: if msg_id is None: log.debug(f"#{channel} ({channel.id}) doesn't have a message pinned.") else: - await _pin_wrapper(msg_id, channel, pin=False) + await pin_wrapper(msg_id, channel, pin=False) def _match_bot_embed(message: t.Optional[discord.Message], description: str) -> bool: @@ -220,7 +220,7 @@ def _match_bot_embed(message: t.Optional[discord.Message], description: str) -> return message.author == bot.instance.user and bot_msg_desc.strip() == description.strip() -async def _pin_wrapper(msg_id: int, channel: discord.TextChannel, *, pin: bool) -> bool: +async def pin_wrapper(msg_id: int, channel: discord.TextChannel, *, pin: bool) -> bool: """ Pin message `msg_id` in `channel` if `pin` is True or unpin if it's False. 
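The Clean cog added below checks Discord's 14-day bulk-deletion limit by building a cutoff snowflake from the current time instead of comparing message timestamps. A Discord snowflake stores its creation time as milliseconds since the Discord epoch (2015-01-01 UTC) in its upper bits, so a message older than the limit simply has a smaller ID than the cutoff. A rough standalone sketch of both directions (the constant and helper names here are illustrative, not taken from the cog):

import time
from datetime import datetime, timezone

DISCORD_EPOCH_MS = 1_420_070_400_000  # 2015-01-01T00:00:00 UTC, in milliseconds

def cutoff_snowflake(max_age_seconds: float) -> int:
    # Smallest ID a message can have while still being younger than max_age_seconds.
    cutoff_ms = (time.time() - max_age_seconds) * 1000.0
    return int(cutoff_ms - DISCORD_EPOCH_MS) << 22  # the timestamp occupies the top bits

def snowflake_created_at(snowflake: int) -> datetime:
    # Inverse direction: recover the creation time encoded in an ID.
    ms = (snowflake >> 22) + DISCORD_EPOCH_MS
    return datetime.fromtimestamp(ms / 1000, tz=timezone.utc)

two_weeks_cutoff = cutoff_snowflake(14 * 24 * 60 * 60)
print(two_weeks_cutoff, snowflake_created_at(two_weeks_cutoff))  # roughly 14 days ago
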
diff --git a/bot/exts/moderation/clean.py b/bot/exts/moderation/clean.py new file mode 100644 index 0000000000..94494b9837 --- /dev/null +++ b/bot/exts/moderation/clean.py @@ -0,0 +1,595 @@ +import contextlib +import logging +import re +import time +from collections import defaultdict +from contextlib import suppress +from datetime import datetime +from itertools import islice +from typing import Any, Callable, Iterable, Literal, Optional, TYPE_CHECKING, Union + +from discord import Colour, Message, NotFound, TextChannel, User, errors +from discord.ext.commands import Cog, Context, Converter, Greedy, group, has_any_role +from discord.ext.commands.converter import TextChannelConverter +from discord.ext.commands.errors import BadArgument + +from bot.bot import Bot +from bot.constants import Channels, CleanMessages, Colours, Emojis, Event, Icons, MODERATION_ROLES +from bot.converters import Age, ISODateTime +from bot.exts.moderation.modlog import ModLog +from bot.utils.channel import is_mod_channel + +log = logging.getLogger(__name__) + +# Default number of messages to look at in each channel. +DEFAULT_TRAVERSE = 10 +# Number of seconds before command invocations and responses are deleted in non-moderation channels. +MESSAGE_DELETE_DELAY = 5 + +# Type alias for checks for whether a message should be deleted. +Predicate = Callable[[Message], bool] +# Type alias for message lookup ranges. +CleanLimit = Union[Message, Age, ISODateTime] + + +class CleanChannels(Converter): + """A converter that turns the given string to a list of channels to clean, or the literal `*` for all channels.""" + + _channel_converter = TextChannelConverter() + + async def convert(self, ctx: Context, argument: str) -> Union[Literal["*"], list[TextChannel]]: + """Converts a string to a list of channels to clean, or the literal `*` for all channels.""" + if argument == "*": + return "*" + return [await self._channel_converter.convert(ctx, channel) for channel in argument.split()] + + +class Regex(Converter): + """A converter that takes a string in the form `.+` and returns the contents of the inline code compiled.""" + + async def convert(self, ctx: Context, argument: str) -> re.Pattern: + """Strips the backticks from the string and compiles it to a regex pattern.""" + match = re.fullmatch(r"`(.+?)`", argument) + if not match: + raise BadArgument("Regex pattern missing wrapping backticks") + try: + return re.compile(match.group(1), re.IGNORECASE + re.DOTALL) + except re.error as e: + raise BadArgument(f"Regex error: {e.msg}") + + +if TYPE_CHECKING: # Used to allow method resolution in IDEs like in converters.py. + CleanChannels = Union[Literal["*"], list[TextChannel]] # noqa: F811 + Regex = re.Pattern # noqa: F811 + + +class Clean(Cog): + """ + A cog that allows messages to be deleted in bulk while applying various filters. + + You can delete messages sent by a specific user, messages sent by bots, all messages, or messages that match a + specific regular expression. + + The deleted messages are saved and uploaded to the database via an API endpoint, and a URL is returned which can be + used to view the messages in the Discord dark theme style. 
+ """ + + def __init__(self, bot: Bot): + self.bot = bot + self.cleaning = False + + @property + def mod_log(self) -> ModLog: + """Get currently loaded ModLog cog instance.""" + return self.bot.get_cog("ModLog") + + # region: Helper functions + + @staticmethod + def _validate_input( + traverse: int, + channels: Optional[CleanChannels], + bots_only: bool, + users: Optional[list[User]], + first_limit: Optional[CleanLimit], + second_limit: Optional[CleanLimit], + ) -> None: + """Raise errors if an argument value or a combination of values is invalid.""" + # Is this an acceptable amount of messages to traverse? + if traverse > CleanMessages.message_limit: + raise BadArgument(f"Cannot traverse more than {CleanMessages.message_limit} messages.") + + if (isinstance(first_limit, Message) or isinstance(second_limit, Message)) and channels: + raise BadArgument("Both a message limit and channels specified.") + + if isinstance(first_limit, Message) and isinstance(second_limit, Message): + # Messages are not in same channel. + if first_limit.channel != second_limit.channel: + raise BadArgument("Message limits are in different channels.") + + if users and bots_only: + raise BadArgument("Marked as bots only, but users were specified.") + + # This is an implementation error rather than user error. + if second_limit and not first_limit: + raise ValueError("Second limit specified without the first.") + + @staticmethod + async def _send_expiring_message(ctx: Context, content: str) -> None: + """Send `content` to the context channel. Automatically delete if it's not a mod channel.""" + delete_after = None if is_mod_channel(ctx.channel) else MESSAGE_DELETE_DELAY + await ctx.send(content, delete_after=delete_after) + + @staticmethod + def _build_predicate( + bots_only: bool = False, + users: Optional[list[User]] = None, + regex: Optional[re.Pattern] = None, + first_limit: Optional[datetime] = None, + second_limit: Optional[datetime] = None, + ) -> Predicate: + """Return the predicate that decides whether to delete a given message.""" + def predicate_bots_only(message: Message) -> bool: + """Return True if the message was sent by a bot.""" + return message.author.bot + + def predicate_specific_users(message: Message) -> bool: + """Return True if the message was sent by the user provided in the _clean_messages call.""" + return message.author in users + + def predicate_regex(message: Message) -> bool: + """Check if the regex provided in _clean_messages matches the message content or any embed attributes.""" + content = [message.content] + + # Add the content for all embed attributes + for embed in message.embeds: + content.append(embed.title) + content.append(embed.description) + content.append(embed.footer.text) + content.append(embed.author.name) + for field in embed.fields: + content.append(field.name) + content.append(field.value) + + # Get rid of empty attributes and turn it into a string + content = "\n".join(attr for attr in content if attr) + + # Now let's see if there's a regex match + return bool(regex.search(content)) + + def predicate_range(message: Message) -> bool: + """Check if the message age is between the two limits.""" + return first_limit <= message.created_at <= second_limit + + def predicate_after(message: Message) -> bool: + """Check if the message is older than the first limit.""" + return message.created_at >= first_limit + + predicates = [] + # Set up the correct predicate + if bots_only: + predicates.append(predicate_bots_only) # Delete messages from bots + if users: + 
predicates.append(predicate_specific_users) # Delete messages from specific user + if regex: + predicates.append(predicate_regex) # Delete messages that match regex + # Add up to one of the following: + if second_limit: + predicates.append(predicate_range) # Delete messages in the specified age range + elif first_limit: + predicates.append(predicate_after) # Delete messages older than specific message + + if not predicates: + return lambda m: True + if len(predicates) == 1: + return predicates[0] + return lambda m: all(pred(m) for pred in predicates) + + async def _delete_invocation(self, ctx: Context) -> None: + """Delete the command invocation if it's not in a mod channel.""" + if not is_mod_channel(ctx.channel): + self.mod_log.ignore(Event.message_delete, ctx.message.id) + try: + await ctx.message.delete() + except errors.NotFound: + # Invocation message has already been deleted + log.info("Tried to delete invocation message, but it was already deleted.") + + def _get_messages_from_cache(self, traverse: int, to_delete: Predicate) -> tuple[defaultdict[Any, list], list[int]]: + """Helper function for getting messages from the cache.""" + message_mappings = defaultdict(list) + message_ids = [] + for message in islice(self.bot.cached_messages, traverse): + if not self.cleaning: + # Cleaning was canceled + return message_mappings, message_ids + + if to_delete(message): + message_mappings[message.channel].append(message) + message_ids.append(message.id) + + return message_mappings, message_ids + + async def _get_messages_from_channels( + self, + traverse: int, + channels: Iterable[TextChannel], + to_delete: Predicate, + before: Optional[datetime] = None, + after: Optional[datetime] = None + ) -> tuple[defaultdict[Any, list], list]: + message_mappings = defaultdict(list) + message_ids = [] + + for channel in channels: + async for message in channel.history(limit=traverse, before=before, after=after): + + if not self.cleaning: + # Cleaning was canceled, return empty containers. + return defaultdict(list), [] + + if to_delete(message): + message_mappings[message.channel].append(message) + message_ids.append(message.id) + + return message_mappings, message_ids + + @staticmethod + def is_older_than_14d(message: Message) -> bool: + """ + Precisely checks if message is older than 14 days, bulk deletion limit. + + Inspired by how purge works internally. + Comparison on message age could possibly be less accurate which in turn would resort in problems + with message deletion if said messages are very close to the 14d mark. + """ + two_weeks_old_snowflake = int((time.time() - 14 * 24 * 60 * 60) * 1000.0 - 1420070400000) << 22 + return message.id < two_weeks_old_snowflake + + async def _delete_messages_individually(self, messages: list[Message]) -> list[Message]: + """Delete each message in the list unless cleaning is cancelled. Return the deleted messages.""" + deleted = [] + for message in messages: + # Ensure that deletion was not canceled + if not self.cleaning: + return deleted + with contextlib.suppress(NotFound): # Message doesn't exist or was already deleted + await message.delete() + deleted.append(message) + return deleted + + async def _delete_found(self, message_mappings: dict[TextChannel, list[Message]]) -> list[Message]: + """ + Delete the detected messages. + + Deletion is made in bulk per channel for messages less than 14d old. + The function returns the deleted messages. + If cleaning was cancelled in the middle, return messages already deleted. 
+ """ + deleted = [] + for channel, messages in message_mappings.items(): + to_delete = [] + + delete_old = False + for current_index, message in enumerate(messages): # noqa: B007 + if not self.cleaning: + # Means that the cleaning was canceled + return deleted + + if self.is_older_than_14d(message): + # Further messages are too old to be deleted in bulk + delete_old = True + break + + to_delete.append(message) + + if len(to_delete) == 100: + # Only up to 100 messages can be deleted in a bulk + await channel.delete_messages(to_delete) + deleted.extend(to_delete) + to_delete.clear() + + if not self.cleaning: + return deleted + if len(to_delete) > 0: + # Deleting any leftover messages if there are any + await channel.delete_messages(to_delete) + deleted.extend(to_delete) + + if not self.cleaning: + return deleted + if delete_old: + old_deleted = await self._delete_messages_individually(messages[current_index:]) + deleted.extend(old_deleted) + + return deleted + + async def _modlog_cleaned_messages(self, messages: list[Message], channels: CleanChannels, ctx: Context) -> bool: + """Log the deleted messages to the modlog. Return True if logging was successful.""" + if not messages: + # Can't build an embed, nothing to clean! + await self._send_expiring_message(ctx, ":x: No matching messages could be found.") + return False + + # Reverse the list to have reverse chronological order + log_messages = reversed(messages) + log_url = await self.mod_log.upload_log(log_messages, ctx.author.id) + + # Build the embed and send it + if channels == "*": + target_channels = "all channels" + else: + target_channels = ", ".join(channel.mention for channel in channels) + + message = ( + f"**{len(messages)}** messages deleted in {target_channels} by " + f"{ctx.author.mention}\n\n" + f"A log of the deleted messages can be found [here]({log_url})." + ) + + await self.mod_log.send_log_message( + icon_url=Icons.message_bulk_delete, + colour=Colour(Colours.soft_red), + title="Bulk message delete", + text=message, + channel_id=Channels.mod_log, + ) + + return True + + # endregion + + async def _clean_messages( + self, + ctx: Context, + traverse: int, + channels: Optional[CleanChannels], + bots_only: bool = False, + users: Optional[list[User]] = None, + regex: Optional[re.Pattern] = None, + first_limit: Optional[CleanLimit] = None, + second_limit: Optional[CleanLimit] = None, + use_cache: Optional[bool] = True + ) -> None: + """A helper function that does the actual message cleaning.""" + self._validate_input(traverse, channels, bots_only, users, first_limit, second_limit) + + # Are we already performing a clean? + if self.cleaning: + await self._send_expiring_message( + ctx, ":x: Please wait for the currently ongoing clean operation to complete." + ) + return + self.cleaning = True + + # Default to using the invoking context's channel or the channel of the message limit(s). + if not channels: + # Input was validated - if first_limit is a message, second_limit won't point at a different channel. + if isinstance(first_limit, Message): + channels = [first_limit.channel] + elif isinstance(second_limit, Message): + channels = [second_limit.channel] + else: + channels = [ctx.channel] + + if isinstance(first_limit, Message): + first_limit = first_limit.created_at + if isinstance(second_limit, Message): + second_limit = second_limit.created_at + if first_limit and second_limit: + first_limit, second_limit = sorted([first_limit, second_limit]) + + # Needs to be called after standardizing the input. 
+ predicate = self._build_predicate(bots_only, users, regex, first_limit, second_limit) + + # Delete the invocation first + await self._delete_invocation(ctx) + + if channels == "*" and use_cache: + message_mappings, message_ids = self._get_messages_from_cache(traverse=traverse, to_delete=predicate) + else: + deletion_channels = channels + if channels == "*": + deletion_channels = [channel for channel in ctx.guild.channels if isinstance(channel, TextChannel)] + message_mappings, message_ids = await self._get_messages_from_channels( + traverse=traverse, + channels=deletion_channels, + to_delete=predicate, + before=second_limit, + after=first_limit # Remember first is the earlier datetime. + ) + + if not self.cleaning: + # Means that the cleaning was canceled + return + + # Now let's delete the actual messages with purge. + self.mod_log.ignore(Event.message_delete, *message_ids) + deleted_messages = await self._delete_found(message_mappings) + self.cleaning = False + + logged = await self._modlog_cleaned_messages(deleted_messages, channels, ctx) + + if logged and is_mod_channel(ctx.channel): + with suppress(NotFound): # Can happen if the invoker deleted their own messages. + await ctx.message.add_reaction(Emojis.check_mark) + + # region: Commands + + @group(invoke_without_command=True, name="clean", aliases=["clear", "purge"]) + async def clean_group( + self, + ctx: Context, + users: Greedy[User] = None, + traverse: Optional[int] = None, + first_limit: Optional[CleanLimit] = None, + second_limit: Optional[CleanLimit] = None, + use_cache: Optional[bool] = None, + bots_only: Optional[bool] = False, + regex: Optional[Regex] = None, + *, + channels: CleanChannels = None # "Optional" with discord.py silently ignores incorrect input. + ) -> None: + """ + Commands for cleaning messages in channels. + + If arguments are provided, will act as a master command from which all subcommands can be derived. + + \u2003• `users`: A series of user mentions, ID's, or names. + \u2003• `traverse`: The number of messages to look at in each channel. If using the cache, will look at the + first `traverse` messages in the cache. + \u2003• `first_limit` and `second_limit`: A message, a duration delta, or an ISO datetime. + If a message is provided, cleaning will happen in that channel, and channels cannot be provided. + If a limit is provided, multiple channels cannot be provided. + If only one of them is provided, acts as `clean until`. If both are provided, acts as `clean between`. + \u2003• `use_cache`: Whether to use the message cache. + If not provided, will default to False unless an asterisk is used for the channels. + \u2003• `bots_only`: Whether to delete only bots. If specified, users cannot be specified. + \u2003• `regex`: A regex pattern the message must contain to be deleted. + The pattern must be provided enclosed in backticks. + If the pattern contains spaces, it still needs to be enclosed in double quotes on top of that. + \u2003• `channels`: A series of channels to delete in, or an asterisk to delete from all channels. 
+ """ + if not any([traverse, users, first_limit, second_limit, regex, channels]): + await ctx.send_help(ctx.command) + return + + if not traverse: + if first_limit: + traverse = CleanMessages.message_limit + else: + traverse = DEFAULT_TRAVERSE + if use_cache is None: + use_cache = channels == "*" + + await self._clean_messages( + ctx, traverse, channels, bots_only, users, regex, first_limit, second_limit, use_cache + ) + + @clean_group.command(name="user", aliases=["users"]) + async def clean_user( + self, + ctx: Context, + user: User, + traverse: Optional[int] = DEFAULT_TRAVERSE, + use_cache: Optional[bool] = True, + *, + channels: CleanChannels = None + ) -> None: + """Delete messages posted by the provided user, stop cleaning after traversing `traverse` messages.""" + await self._clean_messages(ctx, traverse, users=[user], channels=channels, use_cache=use_cache) + + @clean_group.command(name="all", aliases=["everything"]) + async def clean_all( + self, + ctx: Context, + traverse: Optional[int] = DEFAULT_TRAVERSE, + use_cache: Optional[bool] = True, + *, + channels: CleanChannels = None + ) -> None: + """Delete all messages, regardless of poster, stop cleaning after traversing `traverse` messages.""" + await self._clean_messages(ctx, traverse, channels=channels, use_cache=use_cache) + + @clean_group.command(name="bots", aliases=["bot"]) + async def clean_bots( + self, + ctx: Context, + traverse: Optional[int] = DEFAULT_TRAVERSE, + use_cache: Optional[bool] = True, + *, + channels: CleanChannels = None + ) -> None: + """Delete all messages posted by a bot, stop cleaning after traversing `traverse` messages.""" + await self._clean_messages(ctx, traverse, bots_only=True, channels=channels, use_cache=use_cache) + + @clean_group.command(name="regex", aliases=["word", "expression", "pattern"]) + async def clean_regex( + self, + ctx: Context, + regex: Regex, + traverse: Optional[int] = DEFAULT_TRAVERSE, + use_cache: Optional[bool] = True, + *, + channels: CleanChannels = None + ) -> None: + """ + Delete all messages that match a certain regex, stop cleaning after traversing `traverse` messages. + + The pattern must be provided enclosed in backticks. + If the pattern contains spaces, it still needs to be enclosed in double quotes on top of that. + For example: `[0-9]` + """ + await self._clean_messages(ctx, traverse, regex=regex, channels=channels, use_cache=use_cache) + + @clean_group.command(name="until") + async def clean_until( + self, + ctx: Context, + until: CleanLimit, + channel: TextChannel = None + ) -> None: + """ + Delete all messages until a certain limit. + + A limit can be either a message, and ISO date-time string, or a time delta. + If a message is specified, `channel` cannot be specified. + """ + await self._clean_messages( + ctx, + CleanMessages.message_limit, + channels=[channel] if channel else None, + first_limit=until, + ) + + @clean_group.command(name="between", aliases=["after-until", "from-to"]) + async def clean_between( + self, + ctx: Context, + first_limit: CleanLimit, + second_limit: CleanLimit, + channel: TextChannel = None + ) -> None: + """ + Delete all messages within range. + + The range is specified through two limits. + A limit can be either a message, and ISO date-time string, or a time delta. + + If two messages are specified, they both must be in the same channel. + If a message is specified, `channel` cannot be specified. 
+ """ + await self._clean_messages( + ctx, + CleanMessages.message_limit, + channels=[channel] if channel else None, + first_limit=first_limit, + second_limit=second_limit, + ) + + @clean_group.command(name="stop", aliases=["cancel", "abort"]) + async def clean_cancel(self, ctx: Context) -> None: + """If there is an ongoing cleaning process, attempt to immediately cancel it.""" + if not self.cleaning: + message = ":question: There's no cleaning going on." + else: + self.cleaning = False + message = f"{Emojis.check_mark} Clean interrupted." + + await self._send_expiring_message(ctx, message) + await self._delete_invocation(ctx) + + # endregion + + async def cog_check(self, ctx: Context) -> bool: + """Only allow moderators to invoke the commands in this cog.""" + return await has_any_role(*MODERATION_ROLES).predicate(ctx) + + async def cog_command_error(self, ctx: Context, error: Exception) -> None: + """Safely end the cleaning operation on unexpected errors.""" + self.cleaning = False + + +def setup(bot: Bot) -> None: + """Load the Clean cog.""" + bot.add_cog(Clean(bot)) diff --git a/bot/exts/moderation/defcon.py b/bot/exts/moderation/defcon.py index e38bfd75d3..14db373673 100644 --- a/bot/exts/moderation/defcon.py +++ b/bot/exts/moderation/defcon.py @@ -4,6 +4,7 @@ from enum import Enum from typing import Optional, Union +import arrow from aioredis import RedisError from async_rediscache import RedisCache from dateutil.relativedelta import relativedelta @@ -109,9 +110,9 @@ async def _sync_settings(self) -> None: async def on_member_join(self, member: Member) -> None: """Check newly joining users to see if they meet the account age threshold.""" if self.threshold: - now = datetime.utcnow() + now = arrow.utcnow() - if now - member.created_at.replace(tzinfo=None) < relativedelta_to_timedelta(self.threshold): + if now - member.created_at < relativedelta_to_timedelta(self.threshold): log.info(f"Rejecting user {member}: Account is too new") message_sent = False @@ -254,7 +255,8 @@ async def _update_threshold( expiry_message = "" if expiry: - expiry_message = f" for the next {humanize_delta(relativedelta(expiry, datetime.utcnow()), max_units=2)}" + activity_duration = relativedelta(expiry, arrow.utcnow().datetime) + expiry_message = f" for the next {humanize_delta(activity_duration, max_units=2)}" if self.threshold: channel_message = ( diff --git a/bot/exts/moderation/infraction/_scheduler.py b/bot/exts/moderation/infraction/_scheduler.py index d4e96b10bb..762eb6afa1 100644 --- a/bot/exts/moderation/infraction/_scheduler.py +++ b/bot/exts/moderation/infraction/_scheduler.py @@ -1,9 +1,9 @@ import textwrap import typing as t from abc import abstractmethod -from datetime import datetime from gettext import ngettext +import arrow import dateutil.parser import discord from discord.ext.commands import Context @@ -67,7 +67,7 @@ async def reschedule_infractions(self, supported_infractions: t.Container[str]) # We make sure to fire this if to_schedule: next_reschedule_point = max( - dateutil.parser.isoparse(infr["expires_at"]).replace(tzinfo=None) for infr in to_schedule + dateutil.parser.isoparse(infr["expires_at"]) for infr in to_schedule ) log.trace("Will reschedule remaining infractions at %s", next_reschedule_point) @@ -83,8 +83,8 @@ async def reapply_infraction( """Reapply an infraction if it's still active or deactivate it if less than 60 sec left.""" if infraction["expires_at"] is not None: # Calculate the time remaining, in seconds, for the mute. 
- expiry = dateutil.parser.isoparse(infraction["expires_at"]).replace(tzinfo=None) - delta = (expiry - datetime.utcnow()).total_seconds() + expiry = dateutil.parser.isoparse(infraction["expires_at"]) + delta = (expiry - arrow.utcnow()).total_seconds() else: # If the infraction is permanent, it is not possible to get the time remaining. delta = None @@ -175,13 +175,7 @@ async def apply_infraction( dm_log_text = "\nDM: Sent" end_msg = "" - if infraction["actor"] == self.bot.user.id: - log.trace( - f"Infraction #{id_} actor is bot; including the reason in the confirmation message." - ) - if reason: - end_msg = f" (reason: {textwrap.shorten(reason, width=1500, placeholder='...')})" - elif is_mod_channel(ctx.channel): + if is_mod_channel(ctx.channel): log.trace(f"Fetching total infraction count for {user}.") infractions = await self.bot.api_client.get( @@ -190,6 +184,12 @@ async def apply_infraction( ) total = len(infractions) end_msg = f" (#{id_} ; {total} infraction{ngettext('', 's', total)} total)" + elif infraction["actor"] == self.bot.user.id: + log.trace( + f"Infraction #{id_} actor is bot; including the reason in the confirmation message." + ) + if reason: + end_msg = f" (reason: {textwrap.shorten(reason, width=1500, placeholder='...')})" purge = infraction.get("purge", "") @@ -382,7 +382,7 @@ async def deactivate_infraction( log.info(f"Marking infraction #{id_} as inactive (expired).") - expiry = dateutil.parser.isoparse(expiry).replace(tzinfo=None) if expiry else None + expiry = dateutil.parser.isoparse(expiry) if expiry else None created = time.format_infraction_with_duration(inserted_at, expiry) log_content = None @@ -503,5 +503,5 @@ def schedule_expiration(self, infraction: _utils.Infraction) -> None: At the time of expiration, the infraction is marked as inactive on the website and the expiration task is cancelled. 
""" - expiry = dateutil.parser.isoparse(infraction["expires_at"]).replace(tzinfo=None) + expiry = dateutil.parser.isoparse(infraction["expires_at"]) self.scheduler.schedule_at(expiry, infraction["id"], self.deactivate_infraction(infraction)) diff --git a/bot/exts/moderation/infraction/management.py b/bot/exts/moderation/infraction/management.py index 1f15d9950a..0a33ac5e28 100644 --- a/bot/exts/moderation/infraction/management.py +++ b/bot/exts/moderation/infraction/management.py @@ -1,6 +1,6 @@ import textwrap import typing as t -from datetime import datetime +from datetime import datetime, timezone import dateutil.parser import discord @@ -314,8 +314,11 @@ def infraction_to_string(self, infraction: t.Dict[str, t.Any]) -> str: if expires_at is None: duration = "*Permanent*" else: - date_from = datetime.fromtimestamp(float(time.DISCORD_TIMESTAMP_REGEX.match(created).group(1))) - date_to = dateutil.parser.isoparse(expires_at).replace(tzinfo=None) + date_from = datetime.fromtimestamp( + float(time.DISCORD_TIMESTAMP_REGEX.match(created).group(1)), + timezone.utc + ) + date_to = dateutil.parser.isoparse(expires_at) duration = humanize_delta(relativedelta(date_to, date_from)) lines = textwrap.dedent(f""" diff --git a/bot/exts/moderation/modlog.py b/bot/exts/moderation/modlog.py index fa3925f7cf..462f8533d0 100644 --- a/bot/exts/moderation/modlog.py +++ b/bot/exts/moderation/modlog.py @@ -2,7 +2,7 @@ import difflib import itertools import typing as t -from datetime import datetime +from datetime import datetime, timezone from itertools import zip_longest import discord @@ -58,7 +58,7 @@ async def upload_log( 'bot/deleted-messages', json={ 'actor': actor_id, - 'creation': datetime.utcnow().isoformat(), + 'creation': datetime.now(timezone.utc).isoformat(), 'deletedmessage_set': [ { 'id': message.id, @@ -404,8 +404,8 @@ async def on_member_join(self, member: discord.Member) -> None: if member.guild.id != GuildConstant.id: return - now = datetime.utcnow() - difference = abs(relativedelta(now, member.created_at.replace(tzinfo=None))) + now = datetime.now(timezone.utc) + difference = abs(relativedelta(now, member.created_at)) message = format_user(member) + "\n\n**Account age:** " + humanize_delta(difference) @@ -800,7 +800,10 @@ async def on_thread_update(self, before: Thread, after: Thread) -> None: icon, colour, f"Thread {action}", - f"Thread {after.mention} (`{after.id}`) from {after.parent.mention} (`{after.parent.id}`) was {action}" + ( + f"Thread {after.mention} ({after.name}, `{after.id}`) from {after.parent.mention} " + f"(`{after.parent.id}`) was {action}" + ) ) @Cog.listener() @@ -810,7 +813,10 @@ async def on_thread_delete(self, thread: Thread) -> None: Icons.hash_red, Colours.soft_red, "Thread deleted", - f"Thread {thread.mention} (`{thread.id}`) from {thread.parent.mention} (`{thread.parent.id}`) deleted" + ( + f"Thread {thread.mention} ({thread.name}, `{thread.id}`) from {thread.parent.mention} " + f"(`{thread.parent.id}`) deleted" + ) ) @Cog.listener() @@ -825,7 +831,10 @@ async def on_thread_join(self, thread: Thread) -> None: Icons.hash_green, Colours.soft_green, "Thread created", - f"Thread {thread.mention} (`{thread.id}`) from {thread.parent.mention} (`{thread.parent.id}`) created" + ( + f"Thread {thread.mention} ({thread.name}, `{thread.id}`) from {thread.parent.mention} " + f"(`{thread.parent.id}`) created" + ) ) @Cog.listener() diff --git a/bot/exts/moderation/modpings.py b/bot/exts/moderation/modpings.py index a7ccb81623..f67d8f6624 100644 --- a/bot/exts/moderation/modpings.py 
+++ b/bot/exts/moderation/modpings.py @@ -1,5 +1,6 @@ import datetime +import arrow from async_rediscache import RedisCache from dateutil.parser import isoparse from discord import Embed, Member @@ -57,7 +58,7 @@ async def reschedule_roles(self) -> None: if mod.id not in pings_off: await self.reapply_role(mod) else: - expiry = isoparse(pings_off[mod.id]).replace(tzinfo=None) + expiry = isoparse(pings_off[mod.id]) self._role_scheduler.schedule_at(expiry, mod.id, self.reapply_role(mod)) async def reapply_role(self, mod: Member) -> None: @@ -92,7 +93,7 @@ async def off_command(self, ctx: Context, duration: Expiry) -> None: The duration cannot be longer than 30 days. """ - delta = duration - datetime.datetime.utcnow() + delta = duration - arrow.utcnow() if delta > datetime.timedelta(days=30): await ctx.send(":x: Cannot remove the role for longer than 30 days.") return diff --git a/bot/exts/moderation/voice_gate.py b/bot/exts/moderation/voice_gate.py index 8fdc7c76bb..31799ec733 100644 --- a/bot/exts/moderation/voice_gate.py +++ b/bot/exts/moderation/voice_gate.py @@ -1,7 +1,8 @@ import asyncio from contextlib import suppress -from datetime import datetime, timedelta +from datetime import timedelta +import arrow import discord from async_rediscache import RedisCache from discord import Colour, Member, VoiceState @@ -166,8 +167,7 @@ async def voice_verify(self, ctx: Context, *_) -> None: checks = { "joined_at": ( - ctx.author.joined_at.replace(tzinfo=None) > datetime.utcnow() - - timedelta(days=GateConf.minimum_days_member) + ctx.author.joined_at > arrow.utcnow() - timedelta(days=GateConf.minimum_days_member) ), "total_messages": data["total_messages"] < GateConf.minimum_messages, "voice_banned": data["voice_banned"], diff --git a/bot/exts/moderation/watchchannels/_watchchannel.py b/bot/exts/moderation/watchchannels/_watchchannel.py index 8f97130cae..34d445912b 100644 --- a/bot/exts/moderation/watchchannels/_watchchannel.py +++ b/bot/exts/moderation/watchchannels/_watchchannel.py @@ -298,8 +298,7 @@ async def send_header(self, msg: Message) -> None: message_jump = f"in [#{msg.channel.name}]({msg.jump_url})" footer = f"Added {time_delta} by {actor} | Reason: {reason}" - embed = Embed(description=f"{msg.author.mention} {message_jump}") - embed.set_footer(text=textwrap.shorten(footer, width=256, placeholder="...")) + embed = Embed(description=f"{msg.author.mention} {message_jump}\n\n{footer}") await self.webhook_send(embed=embed, username=msg.author.display_name, avatar_url=msg.author.display_avatar.url) diff --git a/bot/exts/recruitment/talentpool/_review.py b/bot/exts/recruitment/talentpool/_review.py index dcf73c2cb2..d880c524cd 100644 --- a/bot/exts/recruitment/talentpool/_review.py +++ b/bot/exts/recruitment/talentpool/_review.py @@ -8,6 +8,7 @@ from datetime import datetime, timedelta from typing import List, Optional, Union +import arrow from dateutil.parser import isoparse from discord import Embed, Emoji, Member, Message, NoMoreItems, PartialMessage, TextChannel from discord.ext.commands import Context @@ -68,11 +69,11 @@ def schedule_review(self, user_id: int) -> None: log.trace(f"Scheduling review of user with ID {user_id}") user_data = self._pool.cache.get(user_id) - inserted_at = isoparse(user_data['inserted_at']).replace(tzinfo=None) + inserted_at = isoparse(user_data['inserted_at']) review_at = inserted_at + timedelta(days=MAX_DAYS_IN_POOL) # If it's over a day overdue, it's probably an old nomination and shouldn't be automatically reviewed. 
- if datetime.utcnow() - review_at < timedelta(days=1): + if arrow.utcnow() - review_at < timedelta(days=1): self._review_scheduler.schedule_at(review_at, user_id, self.post_review(user_id, update_database=True)) async def post_review(self, user_id: int, update_database: bool) -> None: @@ -347,7 +348,7 @@ async def _previous_nominations_review(self, member: Member) -> Optional[str]: nomination_times = f"{num_entries} times" if num_entries > 1 else "once" rejection_times = f"{len(history)} times" if len(history) > 1 else "once" - end_time = time_since(isoparse(history[0]['ended_at']).replace(tzinfo=None)) + end_time = time_since(isoparse(history[0]['ended_at'])) review = ( f"They were nominated **{nomination_times}** before" diff --git a/bot/exts/utils/internal.py b/bot/exts/utils/internal.py index f54d692b66..e7113c09c6 100644 --- a/bot/exts/utils/internal.py +++ b/bot/exts/utils/internal.py @@ -5,10 +5,10 @@ import textwrap import traceback from collections import Counter -from datetime import datetime from io import StringIO from typing import Any, Optional, Tuple +import arrow import discord from discord.ext.commands import Cog, Context, group, has_any_role, is_owner @@ -29,7 +29,7 @@ def __init__(self, bot: Bot): self.ln = 0 self.stdout = StringIO() - self.socket_since = datetime.utcnow() + self.socket_since = arrow.utcnow() self.socket_event_total = 0 self.socket_events = Counter() @@ -236,7 +236,7 @@ async def eval(self, ctx: Context, *, code: str) -> None: @has_any_role(Roles.admins, Roles.owners, Roles.core_developers) async def socketstats(self, ctx: Context) -> None: """Fetch information on the socket events received from Discord.""" - running_s = (datetime.utcnow() - self.socket_since).total_seconds() + running_s = (arrow.utcnow() - self.socket_since).total_seconds() per_s = self.socket_event_total / running_s diff --git a/bot/exts/utils/ping.py b/bot/exts/utils/ping.py index 43d371d875..9fb5b7b8fe 100644 --- a/bot/exts/utils/ping.py +++ b/bot/exts/utils/ping.py @@ -1,5 +1,4 @@ -from datetime import datetime - +import arrow from aiohttp import client_exceptions from discord import Embed from discord.ext import commands @@ -32,7 +31,7 @@ async def ping(self, ctx: commands.Context) -> None: """ # datetime.datetime objects do not have the "milliseconds" attribute. # It must be converted to seconds before converting to milliseconds. - bot_ping = (datetime.utcnow() - ctx.message.created_at.replace(tzinfo=None)).total_seconds() * 1000 + bot_ping = (arrow.utcnow() - ctx.message.created_at).total_seconds() * 1000 if bot_ping <= 0: bot_ping = "Your clock is out of sync, could not calculate ping." else: diff --git a/bot/exts/utils/reminders.py b/bot/exts/utils/reminders.py index c12932f854..86e4505fad 100644 --- a/bot/exts/utils/reminders.py +++ b/bot/exts/utils/reminders.py @@ -1,7 +1,7 @@ import random import textwrap import typing as t -from datetime import datetime +from datetime import datetime, timezone from operator import itemgetter import discord @@ -52,14 +52,14 @@ async def reschedule_reminders(self) -> None: params={'active': 'true'} ) - now = datetime.utcnow() + now = datetime.now(timezone.utc) for reminder in response: is_valid, *_ = self.ensure_valid_reminder(reminder) if not is_valid: continue - remind_at = isoparse(reminder['expiration']).replace(tzinfo=None) + remind_at = isoparse(reminder['expiration']) # If the reminder is already overdue ... 
if remind_at < now: @@ -144,7 +144,7 @@ async def get_mentionables(self, mention_ids: t.List[int]) -> t.Iterator[Mention def schedule_reminder(self, reminder: dict) -> None: """A coroutine which sends the reminder once the time is reached, and cancels the running task.""" - reminder_datetime = isoparse(reminder['expiration']).replace(tzinfo=None) + reminder_datetime = isoparse(reminder['expiration']) self.scheduler.schedule_at(reminder_datetime, reminder["id"], self.send_reminder(reminder)) async def _edit_reminder(self, reminder_id: int, payload: dict) -> dict: @@ -333,7 +333,7 @@ async def list_reminders(self, ctx: Context) -> None: for content, remind_at, id_, mentions in reminders: # Parse and humanize the time, make it pretty :D - remind_datetime = isoparse(remind_at).replace(tzinfo=None) + remind_datetime = isoparse(remind_at) time = discord_timestamp(remind_datetime, TimestampFormats.RELATIVE) mentions = ", ".join([ diff --git a/bot/monkey_patches.py b/bot/monkey_patches.py index e56a19da20..23482f7c37 100644 --- a/bot/monkey_patches.py +++ b/bot/monkey_patches.py @@ -1,5 +1,6 @@ -from datetime import datetime, timedelta +from datetime import timedelta +import arrow from discord import Forbidden, http from discord.ext import commands @@ -38,13 +39,13 @@ def patch_typing() -> None: async def honeybadger_type(self, channel_id: int) -> None: # noqa: ANN001 nonlocal last_403 - if last_403 and (datetime.utcnow() - last_403) < timedelta(minutes=5): + if last_403 and (arrow.utcnow() - last_403) < timedelta(minutes=5): log.warning("Not sending typing event, we got a 403 less than 5 minutes ago.") return try: await original(self, channel_id) except Forbidden: - last_403 = datetime.utcnow() + last_403 = arrow.utcnow() log.warning("Got a 403 from typing event!") pass diff --git a/bot/utils/channel.py b/bot/utils/channel.py index b9e2348578..954a10e562 100644 --- a/bot/utils/channel.py +++ b/bot/utils/channel.py @@ -1,3 +1,5 @@ +from typing import Union + import discord import bot @@ -16,8 +18,11 @@ def is_help_channel(channel: discord.TextChannel) -> bool: return any(is_in_category(channel, category) for category in categories) -def is_mod_channel(channel: discord.TextChannel) -> bool: - """True if `channel` is considered a mod channel.""" +def is_mod_channel(channel: Union[discord.TextChannel, discord.Thread]) -> bool: + """True if channel, or channel.parent for threads, is considered a mod channel.""" + if isinstance(channel, discord.Thread): + channel = channel.parent + if channel.id in constants.MODERATION_CHANNELS: log.trace(f"Channel #{channel} is a configured mod channel") return True diff --git a/bot/utils/checks.py b/bot/utils/checks.py index e7f2cfbdac..1882856843 100644 --- a/bot/utils/checks.py +++ b/bot/utils/checks.py @@ -1,4 +1,3 @@ -import datetime from typing import Callable, Container, Iterable, Optional, Union from discord.ext.commands import ( @@ -137,7 +136,7 @@ async def predicate(cog: Cog, ctx: Context) -> None: return # cooldown logic, taken from discord.py internals - current = ctx.message.created_at.replace(tzinfo=datetime.timezone.utc).timestamp() + current = ctx.message.created_at.timestamp() bucket = buckets.get_bucket(ctx.message) retry_after = bucket.update_rate_limit(current) if retry_after: diff --git a/bot/utils/time.py b/bot/utils/time.py index 8cf7d623be..eaa9b72e9b 100644 --- a/bot/utils/time.py +++ b/bot/utils/time.py @@ -3,6 +3,7 @@ from enum import Enum from typing import Optional, Union +import arrow import dateutil.parser from dateutil.relativedelta 
import relativedelta @@ -67,9 +68,9 @@ def discord_timestamp(timestamp: ValidTimestamp, format: TimestampFormats = Time # Convert each possible timestamp class to an integer. if isinstance(timestamp, datetime.datetime): - timestamp = (timestamp.replace(tzinfo=None) - datetime.datetime.utcfromtimestamp(0)).total_seconds() + timestamp = (timestamp - arrow.get(0)).total_seconds() elif isinstance(timestamp, datetime.date): - timestamp = (timestamp - datetime.date.fromtimestamp(0)).total_seconds() + timestamp = (timestamp - arrow.get(0)).total_seconds() elif isinstance(timestamp, datetime.timedelta): timestamp = timestamp.total_seconds() elif isinstance(timestamp, relativedelta): @@ -124,7 +125,7 @@ def humanize_delta(delta: relativedelta, precision: str = "seconds", max_units: def get_time_delta(time_string: str) -> str: """Returns the time in human-readable time delta format.""" - date_time = dateutil.parser.isoparse(time_string).replace(tzinfo=None) + date_time = dateutil.parser.isoparse(time_string) time_delta = time_since(date_time) return time_delta @@ -157,7 +158,7 @@ def parse_duration_string(duration: str) -> Optional[relativedelta]: def relativedelta_to_timedelta(delta: relativedelta) -> datetime.timedelta: """Converts a relativedelta object to a timedelta object.""" - utcnow = datetime.datetime.utcnow() + utcnow = arrow.utcnow() return utcnow + delta - utcnow @@ -196,8 +197,8 @@ def format_infraction_with_duration( date_to_formatted = format_infraction(date_to) - date_from = date_from or datetime.datetime.utcnow() - date_to = dateutil.parser.isoparse(date_to).replace(tzinfo=None, microsecond=0) + date_from = date_from or datetime.datetime.now(datetime.timezone.utc) + date_to = dateutil.parser.isoparse(date_to).replace(microsecond=0) delta = relativedelta(date_to, date_from) if absolute: @@ -215,15 +216,15 @@ def until_expiration( """ Get the remaining time until infraction's expiration, in a discord timestamp. - Returns a human-readable version of the remaining duration between datetime.utcnow() and an expiry. + Returns a human-readable version of the remaining duration between arrow.utcnow() and an expiry. 
Similar to time_since, except that this function doesn't error on a null input and return null if the expiry is in the paste """ if not expiry: return None - now = datetime.datetime.utcnow() - since = dateutil.parser.isoparse(expiry).replace(tzinfo=None, microsecond=0) + now = arrow.utcnow() + since = dateutil.parser.isoparse(expiry).replace(microsecond=0) if since < now: return None diff --git a/config-default.yml b/config-default.yml index b61d9c99cc..4a85ccc56b 100644 --- a/config-default.yml +++ b/config-default.yml @@ -207,6 +207,7 @@ guild: incidents_archive: 720668923636351037 mod_alerts: 473092532147060736 mods: &MODS 305126844661760000 + mod_meta: 775412552795947058 nominations: 822920136150745168 nomination_voting: 822853512709931008 organisation: &ORGANISATION 551789653284356126 diff --git a/poetry.lock b/poetry.lock index 16c599bd18..d91941d45e 100644 --- a/poetry.lock +++ b/poetry.lock @@ -722,7 +722,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "pycares" -version = "4.0.0" +version = "4.1.2" description = "Python interface for c-ares" category = "main" optional = false @@ -902,7 +902,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" [[package]] name = "rapidfuzz" -version = "1.7.1" +version = "1.8.0" description = "rapid fuzzy string matching" category = "main" optional = false @@ -1114,7 +1114,7 @@ multidict = ">=4.0" [metadata] lock-version = "1.1" python-versions = "3.9.*" -content-hash = "e37923739c35ef349d57e324579acfe304cc7e6fc20ddc54205fc89f171ae94f" +content-hash = "da321f13297501e62dd1eb362eccb586ea1a9c21ddb395e11a91b93a2f92e9d4" [metadata.files] aio-pika = [ @@ -1471,6 +1471,8 @@ lxml = [ {file = "lxml-4.6.3-cp27-cp27m-win_amd64.whl", hash = "sha256:8157dadbb09a34a6bd95a50690595e1fa0af1a99445e2744110e3dca7831c4ee"}, {file = "lxml-4.6.3-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:7728e05c35412ba36d3e9795ae8995e3c86958179c9770e65558ec3fdfd3724f"}, {file = "lxml-4.6.3-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:4bff24dfeea62f2e56f5bab929b4428ae6caba2d1eea0c2d6eb618e30a71e6d4"}, + {file = "lxml-4.6.3-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:64812391546a18896adaa86c77c59a4998f33c24788cadc35789e55b727a37f4"}, + {file = "lxml-4.6.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:c1a40c06fd5ba37ad39caa0b3144eb3772e813b5fb5b084198a985431c2f1e8d"}, {file = "lxml-4.6.3-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:74f7d8d439b18fa4c385f3f5dfd11144bb87c1da034a466c5b5577d23a1d9b51"}, {file = "lxml-4.6.3-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:f90ba11136bfdd25cae3951af8da2e95121c9b9b93727b1b896e3fa105b2f586"}, {file = "lxml-4.6.3-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:4c61b3a0db43a1607d6264166b230438f85bfed02e8cff20c22e564d0faff354"}, @@ -1674,39 +1676,37 @@ py = [ {file = "py-1.10.0.tar.gz", hash = "sha256:21b81bda15b66ef5e1a777a21c4dcd9c20ad3efd0b3f817e7a809035269e1bd3"}, ] pycares = [ - {file = "pycares-4.0.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:db5a533111a3cfd481e7e4fb2bf8bef69f4fa100339803e0504dd5aecafb96a5"}, - {file = "pycares-4.0.0-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:fdff88393c25016f417770d82678423fc7a56995abb2df3d2a1e55725db6977d"}, - {file = "pycares-4.0.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:0aa97f900a7ffb259be77d640006585e2a907b0cd4edeee0e85cf16605995d5a"}, - {file = "pycares-4.0.0-cp36-cp36m-manylinux2010_i686.whl", hash 
= "sha256:a34b0e3e693dceb60b8a1169668d606c75cb100ceba0a2df53c234a0eb067fbc"}, - {file = "pycares-4.0.0-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:7661d6bbd51a337e7373cb356efa8be9b4655fda484e068f9455e939aec8d54e"}, - {file = "pycares-4.0.0-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:57315b8eb8fdbc56b3ad4932bc4b17132bb7c7fd2bd590f7fb84b6b522098aa9"}, - {file = "pycares-4.0.0-cp36-cp36m-win32.whl", hash = "sha256:dca9dc58845a9d083f302732a3130c68ded845ad5d463865d464e53c75a3dd45"}, - {file = "pycares-4.0.0-cp36-cp36m-win_amd64.whl", hash = "sha256:c95c964d5dd307e104b44b193095c67bb6b10c9eda1ffe7d44ab7a9e84c476d9"}, - {file = "pycares-4.0.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:26e67e4f81c80a5955dcf6193f3d9bee3c491fc0056299b383b84d792252fba4"}, - {file = "pycares-4.0.0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:cd3011ffd5e1ad55880f7256791dbab9c43ebeda260474a968f19cd0319e1aef"}, - {file = "pycares-4.0.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:1b959dd5921d207d759d421eece1b60416df33a7f862465739d5f2c363c2f523"}, - {file = "pycares-4.0.0-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:6f258c1b74c048a9501a25f732f11b401564005e5e3c18f1ca6cad0c3dc0fb19"}, - {file = "pycares-4.0.0-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:b17ef48729786e62b574c6431f675f4cb02b27691b49e7428a605a50cd59c072"}, - {file = "pycares-4.0.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:82b3259cb590ddd107a6d2dc52da2a2e9a986bf242e893d58c786af2f8191047"}, - {file = "pycares-4.0.0-cp37-cp37m-win32.whl", hash = "sha256:4876fc790ae32832ae270c4a010a1a77e12ddf8d8e6ad70ad0b0a9d506c985f7"}, - {file = "pycares-4.0.0-cp37-cp37m-win_amd64.whl", hash = "sha256:f60c04c5561b1ddf85ca4e626943cc09d7fb684e1adb22abb632095415a40fd7"}, - {file = "pycares-4.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:615406013cdcd1b445e5d1a551d276c6200b3abe77e534f8a7f7e1551208d14f"}, - {file = "pycares-4.0.0-cp38-cp38-manylinux1_i686.whl", hash = "sha256:6580aef5d1b29a88c3d72fe73c691eacfd454f86e74d3fdd18f4bad8e8def98b"}, - {file = "pycares-4.0.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:8ebb3ba0485f66cae8eed7ce3e9ed6f2c0bfd5e7319d5d0fbbb511064f17e1d4"}, - {file = "pycares-4.0.0-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:c5362b7690ca481440f6b98395ac6df06aa50518ccb183c560464d1e5e2ab5d4"}, - {file = "pycares-4.0.0-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:eb60be66accc9a9ea1018b591a1f5800cba83491d07e9acc8c56bc6e6607ab54"}, - {file = "pycares-4.0.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:44896d6e191a6b5a914dbe3aa7c748481bf6ad19a9df33c1e76f8f2dc33fc8f0"}, - {file = "pycares-4.0.0-cp38-cp38-win32.whl", hash = "sha256:09b28fc7bc2cc05f7f69bf1636ddf46086e0a1837b62961e2092fcb40477320d"}, - {file = "pycares-4.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:d4a5081e232c1d181883dcac4675807f3a6cf33911c4173fbea00c0523687ed4"}, - {file = "pycares-4.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:103353577a6266a53e71bfee4cf83825f1401fefa60f0fb8bdec35f13be6a5f2"}, - {file = "pycares-4.0.0-cp39-cp39-manylinux1_i686.whl", hash = "sha256:ad6caf580ee69806fc6534be93ddbb6e99bf94296d79ab351c37b2992b17abfd"}, - {file = "pycares-4.0.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:3d5e50c95849f6905d2a9dbf02ed03f82580173e3c5604a39e2ad054185631f1"}, - {file = "pycares-4.0.0-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:53bc4f181b19576499b02cea4b45391e8dcbe30abd4cd01492f66bfc15615a13"}, - {file = "pycares-4.0.0-cp39-cp39-manylinux2010_x86_64.whl", hash = 
"sha256:d52f9c725d2a826d5ffa37681eb07ffb996bfe21788590ef257664a3898fc0b5"}, - {file = "pycares-4.0.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:3c7fb8d34ee11971c39acfaf98d0fac66725385ccef3bfe1b174c92b210e1aa4"}, - {file = "pycares-4.0.0-cp39-cp39-win32.whl", hash = "sha256:e9773e07684a55f54657df05237267611a77b294ec3bacb5f851c4ffca38a465"}, - {file = "pycares-4.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:38e54037f36c149146ff15f17a4a963fbdd0f9871d4a21cd94ff9f368140f57e"}, - {file = "pycares-4.0.0.tar.gz", hash = "sha256:d0154fc5753b088758fbec9bc137e1b24bb84fc0c6a09725c8bac25a342311cd"}, + {file = "pycares-4.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:71b99b9e041ae3356b859822c511f286f84c8889ec9ed1fbf6ac30fb4da13e4c"}, + {file = "pycares-4.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c000942f5fc64e6e046aa61aa53b629b576ba11607d108909727c3c8f211a157"}, + {file = "pycares-4.1.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:b0e50ddc78252f2e2b6b5f2c73e5b2449dfb6bea7a5a0e21dfd1e2bcc9e17382"}, + {file = "pycares-4.1.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6831e963a910b0a8cbdd2750ffcdf5f2bb0edb3f53ca69ff18484de2cc3807c4"}, + {file = "pycares-4.1.2-cp310-cp310-win32.whl", hash = "sha256:ad7b28e1b6bc68edd3d678373fa3af84e39d287090434f25055d21b4716b2fc6"}, + {file = "pycares-4.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:27a6f09dbfb69bb79609724c0f90dfaa7c215876a7cd9f12d585574d1f922112"}, + {file = "pycares-4.1.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:e5a060f5fa90ae245aa99a4a8ad13ec39c2340400de037c7e8d27b081e1a3c64"}, + {file = "pycares-4.1.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:056330275dea42b7199494047a745e1d9785d39fb8c4cd469dca043532240b80"}, + {file = "pycares-4.1.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0aa897543a786daba74ec5e19638bd38b2b432d179a0e248eac1e62de5756207"}, + {file = "pycares-4.1.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cbceaa9b2c416aa931627466d3240aecfc905c292c842252e3d77b8630072505"}, + {file = "pycares-4.1.2-cp36-cp36m-win32.whl", hash = "sha256:112e1385c451069112d6b5ea1f9c378544f3c6b89882ff964e9a64be3336d7e4"}, + {file = "pycares-4.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:c6680f7fdc0f1163e8f6c2a11d11b9a0b524a61000d2a71f9ccd410f154fb171"}, + {file = "pycares-4.1.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:58a41a2baabcd95266db776c510d349d417919407f03510fc87ac7488730d913"}, + {file = "pycares-4.1.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a810d01c9a426ee8b0f36969c2aef5fb966712be9d7e466920beb328cd9cefa3"}, + {file = "pycares-4.1.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:b266cec81dcea2c3efbbd3dda00af8d7eb0693ae9e47e8706518334b21f27d4a"}, + {file = "pycares-4.1.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8319afe4838e09df267c421ca93da408f770b945ec6217dda72f1f6a493e37e4"}, + {file = "pycares-4.1.2-cp37-cp37m-win32.whl", hash = "sha256:4d5da840aa0d9b15fa51107f09270c563a348cb77b14ae9653d0bbdbe326fcc2"}, + {file = "pycares-4.1.2-cp37-cp37m-win_amd64.whl", hash = 
"sha256:5632f21d92cc0225ba5ff906e4e5dec415ef0b3df322c461d138190681cd5d89"}, + {file = "pycares-4.1.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8fd1ff17a26bb004f0f6bb902ba7dddd810059096ae0cc3b45e4f5be46315d19"}, + {file = "pycares-4.1.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:439799be4b7576e907139a7f9b3c8a01b90d3e38af4af9cd1fc6c1ee9a42b9e6"}, + {file = "pycares-4.1.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:40079ed58efa91747c50aac4edf8ecc7e570132ab57dc0a4030eb0d016a6cab8"}, + {file = "pycares-4.1.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4e190471a015f8225fa38069617192e06122771cce2b169ac7a60bfdbd3d4ab2"}, + {file = "pycares-4.1.2-cp38-cp38-win32.whl", hash = "sha256:2b837315ed08c7df009b67725fe1f50489e99de9089f58ec1b243dc612f172aa"}, + {file = "pycares-4.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:c7eba3c8354b730a54d23237d0b6445a2f68570fa68d0848887da23a3f3b71f3"}, + {file = "pycares-4.1.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2f5f84fe9f83eab9cd68544b165b74ba6e3412d029cc9ab20098d9c332869fc5"}, + {file = "pycares-4.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569eef8597b5e02b1bc4644b9f272160304d8c9985357d7ecfcd054da97c0771"}, + {file = "pycares-4.1.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:e1489aa25d14dbf7176110ead937c01176ed5a0ebefd3b092bbd6b202241814c"}, + {file = "pycares-4.1.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:dc942692fca0e27081b7bb414bb971d34609c80df5e953f6d0c62ecc8019acd9"}, + {file = "pycares-4.1.2-cp39-cp39-win32.whl", hash = "sha256:ed71dc4290d9c3353945965604ef1f6a4de631733e9819a7ebc747220b27e641"}, + {file = "pycares-4.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:ec00f3594ee775665167b1a1630edceefb1b1283af9ac57480dba2fb6fd6c360"}, + {file = "pycares-4.1.2.tar.gz", hash = "sha256:03490be0e7b51a0c8073f877bec347eff31003f64f57d9518d419d9369452837"}, ] pycodestyle = [ {file = "pycodestyle-2.7.0-py2.py3-none-any.whl", hash = "sha256:514f76d918fcc0b55c6680472f0a37970994e07bbb80725808c17089be302068"}, @@ -1792,57 +1792,64 @@ pyyaml = [ {file = "PyYAML-5.4.1.tar.gz", hash = "sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e"}, ] rapidfuzz = [ - {file = "rapidfuzz-1.7.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:1ca9888e867aed2bb8d51571270e5f8393d718bb189fe1a7c0b047b8fd72bad3"}, - {file = "rapidfuzz-1.7.1-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:f336cd32a2a72eb9d7694618c9065ef3a2af330ab7e54bc0ec69d3b2eb08080e"}, - {file = "rapidfuzz-1.7.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:76124767ac3d3213a1aad989f80b156b225defef8addc825a5b631d3164c3213"}, - {file = "rapidfuzz-1.7.1-cp27-cp27m-win32.whl", hash = "sha256:c1090deb95e5369fff47c223c0ed3472644efc56817e288ebeaaa34822a1235c"}, - {file = "rapidfuzz-1.7.1-cp27-cp27m-win_amd64.whl", hash = "sha256:83f94c89e8f16679e0def3c7afa6c9ba477d837fd01250d6a1e3fea12267ce24"}, - {file = "rapidfuzz-1.7.1-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:cdd5962bd009b1457e280b5619d312cd6305b5b8afeff6c27869f98fee839c36"}, - {file = "rapidfuzz-1.7.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:2940960e212b66f00fc58f9b4a13e6f80221141dcbaee9c51f97e0a1f30ff1ab"}, - {file = "rapidfuzz-1.7.1-cp35-cp35m-macosx_10_9_x86_64.whl", hash = 
"sha256:5ed4304a91043d27b92fe9af5eb87d1586548da6d03cbda5bbc98b00fee227cb"}, - {file = "rapidfuzz-1.7.1-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:be18495bd84bf2bd3e888270a3cd4dea868ff4b9b8ec6e540f0e195cda554140"}, - {file = "rapidfuzz-1.7.1-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:d5779e6f548b6f3edfbdfbeeda4158286684dcb2bae3515ce68c510ea48e1b4d"}, - {file = "rapidfuzz-1.7.1-cp35-cp35m-win32.whl", hash = "sha256:80d780c4f6da08eb6801489df54fdbdc5ef2b882bd73f9585ef6e0cf09f1690d"}, - {file = "rapidfuzz-1.7.1-cp35-cp35m-win_amd64.whl", hash = "sha256:3b205c63b8606c2b8595ba8403a8c3ebd39de9f7f44631a2f651f3efe106ae9a"}, - {file = "rapidfuzz-1.7.1-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:8f96588a8a7d021debb4c60d82b15a80995daa99159bbeddd8a37f68f75ee06c"}, - {file = "rapidfuzz-1.7.1-cp36-cp36m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b8139116a937691dde17f27aafe774647808339305f4683b3a6d9bae6518aa2a"}, - {file = "rapidfuzz-1.7.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba574801c8410cc1f2d690ef65f898f6a660bba22ec8213e0f34dd0f0590bc71"}, - {file = "rapidfuzz-1.7.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2d5194e3cb638af0cc7c02daa61cef07e332fd3f790ec113006302131be9afa6"}, - {file = "rapidfuzz-1.7.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dd9d8eaae888b966422cbcba954390a63b4933d8c513ea0056fd6e42d421d08"}, - {file = "rapidfuzz-1.7.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3725c61b9cf57b6b7a765b92046e7d9e5ccce845835b523954b410a70dc32692"}, - {file = "rapidfuzz-1.7.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:e417961e5ca450d6c7448accc5a7e4e9ab0dd3c63729f76215d5e672785920fc"}, - {file = "rapidfuzz-1.7.1-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:26d756284c8c6274b5d558e759415bfb4016fcdf168159b34702c346875d8cc0"}, - {file = "rapidfuzz-1.7.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4887766f0dcc5df43fe4315df4b3c642829e06dc60d5bcb5e682fb76657e8ed1"}, - {file = "rapidfuzz-1.7.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ec0a29671d59998b97998b757ab1c636dd3b7721eda41746ae897abe709681a9"}, - {file = "rapidfuzz-1.7.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dff55750fecd8c0f07bc199e48427c86873be2d0e6a3a80df98972847287f5d3"}, - {file = "rapidfuzz-1.7.1-cp37-cp37m-win32.whl", hash = "sha256:e113f741bb18b0ddd14d714d80ce9c6d5322724f3023b920708e82491e7aef28"}, - {file = "rapidfuzz-1.7.1-cp37-cp37m-win_amd64.whl", hash = "sha256:ef20654be0aed240ee44c98ce02639c37422adc3e144d28c4b6d3da043d9fd20"}, - {file = "rapidfuzz-1.7.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:9e27eb57745a4d2a390b056f6f490b712c2f54250c5d2c794dd76062065a8aef"}, - {file = "rapidfuzz-1.7.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:de2b0ebb67ee0b78973141dba91f574a325a3425664dbdbad37fd7aca7b28cab"}, - {file = "rapidfuzz-1.7.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:88c65d91dcd3c0595112d16555536c60ac5bcab1a43e517e155a242a39525057"}, - {file = "rapidfuzz-1.7.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:afd525a9b593cc1099f0210e116bcb4d9fc5585728d7bd929e6a4133dacd2d59"}, - {file = "rapidfuzz-1.7.1-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e6d77f104a8d67c01ae4248ced6f0d4ef05e63931afdf49c20decf962318877f"}, - {file = 
"rapidfuzz-1.7.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7db9d6ad0ab80e9e0f66f157b8e31b1d04ce5fa767b936ca1c212b98092572b1"}, - {file = "rapidfuzz-1.7.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0195c57f4beea0e7691594f59faf62a4be3c818c1955a8b9b712f37adc479d2d"}, - {file = "rapidfuzz-1.7.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9ffca8c8b74d12cd36c051e9befa7c4eb2d34624ce71f22dbfc659af15bf4a1e"}, - {file = "rapidfuzz-1.7.1-cp38-cp38-win32.whl", hash = "sha256:234cb75aa1e21cabad6a8c0718f84e2bfafdd4756b5232d5739545f97e343e59"}, - {file = "rapidfuzz-1.7.1-cp38-cp38-win_amd64.whl", hash = "sha256:058977e93ab736071fcd8828fc6289ec026e9ca4a19f2a0967f9260e63910da8"}, - {file = "rapidfuzz-1.7.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:9d02bb0724326826b1884cc9b9d9fd97ac352c18213f45e465a39ef069a33115"}, - {file = "rapidfuzz-1.7.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:212d6fa5b824aaa49a921c81d7cdc1d079b3545a30563ae14dc88e17918e76bf"}, - {file = "rapidfuzz-1.7.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a0cd8117deba10e2a1d6dccb6ff44a4c737adda3048dc45860c5f53cf64db14f"}, - {file = "rapidfuzz-1.7.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:61faa47b6b5d5a0cbe9fa6369df44d3f9435c4cccdb4d38d9de437f18b69dc4d"}, - {file = "rapidfuzz-1.7.1-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1daa756be52a7ee60d553ba667cda3a188ee811c92a9c21df43a4cdadb1eb8ca"}, - {file = "rapidfuzz-1.7.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c98ac10782dadf507e922963c8b8456a79151b4f10dbb08cfc86c1572db366dc"}, - {file = "rapidfuzz-1.7.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:358d80061ca107df6c3e1f67fa7af0f94a62827cb9c44ac09a16e78b38f7c3d5"}, - {file = "rapidfuzz-1.7.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a5f90fc31d54fcd74a97d175892555786a8214a3cff43077463915b8a45a191d"}, - {file = "rapidfuzz-1.7.1-cp39-cp39-win32.whl", hash = "sha256:55dffdcdccea6f077a4f09164039411f01f621633be5883c58ceaf94f007a688"}, - {file = "rapidfuzz-1.7.1-cp39-cp39-win_amd64.whl", hash = "sha256:d712a7f680d2074b587650f81865ca838c04fcc6b77c9d2d742de0853aaa24ce"}, - {file = "rapidfuzz-1.7.1-pp27-pypy_73-macosx_10_9_x86_64.whl", hash = "sha256:729d73a8db5a2b444a19d4aa2be009b2e628d207d7c754f6d280e3c6a59b94cb"}, - {file = "rapidfuzz-1.7.1-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:a1cabbc645395b6175cad79164d9ec621866a004b476e44cac534020b9f6bddb"}, - {file = "rapidfuzz-1.7.1-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:ae697294f456f7f76e5bd30db5a65e8b855e7e09f9a65e144efa1e2c5009553c"}, - {file = "rapidfuzz-1.7.1-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e8ae51c1cf1f034f15216fec2e1eef658c8b3a9cbdcc1a053cc7133ede9d616d"}, - {file = "rapidfuzz-1.7.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:dccc072f2a0eeb98d46a79427ef793836ebc5184b1fe544b34607be10705ddc3"}, - {file = "rapidfuzz-1.7.1.tar.gz", hash = "sha256:99495c679174b2a02641f7dc2364a208135cacca77fc4825a86efbfe1e23b0ff"}, + {file = "rapidfuzz-1.8.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:91f094562c683802e6c972bce27a692dad70d6cd1114e626b29d990c3704c653"}, + {file = "rapidfuzz-1.8.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:4a20682121e245cf5ad2dbdd771360763ea11b77520632a1034c4bb9ad1e854c"}, + {file = 
"rapidfuzz-1.8.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:8810e75d8f9c4453bbd6209c372bf97514359b0b5efff555caf85b15f8a9d862"}, + {file = "rapidfuzz-1.8.0-cp27-cp27m-win32.whl", hash = "sha256:00cf713d843735b5958d87294f08b05c653a593ced7c4120be34f5d26d7a320a"}, + {file = "rapidfuzz-1.8.0-cp27-cp27m-win_amd64.whl", hash = "sha256:2baca64e23a623e077f57e5470de21af2765af15aa1088676eb2d475e664eed0"}, + {file = "rapidfuzz-1.8.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:9bf7a6c61bacedd84023be356e057e1d209dd6997cfaa3c1cee77aa21d642f88"}, + {file = "rapidfuzz-1.8.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:61b6434e3341ca5158ecb371b1ceb4c1f6110563a72d28bdce4eb2a084493e47"}, + {file = "rapidfuzz-1.8.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:e425e690383f6cf308e8c2e8d630fa9596f67d233344efd8fae11e70a9f5635f"}, + {file = "rapidfuzz-1.8.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:93db5e693b76d616b09df27ca5c79e0dda169af7f1b8f5ab3262826d981e37e2"}, + {file = "rapidfuzz-1.8.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a8c4f76ed1c8a65892d98dc2913027c9acdb219d18f3a441cfa427a32861af9"}, + {file = "rapidfuzz-1.8.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:71e217fd30901214cc96c0c15057278bafb7072aa9b2be4c97459c1fedf3e731"}, + {file = "rapidfuzz-1.8.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d579dd447b8e851462e79054b68f94b66b09df8b3abb2aa5ca07fe00912ef5e8"}, + {file = "rapidfuzz-1.8.0-cp310-cp310-win32.whl", hash = "sha256:5808064555273496dcd594d659bd28ee8d399149dd31575321034424455dc955"}, + {file = "rapidfuzz-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:798fef1671ca66c78b47802228e9583f7ab32b99bdfe3984ebb1f96e93e38b5f"}, + {file = "rapidfuzz-1.8.0-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:c9e0ed210831f5c73533bf11099ea7897db491e76c3443bef281d9c1c67d7f3a"}, + {file = "rapidfuzz-1.8.0-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:c819bb19eb615a31ddc9cb8248a285bf04f58158b53ce096451178631f99b652"}, + {file = "rapidfuzz-1.8.0-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:942ee45564f28ef70320d1229f02dc998bd93e3519c1f3a80f33ce144b51039c"}, + {file = "rapidfuzz-1.8.0-cp35-cp35m-win32.whl", hash = "sha256:7e6ae2e5a3bc9acc51e118f25d32b8efcd431c5d8deb408336dd2ed0f21d087c"}, + {file = "rapidfuzz-1.8.0-cp35-cp35m-win_amd64.whl", hash = "sha256:98901fba67c89ad2506f3946642cf6eb8f489592fb7eb307ebdf8bdb0c4e97f9"}, + {file = "rapidfuzz-1.8.0-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:705e1686f406a0c77ef323cdb7369b7cf9e68f2abfcb83ff5f1e0a5b21f5a534"}, + {file = "rapidfuzz-1.8.0-cp36-cp36m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:da0c5fe5fdbbd74206c1778af6b8c5ff8dfbe2dd04ae12bbe96642b358acefce"}, + {file = "rapidfuzz-1.8.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:535253bc9224215131ae450aad6c9f7ef1b24f15c685045eab2b52511268bd06"}, + {file = "rapidfuzz-1.8.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:acdad83f07d886705fce164b0d1f4e3b56788a205602ed3a7fc8b10ceaf05fbf"}, + {file = "rapidfuzz-1.8.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35097f649831f8375d6c65a237deccac3aceb573aa7fae1e5d3fa942e89de1c8"}, + {file = "rapidfuzz-1.8.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6f4db142e5b4b44314166a90e11603220db659bd2f9c23dd5db402c13eac8eb7"}, + {file = 
"rapidfuzz-1.8.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:19a3f55f27411d68360540484874beda0b428b062596d5f0f141663ef0738bfd"}, + {file = "rapidfuzz-1.8.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:22b4c1a7f6fe29bd8dae49f7d5ab085dc42c3964f1a78b6dca22fdf83b5c9bfa"}, + {file = "rapidfuzz-1.8.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a8bfb2fbc147904b78d5c510ee75dc8704b606e956df23f33a9e89abc03f45c3"}, + {file = "rapidfuzz-1.8.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e6dc5111ebfed2c4f2e4d120a9b280ea13ea4fbb60b6915dd239817b4fc092ed"}, + {file = "rapidfuzz-1.8.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:db5ee2457d97cb967ffe08446a8c595c03fe747fdc2e145266713f9c516d1c4a"}, + {file = "rapidfuzz-1.8.0-cp37-cp37m-win32.whl", hash = "sha256:12c1b78cc15fc26f555a4bf66088d5afb6354b5a5aa149a123f01a15af6c411b"}, + {file = "rapidfuzz-1.8.0-cp37-cp37m-win_amd64.whl", hash = "sha256:693e9579048d8db4ff020715dd6f25aa315fd6445bc94e7400d7a94a227dad27"}, + {file = "rapidfuzz-1.8.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b4fe19df3edcf7de359448b872aec08e6592b4ca2d3df4d8ee57b5812d68bebf"}, + {file = "rapidfuzz-1.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f3670b9df0e1f479637cad1577afca7766a02775dc08c14837cf495c82861d7c"}, + {file = "rapidfuzz-1.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:61d118f36eb942649b0db344f7b7a19ad7e9b5749d831788187eb03b57ce1bfa"}, + {file = "rapidfuzz-1.8.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:fce3a2c8a1d10da12aff4a0d367624e8ae9e15c1b84a5144843681d39be0c355"}, + {file = "rapidfuzz-1.8.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1577ef26e3647ccc4cc9754c34ffaa731639779f4d7779e91a761c72adac093e"}, + {file = "rapidfuzz-1.8.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fec9b7e60fde51990c3b48fc1aa9dba9ac3acaf78f623dbb645a6fe21a9654e"}, + {file = "rapidfuzz-1.8.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b954469d93858bc8b48129bc63fd644382a4df5f3fb1b4b290f48eac1d00a2da"}, + {file = "rapidfuzz-1.8.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:190ba709069a7e5a6b39b7c8bc413a08cfa7f1f4defec5d974c4128b510e0234"}, + {file = "rapidfuzz-1.8.0-cp38-cp38-win32.whl", hash = "sha256:97b2d13d6323649b43d1b113681e4013ba230bd6e9827cc832dcebee447d7250"}, + {file = "rapidfuzz-1.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:81c3091209b75f6611efe2af18834180946d4ce28f41ca8d44fce816187840d2"}, + {file = "rapidfuzz-1.8.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d610afa33e92aa0481a514ffda3ec51ca5df3c684c1c1c795307589c62025931"}, + {file = "rapidfuzz-1.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d976f33ca6b5fabbb095c0a662f5b86baf706184fc24c7f125d4ddb54b8bf036"}, + {file = "rapidfuzz-1.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0f5ca7bca2af598d4ddcf5b93b64b50654a9ff684e6f18d865f6e13fee442b3e"}, + {file = "rapidfuzz-1.8.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6dc2aac5ea6b0306dcd28a6d1a89d35ed2c6ac426f2673ee1b92cf3f1d0fd5cd"}, + {file = "rapidfuzz-1.8.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f145c9831c0454a696a3136a6380ea4e01434e9cc2f2bc10d032864c16d1d0e5"}, + {file = "rapidfuzz-1.8.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:f4ce53291575b56c9d45add73ea013f43bafcea55eee9d5139aa759918d7685f"}, + {file = "rapidfuzz-1.8.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de5773a39c00a0f23cfc5da9e0e5fd0fb512b0ebe23dc7289a38e1f9a4b5cefc"}, + {file = "rapidfuzz-1.8.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87a802e55792bfbe192e2d557f38867dbe3671b49b3d5ecd873859c7460746ba"}, + {file = "rapidfuzz-1.8.0-cp39-cp39-win32.whl", hash = "sha256:9391abf1121df831316222f28cea37397a0f72bd7978f3be6e7da29a7821e4e5"}, + {file = "rapidfuzz-1.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:9eeca1b436042b5523dcf314f5822b1131597898c1d967f140d1917541a8a3d1"}, + {file = "rapidfuzz-1.8.0-pp27-pypy_73-macosx_10_9_x86_64.whl", hash = "sha256:a01f2495aca479b49d3b3a8863d6ba9bea2043447a1ced74ae5ec5270059cbc1"}, + {file = "rapidfuzz-1.8.0-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:b7d4b1a5d16817f8cdb34365c7b58ae22d5cf1b3207720bb2fa0b55968bdb034"}, + {file = "rapidfuzz-1.8.0-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c738d0d7f1744646d48d19b4c775926082bcefebd2460f45ca383a0e882f5672"}, + {file = "rapidfuzz-1.8.0-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0fb9c6078c17c12b52e66b7d0a2a1674f6bbbdc6a76e454c8479b95147018123"}, + {file = "rapidfuzz-1.8.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:1482b385d83670eb069577c9667f72b41eec4f005aee32f1a4ff4e71e88afde2"}, + {file = "rapidfuzz-1.8.0.tar.gz", hash = "sha256:83fff37acf0367314879231264169dcbc5e7de969a94f4b82055d06a7fddab9a"}, ] redis = [ {file = "redis-3.5.3-py2.py3-none-any.whl", hash = "sha256:432b788c4530cfe16d8d943a09d40ca6c16149727e4afe8c2c9d5580c59d9f24"}, diff --git a/pyproject.toml b/pyproject.toml index e227ffaa67..563bf4a274 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -45,7 +45,7 @@ flake8-isort = "~=4.0" pep8-naming = "~=0.9" pre-commit = "~=2.1" taskipy = "~=1.7.0" -pip-licenses = "~=3.5.2" +pip-licenses = "~=3.5.3" python-dotenv = "~=0.17.1" pytest = "~=6.2.4" pytest-cov = "~=2.12.1" diff --git a/tests/bot/test_converters.py b/tests/bot/test_converters.py index ef6c8e19eb..988b3857bc 100644 --- a/tests/bot/test_converters.py +++ b/tests/bot/test_converters.py @@ -1,6 +1,6 @@ -import datetime import re import unittest +from datetime import MAXYEAR, datetime, timezone from unittest.mock import MagicMock, patch from dateutil.relativedelta import relativedelta @@ -17,7 +17,7 @@ def setUpClass(cls): cls.context = MagicMock cls.context.author = 'bob' - cls.fixed_utc_now = datetime.datetime.fromisoformat('2019-01-01T00:00:00') + cls.fixed_utc_now = datetime.fromisoformat('2019-01-01T00:00:00+00:00') async def test_tag_name_converter_for_invalid(self): """TagNameConverter should raise the correct exception for invalid tag names.""" @@ -111,7 +111,7 @@ async def test_duration_converter_for_valid(self): expected_datetime = self.fixed_utc_now + relativedelta(**duration_dict) with patch('bot.converters.datetime') as mock_datetime: - mock_datetime.utcnow.return_value = self.fixed_utc_now + mock_datetime.now.return_value = self.fixed_utc_now with self.subTest(duration=duration, duration_dict=duration_dict): converted_datetime = await converter.convert(self.context, duration) @@ -157,52 +157,53 @@ async def test_duration_converter_for_invalid(self): async def test_duration_converter_out_of_range(self, mock_datetime): """Duration converter should raise BadArgument if datetime raises a ValueError.""" mock_datetime.__add__.side_effect = ValueError - 
mock_datetime.utcnow.return_value = mock_datetime + mock_datetime.now.return_value = mock_datetime - duration = f"{datetime.MAXYEAR}y" + duration = f"{MAXYEAR}y" exception_message = f"`{duration}` results in a datetime outside the supported range." with self.assertRaisesRegex(BadArgument, re.escape(exception_message)): await Duration().convert(self.context, duration) async def test_isodatetime_converter_for_valid(self): """ISODateTime converter returns correct datetime for valid datetime string.""" + utc = timezone.utc test_values = ( # `YYYY-mm-ddTHH:MM:SSZ` | `YYYY-mm-dd HH:MM:SSZ` - ('2019-09-02T02:03:05Z', datetime.datetime(2019, 9, 2, 2, 3, 5)), - ('2019-09-02 02:03:05Z', datetime.datetime(2019, 9, 2, 2, 3, 5)), + ('2019-09-02T02:03:05Z', datetime(2019, 9, 2, 2, 3, 5, tzinfo=utc)), + ('2019-09-02 02:03:05Z', datetime(2019, 9, 2, 2, 3, 5, tzinfo=utc)), # `YYYY-mm-ddTHH:MM:SS±HH:MM` | `YYYY-mm-dd HH:MM:SS±HH:MM` - ('2019-09-02T03:18:05+01:15', datetime.datetime(2019, 9, 2, 2, 3, 5)), - ('2019-09-02 03:18:05+01:15', datetime.datetime(2019, 9, 2, 2, 3, 5)), - ('2019-09-02T00:48:05-01:15', datetime.datetime(2019, 9, 2, 2, 3, 5)), - ('2019-09-02 00:48:05-01:15', datetime.datetime(2019, 9, 2, 2, 3, 5)), + ('2019-09-02T03:18:05+01:15', datetime(2019, 9, 2, 2, 3, 5, tzinfo=utc)), + ('2019-09-02 03:18:05+01:15', datetime(2019, 9, 2, 2, 3, 5, tzinfo=utc)), + ('2019-09-02T00:48:05-01:15', datetime(2019, 9, 2, 2, 3, 5, tzinfo=utc)), + ('2019-09-02 00:48:05-01:15', datetime(2019, 9, 2, 2, 3, 5, tzinfo=utc)), # `YYYY-mm-ddTHH:MM:SS±HHMM` | `YYYY-mm-dd HH:MM:SS±HHMM` - ('2019-09-02T03:18:05+0115', datetime.datetime(2019, 9, 2, 2, 3, 5)), - ('2019-09-02 03:18:05+0115', datetime.datetime(2019, 9, 2, 2, 3, 5)), - ('2019-09-02T00:48:05-0115', datetime.datetime(2019, 9, 2, 2, 3, 5)), - ('2019-09-02 00:48:05-0115', datetime.datetime(2019, 9, 2, 2, 3, 5)), + ('2019-09-02T03:18:05+0115', datetime(2019, 9, 2, 2, 3, 5, tzinfo=utc)), + ('2019-09-02 03:18:05+0115', datetime(2019, 9, 2, 2, 3, 5, tzinfo=utc)), + ('2019-09-02T00:48:05-0115', datetime(2019, 9, 2, 2, 3, 5, tzinfo=utc)), + ('2019-09-02 00:48:05-0115', datetime(2019, 9, 2, 2, 3, 5, tzinfo=utc)), # `YYYY-mm-ddTHH:MM:SS±HH` | `YYYY-mm-dd HH:MM:SS±HH` - ('2019-09-02 03:03:05+01', datetime.datetime(2019, 9, 2, 2, 3, 5)), - ('2019-09-02T01:03:05-01', datetime.datetime(2019, 9, 2, 2, 3, 5)), + ('2019-09-02 03:03:05+01', datetime(2019, 9, 2, 2, 3, 5, tzinfo=utc)), + ('2019-09-02T01:03:05-01', datetime(2019, 9, 2, 2, 3, 5, tzinfo=utc)), # `YYYY-mm-ddTHH:MM:SS` | `YYYY-mm-dd HH:MM:SS` - ('2019-09-02T02:03:05', datetime.datetime(2019, 9, 2, 2, 3, 5)), - ('2019-09-02 02:03:05', datetime.datetime(2019, 9, 2, 2, 3, 5)), + ('2019-09-02T02:03:05', datetime(2019, 9, 2, 2, 3, 5, tzinfo=utc)), + ('2019-09-02 02:03:05', datetime(2019, 9, 2, 2, 3, 5, tzinfo=utc)), # `YYYY-mm-ddTHH:MM` | `YYYY-mm-dd HH:MM` - ('2019-11-12T09:15', datetime.datetime(2019, 11, 12, 9, 15)), - ('2019-11-12 09:15', datetime.datetime(2019, 11, 12, 9, 15)), + ('2019-11-12T09:15', datetime(2019, 11, 12, 9, 15, tzinfo=utc)), + ('2019-11-12 09:15', datetime(2019, 11, 12, 9, 15, tzinfo=utc)), # `YYYY-mm-dd` - ('2019-04-01', datetime.datetime(2019, 4, 1)), + ('2019-04-01', datetime(2019, 4, 1, tzinfo=utc)), # `YYYY-mm` - ('2019-02-01', datetime.datetime(2019, 2, 1)), + ('2019-02-01', datetime(2019, 2, 1, tzinfo=utc)), # `YYYY` - ('2025', datetime.datetime(2025, 1, 1)), + ('2025', datetime(2025, 1, 1, tzinfo=utc)), ) converter = ISODateTime() @@ -210,7 +211,6 @@ async def 
test_isodatetime_converter_for_valid(self): for datetime_string, expected_dt in test_values: with self.subTest(datetime_string=datetime_string, expected_dt=expected_dt): converted_dt = await converter.convert(self.context, datetime_string) - self.assertIsNone(converted_dt.tzinfo) self.assertEqual(converted_dt, expected_dt) async def test_isodatetime_converter_for_invalid(self): diff --git a/tests/bot/utils/test_time.py b/tests/bot/utils/test_time.py index 8edffd1c9e..a3dcbfc0a6 100644 --- a/tests/bot/utils/test_time.py +++ b/tests/bot/utils/test_time.py @@ -72,9 +72,9 @@ def test_format_infraction_with_duration_none_expiry(self): def test_format_infraction_with_duration_custom_units(self): """format_infraction_with_duration should work for custom max_units.""" test_cases = ( - ('3000-12-12T00:01:00Z', datetime(3000, 12, 11, 12, 5, 5), 6, + ('3000-12-12T00:01:00Z', datetime(3000, 12, 11, 12, 5, 5, tzinfo=timezone.utc), 6, ' (11 hours, 55 minutes and 55 seconds)'), - ('3000-11-23T20:09:00Z', datetime(3000, 4, 25, 20, 15), 20, + ('3000-11-23T20:09:00Z', datetime(3000, 4, 25, 20, 15, tzinfo=timezone.utc), 20, ' (6 months, 28 days, 23 hours and 54 minutes)') ) @@ -84,16 +84,21 @@ def test_format_infraction_with_duration_custom_units(self): def test_format_infraction_with_duration_normal_usage(self): """format_infraction_with_duration should work for normal usage, across various durations.""" + utc = timezone.utc test_cases = ( - ('2019-12-12T00:01:00Z', datetime(2019, 12, 11, 12, 0, 5), 2, ' (12 hours and 55 seconds)'), - ('2019-12-12T00:01:00Z', datetime(2019, 12, 11, 12, 0, 5), 1, ' (12 hours)'), - ('2019-12-12T00:00:00Z', datetime(2019, 12, 11, 23, 59), 2, ' (1 minute)'), - ('2019-11-23T20:09:00Z', datetime(2019, 11, 15, 20, 15), 2, ' (7 days and 23 hours)'), - ('2019-11-23T20:09:00Z', datetime(2019, 4, 25, 20, 15), 2, ' (6 months and 28 days)'), - ('2019-11-23T20:58:00Z', datetime(2019, 11, 23, 20, 53), 2, ' (5 minutes)'), - ('2019-11-24T00:00:00Z', datetime(2019, 11, 23, 23, 59, 0), 2, ' (1 minute)'), - ('2019-11-23T23:59:00Z', datetime(2017, 7, 21, 23, 0), 2, ' (2 years and 4 months)'), - ('2019-11-23T23:59:00Z', datetime(2019, 11, 23, 23, 49, 5), 2, + ('2019-12-12T00:01:00Z', datetime(2019, 12, 11, 12, 0, 5, tzinfo=utc), 2, + ' (12 hours and 55 seconds)'), + ('2019-12-12T00:01:00Z', datetime(2019, 12, 11, 12, 0, 5, tzinfo=utc), 1, ' (12 hours)'), + ('2019-12-12T00:00:00Z', datetime(2019, 12, 11, 23, 59, tzinfo=utc), 2, ' (1 minute)'), + ('2019-11-23T20:09:00Z', datetime(2019, 11, 15, 20, 15, tzinfo=utc), 2, + ' (7 days and 23 hours)'), + ('2019-11-23T20:09:00Z', datetime(2019, 4, 25, 20, 15, tzinfo=utc), 2, + ' (6 months and 28 days)'), + ('2019-11-23T20:58:00Z', datetime(2019, 11, 23, 20, 53, tzinfo=utc), 2, ' (5 minutes)'), + ('2019-11-24T00:00:00Z', datetime(2019, 11, 23, 23, 59, 0, tzinfo=utc), 2, ' (1 minute)'), + ('2019-11-23T23:59:00Z', datetime(2017, 7, 21, 23, 0, tzinfo=utc), 2, + ' (2 years and 4 months)'), + ('2019-11-23T23:59:00Z', datetime(2019, 11, 23, 23, 49, 5, tzinfo=utc), 2, ' (9 minutes and 55 seconds)'), (None, datetime(2019, 11, 23, 23, 49, 5), 2, None), )