
Commit

Revert "Revert "Merge PR#8552: Replace datetime.utcnow() with `date…
Browse files Browse the repository at this point in the history
…time.now(tz=timezone.utc)`""

This reverts commit 6ed9298.
makyen committed Mar 25, 2024
1 parent 6ed9298 commit 7de2948
Showing 10 changed files with 36 additions and 36 deletions.
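
The change itself is mechanical: `datetime.utcnow()` returns a naive datetime (tzinfo is None) and is deprecated as of Python 3.12, whereas `datetime.now(tz=timezone.utc)` returns a timezone-aware UTC datetime. A minimal sketch of the difference, illustrative only and not part of the commit:

    from datetime import datetime, timezone

    naive = datetime.utcnow()              # naive: tzinfo is None (deprecated in Python 3.12+)
    aware = datetime.now(tz=timezone.utc)  # aware: tzinfo is timezone.utc

    print(naive.tzinfo)  # None
    print(aware.tzinfo)  # UTC

    # Subtracting two aware datetimes still yields an ordinary timedelta,
    # so existing .total_seconds() call sites keep working unchanged.
    print((datetime.now(tz=timezone.utc) - aware).total_seconds())

    # Mixing the two styles is the main migration hazard:
    # naive - aware (or naive < aware) raises TypeError.
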
10 changes: 5 additions & 5 deletions bodyfetcher.py
@@ -5,7 +5,7 @@
import copy
from itertools import chain
from operator import itemgetter
-from datetime import datetime
+from datetime import datetime, timezone

import requests
import psutil
@@ -182,7 +182,7 @@ def add_to_queue(self, hostname, question_id, should_check_site=False, source=No

# This line only works if we are using a dict in the self.queue[hostname] object, which we
# should be with the previous conversion code.
-self.queue[hostname][str(question_id)] = datetime.utcnow()
+self.queue[hostname][str(question_id)] = datetime.now(tz=timezone.utc)
flovis_dict = None
if GlobalVars.flovis is not None:
flovis_dict = {sk: list(sq.keys()) for sk, sq in self.queue.items()}
@@ -379,7 +379,7 @@ def get_first_queue_item_to_process(self, thread_stats):
return None
self.cpu_starvation_warning_thread_launched()
special_sites = []
-is_time_sensitive_time = datetime.utcnow().hour in range(4, 12)
+is_time_sensitive_time = datetime.now(tz=timezone.utc).hour in range(4, 12)
with self.queue_lock:
sites_in_queue = {site: len(values) for site, values in self.queue.items()}
# Get sites listed in special cases and as time_sensitive
@@ -495,7 +495,7 @@ def make_api_call_for_site(self, site, new_posts, thread_stats):
{'site': site, 'posts': list(new_posts.keys())})

# Add queue timing data
-pop_time = datetime.utcnow()
+pop_time = datetime.now(tz=timezone.utc)
post_add_times = [(pop_time - v).total_seconds() for k, v in new_posts.items()]
Tasks.do(add_queue_timing_data, site, post_add_times)

@@ -570,7 +570,7 @@ def make_api_call_for_site(self, site, new_posts, thread_stats):
if GlobalVars.api_backoff_time > time.time():
time.sleep(GlobalVars.api_backoff_time - time.time() + 2)
try:
-time_request_made = datetime.utcnow().strftime('%H:%M:%S')
+time_request_made = datetime.now(tz=timezone.utc).strftime('%H:%M:%S')
response = requests.get(url, params=params, timeout=20).json()
response_timestamp = time.time()
except (requests.exceptions.Timeout, requests.ConnectionError, Exception):
6 changes: 3 additions & 3 deletions chatcommands.py
@@ -7,7 +7,7 @@
from globalvars import GlobalVars
import findspam
# noinspection PyUnresolvedReferences
-from datetime import datetime
+from datetime import datetime, timezone
from apigetpost import api_get_post, PostData
import datahandling
from datahandling import *
@@ -849,7 +849,7 @@ def wut():
def hats():
wb_start = datetime(2018, 12, 12, 0, 0, 0)
wb_end = datetime(2019, 1, 2, 0, 0, 0)
-now = datetime.utcnow()
+now = datetime.now(tz=timezone.utc)
return_string = ""
if wb_start > now:
diff = wb_start - now
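
Note that `wb_start` and `wb_end` stay naive `datetime` literals in this hunk while `now` becomes aware; Python raises TypeError when ordering or subtracting a naive and an aware datetime, so constants compared against `now` generally need a tzinfo as well. A hedged sketch of the aware form, shown only as an assumption for illustration and not something this commit does:

    from datetime import datetime, timezone

    # Hypothetical aware constants; the commit leaves the originals naive.
    wb_start = datetime(2018, 12, 12, tzinfo=timezone.utc)
    wb_end = datetime(2019, 1, 2, tzinfo=timezone.utc)
    now = datetime.now(tz=timezone.utc)

    # Both operands are aware, so ordering and subtraction are well defined.
    if wb_start > now:
        diff = wb_start - now
        print("starts in", diff)
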
@@ -1421,7 +1421,7 @@ def status():
Returns the amount of time the application has been running
:return: A string
"""
-now = datetime.utcnow()
+now = datetime.now(tz=timezone.utc)
diff = now - GlobalVars.startup_utc_date

return 'Running since {time} UTC ({relative})'.format(time=GlobalVars.startup_utc, relative=td_format(diff))
6 changes: 3 additions & 3 deletions chatexchange_extension.py
@@ -1,16 +1,16 @@
# coding=utf-8
from chatexchange import client, events, rooms
import sys
-from datetime import datetime
+from datetime import datetime, timezone
from helpers import log


class Room(rooms.Room):
def watch_socket(self, event_callback):
-self._client.last_activity = datetime.utcnow()
+self._client.last_activity = datetime.now(tz=timezone.utc)

def on_activity(activity):
-self._client.last_activity = datetime.utcnow()
+self._client.last_activity = datetime.now(tz=timezone.utc)

for event in self._events_from_activity(activity, self.id):
if isinstance(event, events.MessageEdited):
4 changes: 2 additions & 2 deletions datahandling.py
@@ -4,7 +4,7 @@
import sys
import zlib
import base64
-from datetime import datetime
+from datetime import datetime, timezone
import json
import time
import math
@@ -249,7 +249,7 @@ def is_code_privileged(site, user_id):


def update_reason_weights():
-d = {'last_updated': datetime.utcnow().date()}
+d = {'last_updated': datetime.now(tz=timezone.utc).date()}
items = metasmoke.Metasmoke.get_reason_weights()
if not items:
return # No update
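
`.date()` on an aware datetime returns a plain `datetime.date`, which carries no tzinfo, so the stored `last_updated` value and any later date comparisons are unaffected by the switch. A small illustrative sketch:

    from datetime import datetime, timezone

    d = {'last_updated': datetime.now(tz=timezone.utc).date()}
    print(type(d['last_updated']))  # <class 'datetime.date'>
    # date objects have no tzinfo, so equality/inequality checks against
    # other dates behave exactly as they did with datetime.utcnow().date().
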
4 changes: 2 additions & 2 deletions excepthook.py
@@ -1,5 +1,5 @@
# coding=utf-8
-from datetime import datetime
+from datetime import datetime, timezone
import os
import traceback
import threading
@@ -13,7 +13,7 @@

# noinspection PyProtectedMember
def uncaught_exception(exctype, value, tb):
-delta = datetime.utcnow() - GlobalVars.startup_utc_date
+delta = datetime.now(tz=timezone.utc) - GlobalVars.startup_utc_date
log_exception(exctype, value, tb)
if delta.total_seconds() < 180 and exctype not in \
{KeyboardInterrupt, SystemExit, requests.ConnectionError, WebSocketConnectionClosedException}:
12 changes: 6 additions & 6 deletions findspam.py
@@ -7,7 +7,7 @@
from urllib.parse import urlparse, unquote_plus
from itertools import chain
from collections import Counter
-from datetime import datetime
+from datetime import datetime, timezone
from string import punctuation
import time
import os
@@ -1358,7 +1358,7 @@ def purge_cache(cachevar, limit):
'''
oldest = sorted(cachevar, key=lambda k: cachevar[k]['timestamp'])[0:limit + 1]
remaining = oldest.pop()
-now = datetime.utcnow()
+now = datetime.now(tz=timezone.utc)
log('debug', 'purge_cache({0}): age of oldest entry is {1}'.format(
limit, now - cachevar[oldest[0]]['timestamp']))
log('debug', 'purge_cache({0}): oldest remaining entry is {1}'.format(
@@ -1372,7 +1372,7 @@ def purge_cache(cachevar, limit):
def dns_query(label, qtype):
# If there's no cache then assume *now* is important
try:
-starttime = datetime.utcnow()
+starttime = datetime.now(tz=timezone.utc)
# Extend lifetime if we are running a test
extra_params = dict()
if "pytest" in sys.modules:
@@ -1382,11 +1382,11 @@ def dns_query(label, qtype):
if str(exc).startswith('None of DNS query names exist:'):
log('debug', 'DNS label {0} not found; skipping'.format(label))
else:
-endtime = datetime.utcnow()
+endtime = datetime.now(tz=timezone.utc)
log('warning', 'DNS error {0} (duration: {1})'.format(
exc, endtime - starttime))
return None
-endtime = datetime.utcnow()
+endtime = datetime.now(tz=timezone.utc)
return answer


@@ -1652,7 +1652,7 @@ def post_links(post):
log('debug', 'LINK_CACHE purged')

linkset = set(links)
-LINK_CACHE[post] = {'links': linkset, 'timestamp': datetime.utcnow()}
+LINK_CACHE[post] = {'links': linkset, 'timestamp': datetime.now(tz=timezone.utc)}
return linkset


8 changes: 4 additions & 4 deletions globalvars.py
@@ -3,7 +3,7 @@
import sys
import os
from collections import namedtuple
-from datetime import datetime
+from datetime import datetime, timezone
from html.parser import HTMLParser
from html import unescape
from hashlib import md5
@@ -81,7 +81,7 @@ class GlobalVars:
watched_keywords = {}
ignored_posts = []
auto_ignored_posts = []
-startup_utc_date = datetime.utcnow()
+startup_utc_date = datetime.now(tz=timezone.utc)
startup_utc = startup_utc_date.strftime("%H:%M:%S")
latest_questions = []
latest_questions_lock = threading.Lock()
@@ -268,7 +268,7 @@ def _reset(stats_set_key):
""" Resets/clears/creates post scanning data in a stats set without getting the rw_lock """
GlobalVars.PostScanStat.stats[stats_set_key] = {}
GlobalVars.PostScanStat.stats[stats_set_key]['stats'] = GlobalVars.PostScanStat.default_stats.copy()
-GlobalVars.PostScanStat.stats[stats_set_key]['start_timestamp'] = datetime.utcnow()
+GlobalVars.PostScanStat.stats[stats_set_key]['start_timestamp'] = datetime.now(tz=timezone.utc)

@staticmethod
def reset(stats_set_key):
@@ -280,7 +280,7 @@ def reset(stats_set_key):
def lock(stats_set_key):
""" Locks post scanning data in a stats set """
with GlobalVars.PostScanStat.rw_lock:
-GlobalVars.PostScanStat.stats[stats_set_key]['locked_timestamp'] = datetime.utcnow()
+GlobalVars.PostScanStat.stats[stats_set_key]['locked_timestamp'] = datetime.now(tz=timezone.utc)

@staticmethod
def unlock(stats_set_key):
10 changes: 5 additions & 5 deletions helpers.py
@@ -2,7 +2,7 @@
import os
import sys
import traceback
-from datetime import datetime
+from datetime import datetime, timezone
import importlib
import threading
# termcolor doesn't work properly in PowerShell or cmd on Windows, so use colorama.
@@ -80,7 +80,7 @@ def get_db(cls):

@classmethod
def add_current_exception(cls):
-now = datetime.utcnow()
+now = datetime.now(tz=timezone.utc)
exctype, value, traceback_or_message = sys.exc_info()
tr = get_traceback_from_traceback_or_message(traceback_or_message)
cls.add(now.timestamp(), exctype.__name__, str(value), tr)
@@ -175,7 +175,7 @@ def log(log_level, *args, and_file=False, no_exception=False):
return

color = levels[log_level][1] if log_level in levels else 'white'
log_str = "{} {}".format(colored("[{}]".format(datetime.utcnow().isoformat()[11:-3]),
log_str = "{} {}".format(colored("[{}]".format(datetime.now(tz=timezone.utc).isoformat()[11:-9]),
color, attrs=['bold']),
redact_passwords(" ".join([str(x) for x in args])))
print(log_str, file=sys.stderr)
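
The slice changes from `[11:-3]` to `[11:-9]` because an aware datetime's `isoformat()` appends the `+00:00` UTC offset; both forms extract the same `HH:MM:SS.mmm` fragment (assuming the microsecond field is nonzero, so `isoformat()` prints it). A quick illustration:

    from datetime import datetime, timezone

    # Naive: '2024-03-25T12:34:56.789012'        -> [11:-3] == '12:34:56.789'
    # Aware: '2024-03-25T12:34:56.789012+00:00'  -> [11:-9] == '12:34:56.789'
    aware = datetime.now(tz=timezone.utc)
    print(aware.isoformat()[11:-9])
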
@@ -198,7 +198,7 @@ def log_file(log_level, *args):
if levels[log_level] < Helpers.min_log_level:
return

log_str = redact_passwords("[{}] {}: {}".format(datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S"),
log_str = redact_passwords("[{}] {}: {}".format(datetime.now(tz=timezone.utc).strftime("%Y-%m-%d %H:%M:%S"),
log_level.upper(), " ".join([str(x) for x in args])))
with open("errorLogs.txt", "a", encoding="utf-8") as f:
print(log_str, file=f)
@@ -213,7 +213,7 @@

def log_exception(exctype, value, traceback_or_message, and_file=False, *, log_level=None):
log_level = 'error' if log_level is None else log_level
-now = datetime.utcnow()
+now = datetime.now(tz=timezone.utc)
tr = get_traceback_from_traceback_or_message(traceback_or_message)
exception_only = ''.join(traceback.format_exception_only(exctype, value)).strip()
logged_msg = "{exception}\n{now} UTC\n{row}\n\n".format(exception=exception_only, now=now, row=tr)
6 changes: 3 additions & 3 deletions spamhandling.py
@@ -5,7 +5,7 @@
import datahandling
import chatcommunicate
from globalvars import GlobalVars
-from datetime import datetime, timedelta
+from datetime import datetime, timedelta, timezone
import regex
import parsing
import metasmoke
@@ -27,7 +27,7 @@ def should_whitelist_prevent_alert(user_url, reasons):
def sum_weight(reasons: list):
if not GlobalVars.reason_weights:
datahandling.update_reason_weights()
-now = datetime.utcnow() - timedelta(minutes=15)
+now = datetime.now(tz=timezone.utc) - timedelta(minutes=15)
if 'last_updated' not in GlobalVars.reason_weights or \
(now.date() != GlobalVars.reason_weights['last_updated'] and now.hour >= 1):
Tasks.do(datahandling.update_reason_weights)
@@ -114,7 +114,7 @@ def handle_spam(post, reasons, why):
"repeating words in title" in reasons or
"repeating words in body" in reasons or
"repeating words in answer" in reasons):
-datahandling.add_auto_ignored_post((post.post_id, post.post_site, datetime.utcnow()))
+datahandling.add_auto_ignored_post((post.post_id, post.post_site, datetime.now(tz=timezone.utc)))

if why is not None and why != "":
datahandling.add_why(post.post_site, post.post_id, why)
6 changes: 3 additions & 3 deletions ws.py
@@ -21,7 +21,7 @@
import traceback
from bodyfetcher import BodyFetcher
import chatcommunicate
-from datetime import datetime
+from datetime import datetime, timezone
from spamhandling import check_if_spam_json
from globalvars import GlobalVars
from datahandling import (load_pickle, PICKLE_STORAGE, load_files, filter_auto_ignored_posts,
@@ -230,7 +230,7 @@ def check_socket_connections():
socket_failure = False
with chatcommunicate._clients_lock:
for client in chatcommunicate._clients.values():
-if client.last_activity and (datetime.utcnow() - client.last_activity).total_seconds() >= 60:
+if client.last_activity and (datetime.now(tz=timezone.utc) - client.last_activity).total_seconds() >= 60:
socket_failure = True
if socket_failure:
exit_mode("socket_failure")
@@ -313,7 +313,7 @@ def init_se_websocket_or_reboot(max_tries, tell_debug_room_on_error=False):

except Exception as e:
exc_type, exc_obj, exc_tb = sys.exc_info()
-now = datetime.utcnow()
+now = datetime.now(tz=timezone.utc)
delta = now - GlobalVars.startup_utc_date
seconds = delta.total_seconds()
tr = traceback.format_exc()
