Merge pull request #6260 from Gallaecio/bandit
bandit: allow-list false positives
wRAR committed Feb 29, 2024
2 parents 198f5cf + 31cbbb5 commit b80128f
Showing 14 changed files with 21 additions and 28 deletions.
11 changes: 1 addition & 10 deletions .bandit.yml
@@ -1,19 +1,10 @@
skips:
- B101 # assert_used
- B105 # hardcoded_password_string
- B301 # pickle
- B307 # eval
- B311 # random
- B101 # assert_used, needed for mypy
- B320 # xml_bad_etree
- B321 # ftplib, https://github.com/scrapy/scrapy/issues/4180
- B324 # hashlib "Use of weak SHA1 hash for security"
- B402 # import_ftplib, https://github.com/scrapy/scrapy/issues/4180
- B403 # import_pickle
- B404 # import_subprocess
- B406 # import_xml_sax
- B410 # import_lxml
- B411 # import_xmlrpclib, https://github.com/PyCQA/bandit/issues/1082
- B503 # ssl_with_bad_defaults
- B603 # subprocess_without_shell_equals_true
- B605 # start_process_with_a_shell
exclude_dirs: ['tests']
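The config change above narrows the global allow-list: project-wide skips remain only where a blanket waiver still makes sense (e.g. B101, since asserts are needed for mypy narrowing), while everything else moves to per-line # nosec markers in the files below. A rough sketch of the mechanics (not from this commit; recent bandit releases also accept an explicit test ID after the marker):

    import subprocess  # nosec B404 - waives only import_subprocess, on this line only

    subprocess.run(["ls", "-l"])  # nosec B603 - fixed argv list, shell=False by default

A bare # nosec, as used throughout this commit, waives every check on its line.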
6 changes: 4 additions & 2 deletions scrapy/commands/bench.py
@@ -1,4 +1,4 @@
import subprocess
import subprocess # nosec
import sys
import time
from urllib.parse import urlencode
@@ -29,7 +29,9 @@ def __enter__(self):
from scrapy.utils.test import get_testenv

pargs = [sys.executable, "-u", "-m", "scrapy.utils.benchserver"]
self.proc = subprocess.Popen(pargs, stdout=subprocess.PIPE, env=get_testenv())
self.proc = subprocess.Popen(
pargs, stdout=subprocess.PIPE, env=get_testenv()
) # nosec
self.proc.stdout.readline()

def __exit__(self, exc_type, exc_value, traceback):
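bench.py launches the benchmark server as a child process; bandit flags any subprocess import (B404) and any Popen call (B603) on principle. The call above builds a fixed argument list from sys.executable, so no shell is involved and there is nothing to inject. An illustrative contrast of the two findings (hypothetical strings, not Scrapy code):

    import subprocess

    user_text = "foo; echo pwned"  # attacker-controlled input
    subprocess.run("python -m " + user_text, shell=True)  # B602: the shell parses "; echo pwned"
    subprocess.run(["python", "-m", user_text])  # B603 only: a single argv element, never shell-parsed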
2 changes: 1 addition & 1 deletion scrapy/commands/edit.py
@@ -37,4 +37,4 @@ def run(self, args, opts):

sfile = sys.modules[spidercls.__module__].__file__
sfile = sfile.replace(".pyc", ".py")
self.exitcode = os.system(f'{editor} "{sfile}"')
self.exitcode = os.system(f'{editor} "{sfile}"') # nosec
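os.system hands its whole string to a shell, which is what B605 warns about; here both editor (local settings/environment) and sfile (the spider's own source path) are local configuration, not remote input. The same reasoning covers the scrapy edit invocation in genspider.py below. A shell-free alternative the commit does not take, sketched under the assumption that splitting the configured editor command is acceptable:

    import shlex
    import subprocess

    editor = "vim -n"  # hypothetical configured editor command
    sfile = "myspider.py"
    exitcode = subprocess.run([*shlex.split(editor), sfile], check=False).returncode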
2 changes: 1 addition & 1 deletion scrapy/commands/genspider.py
@@ -113,7 +113,7 @@ def run(self, args, opts):
if template_file:
self._genspider(module, name, url, opts.template, template_file)
if opts.edit:
self.exitcode = os.system(f'scrapy edit "{name}"')
self.exitcode = os.system(f'scrapy edit "{name}"') # nosec

def _genspider(self, module, name, url, template_name, template_file):
"""Generate the spider module, based on the given template"""
2 changes: 1 addition & 1 deletion scrapy/core/downloader/__init__.py
@@ -40,7 +40,7 @@ def free_transfer_slots(self) -> int:

def download_delay(self) -> float:
if self.randomize_delay:
return random.uniform(0.5 * self.delay, 1.5 * self.delay)
return random.uniform(0.5 * self.delay, 1.5 * self.delay) # nosec
return self.delay

def close(self) -> None:
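The jitter above draws each delay uniformly from [0.5 * delay, 1.5 * delay]; B311 fires because the random module is not a cryptographic RNG, which is beside the point for politeness timing (the random.randint call waived in scrapy/utils/benchserver.py below is the same situation). A worked check, assuming a delay of 2.0 seconds:

    import random

    delay = 2.0
    sample = random.uniform(0.5 * delay, 1.5 * delay)
    assert 1.0 <= sample <= 3.0  # random.uniform is inclusive at both ends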
4 changes: 2 additions & 2 deletions scrapy/exporters.py
@@ -5,10 +5,10 @@
import csv
import io
import marshal
import pickle
import pickle # nosec
import pprint
from collections.abc import Mapping
from xml.sax.saxutils import XMLGenerator
from xml.sax.saxutils import XMLGenerator # nosec

from itemadapter import ItemAdapter, is_item

6 changes: 3 additions & 3 deletions scrapy/extensions/httpcache.py
@@ -1,6 +1,6 @@
import gzip
import logging
import pickle
import pickle # nosec
from email.utils import mktime_tz, parsedate_tz
from importlib import import_module
from pathlib import Path
@@ -274,7 +274,7 @@ def _read_data(self, spider, request):
if 0 < self.expiration_secs < time() - float(ts):
return # expired

return pickle.loads(db[f"{key}_data"])
return pickle.loads(db[f"{key}_data"]) # nosec


class FilesystemCacheStorage:
@@ -352,7 +352,7 @@ def _read_meta(self, spider: Spider, request: Request):
if 0 < self.expiration_secs < time() - mtime:
return # expired
with self._open(metapath, "rb") as f:
return pickle.load(f)
return pickle.load(f) # nosec


def parse_cachecontrol(header):
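B301 exists because unpickling untrusted bytes can execute arbitrary code. Both cache storages above only load entries that the same storage previously wrote, so nothing crosses a trust boundary; the import pickle waivers in exporters.py, spiderstate.py and squeues.py silence the companion import check B403 for the same reason. A minimal illustration of the distinction (not Scrapy code):

    import pickle

    trusted = pickle.dumps({"status": 200, "url": "https://example.com"})
    print(pickle.loads(trusted))  # fine: round-tripping bytes this process produced
    # pickle.loads(bytes_from_the_network)  # the case B301 warns about: may run code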
4 changes: 2 additions & 2 deletions scrapy/extensions/spiderstate.py
@@ -1,4 +1,4 @@
import pickle
import pickle # nosec
from pathlib import Path

from scrapy import signals
@@ -31,7 +31,7 @@ def spider_closed(self, spider):
def spider_opened(self, spider):
if self.jobdir and Path(self.statefn).exists():
with Path(self.statefn).open("rb") as f:
spider.state = pickle.load(f)
spider.state = pickle.load(f) # nosec
else:
spider.state = {}

2 changes: 1 addition & 1 deletion scrapy/pqueues.py
@@ -20,7 +20,7 @@ def _path_safe(text):
pathable_slot = "".join([c if c.isalnum() or c in "-._" else "_" for c in text])
# as we replace some letters we can get collisions for different slots
# so we add a unique part
unique_slot = hashlib.md5(text.encode("utf8")).hexdigest()
unique_slot = hashlib.md5(text.encode("utf8")).hexdigest() # nosec
return "-".join([pathable_slot, unique_slot])


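B324 flags md5 as too weak for security use; _path_safe only needs a stable fingerprint so that distinct slot names do not collide after character replacement, and weakness is irrelevant there. On Python 3.9+ there is a way to state that intent that also satisfies bandit, sketched here (not what the commit does, as Scrapy still supported older Python versions at the time):

    import hashlib

    unique_slot = hashlib.md5(b"example.com", usedforsecurity=False).hexdigest()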
2 changes: 1 addition & 1 deletion scrapy/settings/default_settings.py
@@ -177,7 +177,7 @@
FILES_STORE_GCS_ACL = ""

FTP_USER = "anonymous"
FTP_PASSWORD = "guest"
FTP_PASSWORD = "guest" # nosec
FTP_PASSIVE_MODE = True

GCS_PROJECT_ID = None
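B105 keys on the variable name rather than the value: a string literal assigned to anything that looks like a password name gets reported. "guest" is the conventional anonymous-FTP password, public by definition, hence the waiver. An illustrative sketch of what triggers the check (the second name is hypothetical):

    FTP_PASSWORD = "guest"  # flagged: the name matches bandit's password patterns
    FTP_GREETING = "guest"  # same literal, non-matching name, not flagged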
2 changes: 1 addition & 1 deletion scrapy/shell.py
@@ -50,7 +50,7 @@ def start(self, url=None, request=None, response=None, spider=None, redirect=Tru
else:
self.populate_vars()
if self.code:
print(eval(self.code, globals(), self.vars))
print(eval(self.code, globals(), self.vars)) # nosec
else:
"""
Detect interactive shell setting in scrapy.cfg
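self.code here is the expression the user passed to scrapy shell with -c on their own machine, so eval is the feature itself rather than an injection vector; eval(test) in scrapy/utils/engine.py below is justified the same way. bandit's stock remedy, ast.literal_eval, parses literals only and could not replace either call:

    import ast

    print(ast.literal_eval("{'a': 1}"))  # fine: a plain literal
    # ast.literal_eval("response.css('title')")  # raises ValueError: not a literal -
    # arbitrary expressions are exactly what the shell must evaluate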
2 changes: 1 addition & 1 deletion scrapy/squeues.py
@@ -3,7 +3,7 @@
"""

import marshal
import pickle
import pickle # nosec
from os import PathLike
from pathlib import Path
from typing import Union
2 changes: 1 addition & 1 deletion scrapy/utils/benchserver.py
@@ -14,7 +14,7 @@ def getChild(self, name, request):
def render(self, request):
total = _getarg(request, b"total", 100, int)
show = _getarg(request, b"show", 10, int)
nlist = [random.randint(1, total) for _ in range(show)]
nlist = [random.randint(1, total) for _ in range(show)] # nosec
request.write(b"<html><head></head><body>")
args = request.args.copy()
for nl in nlist:
2 changes: 1 addition & 1 deletion scrapy/utils/engine.py
@@ -30,7 +30,7 @@ def get_engine_status(engine: "ExecutionEngine") -> List[Tuple[str, Any]]:
checks: List[Tuple[str, Any]] = []
for test in tests:
try:
checks += [(test, eval(test))]
checks += [(test, eval(test))] # nosec
except Exception as e:
checks += [(test, f"{type(e).__name__} (exception)")]

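The strings evaluated in get_engine_status come from a constant sequence defined just above in the same function, so eval never sees external input and B307's injection scenario cannot arise; the except branch already absorbs whatever the probes raise. A self-contained sketch of the pattern (stand-in object and entry names assumed, not copied from the file):

    class _Engine:  # hypothetical stand-in for Scrapy's ExecutionEngine
        def has_capacity(self):
            return True

    engine = _Engine()
    tests = ("engine.has_capacity()",)  # constant strings living next to the eval
    for test in tests:
        print(test, eval(test))  # nosec - input is this module's own constant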
