Skip to content

Commit d5eefd3

Browse files
committed
Enforced PEP-8 guidelines
1 parent f652e15 commit d5eefd3

File tree

153 files changed

+3289
-2547
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

153 files changed

+3289
-2547
lines changed

Diff for: Amazon-Price-Alert/amazon_scraper.py

+14-16
Original file line number | Diff line number | Diff line change
@@ -43,11 +43,11 @@ def get_title(self):
4343
# Stores the price of the product after filtering the string and
4444
# converting it to an integer
4545
def get_price(self):
46-
price_raw = self.soup.find(
47-
'span', id='priceblock_ourprice').text.strip()
46+
price_raw = self.soup.find('span',
47+
id='priceblock_ourprice').text.strip()
4848
price_filtered = price_raw[2:len(price_raw) - 3]
49-
self.product_price = int(
50-
''.join([x for x in price_filtered if x != ',']))
49+
self.product_price = int(''.join(
50+
[x for x in price_filtered if x != ',']))
5151
return
5252

5353
# Prints product title
@@ -116,8 +116,7 @@ def send_email(self):
116116

117117

118118
def main():
119-
url = input(
120-
"Paste the link of the Amazon product:")
119+
url = input("Paste the link of the Amazon product:")
121120
budget = int(input("Enter you budget price:"))
122121
u_email = input("Enter your email:")
123122
inp_str = ("How frequuently would you like to check the price?"
@@ -130,16 +129,15 @@ def main():
130129
time_delay = 3 * 60 * 60
131130
else:
132131
time_delay = 6 * 60 * 60
133-
msg = (
134-
"Great! Now just sit back and relax."
135-
"Minimize this program and be sure "
136-
"that it is running.\nAdditionally, ensure that there"
137-
"is stable internet connection "
138-
"during the time this program runs.\nIf the price of the "
139-
"product falls within your budget, "
140-
"you will recieve an email regarding the same and this"
141-
"program will auto-close.\nThank you for using "
142-
"C3PO scraper! Beep-bop bop-beep.")
132+
msg = ("Great! Now just sit back and relax."
133+
"Minimize this program and be sure "
134+
"that it is running.\nAdditionally, ensure that there"
135+
"is stable internet connection "
136+
"during the time this program runs.\nIf the price of the "
137+
"product falls within your budget, "
138+
"you will recieve an email regarding the same and this"
139+
"program will auto-close.\nThank you for using "
140+
"C3PO scraper! Beep-bop bop-beep.")
143141
print(msg)
144142
c3po = Scraper(url, budget, u_email)
145143
while True:

Diff for: Amazon-Price-Tracker/amazonprice.py

+29-18
Original file line number | Diff line number | Diff line change
@@ -11,16 +11,12 @@
1111

1212
# get your browser information by searching "my user agent"
1313
user_agent = input("Enter your User-Agent string here\n")
14-
headers = {
15-
"User-Agent": f'{user_agent}'
16-
17-
}
14+
headers = {"User-Agent": f'{user_agent}'}
1815
Url = input("Drop the Url of product you wish to buy...!\n")
1916

2017
page = requests.get(Url, headers=headers)
2118
soup = BeautifulSoup(page.content, "html.parser")
2219

23-
2420
# print(soup)
2521

2622

@@ -41,26 +37,39 @@ def mail_sending(mail_id, title, password):
4137
def check_price():
4238
title = soup.find(id="productTitle").get_text().strip()
4339
try:
44-
price = soup.find(id="priceblock_ourprice_row").get_text().strip()[:20].replace('₹', '').replace(' ',
45-
'').replace(
46-
'Price:', '').replace('\n', '').replace('\xa0', '').replace(',', '').replace('Fu', '')
40+
price = soup.find(
41+
id="priceblock_ourprice_row").get_text().strip()[:20].replace(
42+
'₹', '').replace(' ', '').replace('Price:', '').replace(
43+
'\n', '').replace('\xa0',
44+
'').replace(',', '').replace('Fu', '')
4745

4846
except:
4947
try:
50-
price = soup.find(id="priceblock_dealprice").get_text().strip()[:20].replace('₹', '').replace(' ',
51-
'').replace(
52-
'Price:', '').replace('\n', '').replace('\xa0', '').replace(',', '').replace('Fu', '')
48+
price = soup.find(
49+
id="priceblock_dealprice").get_text().strip()[:20].replace(
50+
'₹', '').replace(' ', '').replace('Price:', '').replace(
51+
'\n', '').replace('\xa0',
52+
'').replace(',',
53+
'').replace('Fu', '')
5354

5455
except:
5556
try:
56-
price = soup.find(id="priceblock_ourprice").get_text().strip()[:20].replace('₹', '').replace(' ',
57-
'').replace(
58-
'Price:', '').replace('\n', '').replace('\xa0', '').replace(',', '').replace('Fu', '')
57+
price = soup.find(
58+
id="priceblock_ourprice").get_text().strip()[:20].replace(
59+
'₹',
60+
'').replace(' ', '').replace('Price:', '').replace(
61+
'\n',
62+
'').replace('\xa0',
63+
'').replace(',', '').replace('Fu', '')
5964

6065
except:
61-
price = soup.find(id="priceblock_ourprice_lbl").get_text().strip()[:20].replace('₹', '').replace(' ',
62-
'').replace(
63-
'Price:', '').replace('\n', '').replace('\xa0', '').replace(',', '').replace('Fu', '')
66+
price = soup.find(id="priceblock_ourprice_lbl").get_text(
67+
).strip()[:20].replace('₹', '').replace(' ', '').replace(
68+
'Price:',
69+
'').replace('\n',
70+
'').replace('\xa0',
71+
'').replace(',',
72+
'').replace('Fu', '')
6473

6574
fixed_price = float(price)
6675
print(title)
@@ -69,7 +78,9 @@ def check_price():
6978
your_price = y_price.replace(',', '')
7079
mail_id = input("Please enter your email id: ")
7180
password = input("Enter your app password here: ")
72-
print("Thank You! You'll receive an email as soon as the price of product drops...!")
81+
print(
82+
"Thank You! You'll receive an email as soon as the price of product drops...!"
83+
)
7384
# print(price)
7485
if fixed_price <= float(your_price):
7586
mail_sending(mail_id, title, password)

Diff for: Anime-Tracker/anime_tracker.py

+14-9
Original file line number | Diff line number | Diff line change
@@ -1,5 +1,3 @@
1-
2-
31
try:
42
import requests
53
from bs4 import BeautifulSoup
@@ -12,18 +10,23 @@
1210

1311
# mainly bs4 lib is used for extracting html from web pages
1412

13+
1514
def details(soup):
1615

17-
info = soup.find('div', {'class': 'pure-1 md-3-5'}) # selecting div with class pure...
18-
print("\nAbout the Anime : \n", "\t\t", info.find('p').getText(), "\n") # now extracting the text for p tag of the div
16+
# selecting div with class pure...
17+
info = soup.find('div', {'class': 'pure-1 md-3-5'})
18+
# now extracting the text for p tag of the div
19+
print("\nAbout the Anime : \n", "\t\t", info.find('p').getText(), "\n")
1920

2021
total_episodes = soup.find('div', {'class': 'pure-1 md-1-5'})
2122
print("\nTotal number of episodes :\t",
22-
re.sub("[^0-9]", "", total_episodes.find('span').getText())) # usimg regex for only selecting numbers
23+
re.sub(
24+
"[^0-9]", "",
25+
total_episodes.find(
26+
'span').getText())) # usimg regex for only selecting numbers
2327

2428
Active_years = soup.find('span', {'class': 'iconYear'})
25-
print("\n Years Active (From-To)\t:\t",
26-
Active_years.getText(), "-\n")
29+
print("\n Years Active (From-To)\t:\t", Active_years.getText(), "-\n")
2730

2831
rating = soup.find('div', {'class': 'avgRating'})
2932
print("Rating : ", rating.find('span').getText())
@@ -42,7 +45,8 @@ def details(soup):
4245
def entry():
4346
print("\nType complete name>>\n")
4447
anime_name = input(
45-
"[+] Enter the name of the Anime : ").strip().title().replace(" ", "-")
48+
"[+] Enter the name of the Anime : ").strip().title().replace(
49+
" ", "-")
4650

4751
print("\n")
4852
print(anime_name)
@@ -51,7 +55,8 @@ def entry():
5155
source_code = requests.get(search_url)
5256
content = source_code.content
5357
global soup
54-
soup = BeautifulSoup(content, features="html.parser") # to parse the selectd HTML
58+
# to parse the selectd HTML
59+
soup = BeautifulSoup(content, features="html.parser")
5560
# print(soup.prettify)
5661

5762
try:

Diff for: Auto-Fill-Google-Forms/test.py

+13-9
Original file line number | Diff line number | Diff line change
@@ -1,21 +1,25 @@
11
import csv
22
import time
3-
from selenium import webdriver
3+
from selenium import webdriver
44
from selenium.webdriver.common.keys import Keys
55

66
inputName = '/html/body/div/div[2]/form/div[2]/div/div[2]/div[1]/div/div/div[2]/div/div[1]/div/div[1]/input'
77
inputEmailID = '/html/body/div/div[2]/form/div[2]/div/div[2]/div[2]/div/div/div[2]/div/div[1]/div/div[1]/input'
88
inputPhone = '/html/body/div/div[2]/form/div[2]/div/div[2]/div[3]/div/div/div[2]/div/div[1]/div/div[1]/input'
99

10-
#Submit Button Xpath
10+
# Submit Button Xpath
1111
Submit = '/html/body/div/div[2]/form/div[2]/div/div[3]/div[1]/div/div'
1212

13+
1314
def sleep():
1415
time.sleep(3)
1516

1617

17-
browswer = webdriver.Firefox(executable_path = 'C:\geckodriver-v0.28.0-win64\geckodriver.exe')
18-
browswer.get('https://docs.google.com/forms/d/e/1FAIpQLScBejWF809oacjZlvkciXREi50fyHcq75l988KDJo3ycG7xkg/viewform')
18+
browswer = webdriver.Firefox(
19+
executable_path='C:\geckodriver-v0.28.0-win64\geckodriver.exe')
20+
browswer.get(
21+
'https://docs.google.com/forms/d/e/1FAIpQLScBejWF809oacjZlvkciXREi50fyHcq75l988KDJo3ycG7xkg/viewform'
22+
)
1923
name = []
2024
email = []
2125
phone = []
@@ -25,20 +29,20 @@ def sleep():
2529
name.append(row['name'])
2630
email.append(row['email'])
2731
phone.append(row['phone_number'])
28-
29-
#print(name,email,phone)
32+
33+
# print(name,email,phone)
3034
i = 0
31-
35+
3236
while i < len(name):
3337
browswer.find_element_by_xpath(inputName).send_keys(name[i])
3438
browswer.find_element_by_xpath(inputEmailID).send_keys(email[i])
3539
browswer.find_element_by_xpath(inputPhone).send_keys(phone[i])
3640
sleep()
3741
browswer.find_element_by_xpath(Submit).click()
38-
i+=1
42+
i += 1
3943
sleep()
4044
browswer.back()
4145
sleep()
4246

43-
#print(name,email,phone)
47+
# print(name,email,phone)
4448
browswer.quit()

Diff for: Auto_Backup/Auto_Backup.py

+16-7
Original file line number | Diff line number | Diff line change
@@ -17,12 +17,22 @@ def parse_input():
1717
1818
"""
1919
parser = argparse.ArgumentParser()
20-
parser.add_argument('-t', '--target', nargs=1, required=True,
20+
parser.add_argument('-t',
21+
'--target',
22+
nargs=1,
23+
required=True,
2124
help='Target Backup folder')
22-
parser.add_argument('-s', '--source', nargs='+', required=True,
25+
parser.add_argument('-s',
26+
'--source',
27+
nargs='+',
28+
required=True,
2329
help='Source Files to be added')
24-
parser.add_argument('-c', '--compress', nargs=1, type=int,
25-
help='Gzip threshold in bytes, Deafault 1024KB', default=[1024000])
30+
parser.add_argument('-c',
31+
'--compress',
32+
nargs=1,
33+
type=int,
34+
help='Gzip threshold in bytes, Deafault 1024KB',
35+
default=[1024000])
2636
# Default Threshold is 1024KB
2737

2838
# Help is triggered when there is no Input Provided
@@ -127,8 +137,8 @@ def sync_root(root, arg):
127137
for path, _, files in os.walk(root):
128138
for source in files:
129139
source = path + '/' + source
130-
threads.append(threaded_sync_file(source,
131-
target + source, compress))
140+
threads.append(
141+
threaded_sync_file(source, target + source, compress))
132142
# sync_file(source, target + source, compress)
133143
for thread in threads:
134144
thread.join()
@@ -142,7 +152,6 @@ def sync_root(root, arg):
142152
sync_root(root, arg)
143153
print('______________________________________________________________')
144154
print('------------------------- Done Done! -------------------------')
145-
146155
"""
147156
Example Usage-
148157
> python Auto_Backup.py --target ./Backup_Folder --source ./Source_Folder

Diff for: Automatic Certificate Generator/main.py

+7-4
Original file line number | Diff line number | Diff line change
@@ -5,9 +5,12 @@
55

66
# Implementation to generate certificate
77
df = pd.read_csv('list.csv')
8-
font = ImageFont.truetype('arial.ttf',60)
9-
for index,j in df.iterrows():
8+
font = ImageFont.truetype('arial.ttf', 60)
9+
for index, j in df.iterrows():
1010
img = Image.open('certificate.png')
1111
draw = ImageDraw.Draw(img)
12-
draw.text(xy=(150,250),text='{}'.format(j['name']),fill=(0,0,0),font=font) # customization
13-
img.save('pictures/{}.png'.format(j['name']))
12+
draw.text(xy=(150, 250),
13+
text='{}'.format(j['name']),
14+
fill=(0, 0, 0),
15+
font=font) # customization
16+
img.save('pictures/{}.png'.format(j['name']))

Diff for: BITCOIN-price-tracker/tracker.py

+10-3
Original file line number | Diff line number | Diff line change
@@ -3,15 +3,22 @@
33
import time
44

55
# create a function to get price of cryptocurrency
6+
7+
68
def get_latest_crypto_price(coin):
7-
url = 'https://www.google.com/search?q='+(coin)+'price'
9+
url = 'https://www.google.com/search?q=' + (coin) + 'price'
810
# make a request to the website
911
HTML = requests.get(url)
1012
# Parsse the HTML
1113
soup = BeautifulSoup(HTML.text, 'html.parser')
1214
# find the current price
13-
texti = soup.find('div', attrs={'class':'BNeawe iBp4i AP7Wnd'}).find({'div':'BNeawe iBp4i AP7Wnd'}).text
15+
texti = soup.find('div', attrs={
16+
'class': 'BNeawe iBp4i AP7Wnd'
17+
}).find({
18+
'div': 'BNeawe iBp4i AP7Wnd'
19+
}).text
1420
return texti
1521

22+
1623
price = get_latest_crypto_price('bitcoin')
17-
print('BITCOIN price : ' + price)
24+
print('BITCOIN price : ' + price)

0 commit comments

Comments (0)