From b5bab95519992d480d69cc4ccd642b4f8f601bb3 Mon Sep 17 00:00:00 2001
From: Roopak Mallik <70304320+RoopakMallik@users.noreply.github.com>
Date: Sat, 27 May 2023 22:54:12 +0530
Subject: [PATCH 1/3] This is a bot for web scrapping

---
 Web Scrapper Bot/Readme.md            |  9 +++++++
 Web Scrapper Bot/web_scrapping_bot.py | 37 +++++++++++++++++++++++++++
 2 files changed, 46 insertions(+)
 create mode 100644 Web Scrapper Bot/Readme.md
 create mode 100644 Web Scrapper Bot/web_scrapping_bot.py

diff --git a/Web Scrapper Bot/Readme.md b/Web Scrapper Bot/Readme.md
new file mode 100644
index 0000000..b540bce
--- /dev/null
+++ b/Web Scrapper Bot/Readme.md
@@ -0,0 +1,9 @@
+# This is a Web Scrapping Bot
+
+# Language, Libraries and Modules used-
+- requests
+- BeautifulSoup
+- datetime
+
+# Explanation
+- The requests module gets the raw HTML data from websites and the Beautiful Soup library is used to parse the information to get the exact data we require.
\ No newline at end of file
diff --git a/Web Scrapper Bot/web_scrapping_bot.py b/Web Scrapper Bot/web_scrapping_bot.py
new file mode 100644
index 0000000..1413664
--- /dev/null
+++ b/Web Scrapper Bot/web_scrapping_bot.py
@@ -0,0 +1,37 @@
+# importing the libraries and modules required
+import requests
+from bs4 import BeautifulSoup
+from datetime import datetime
+import time
+
+while(True):
+    now = datetime.now()
+
+    # time of web-scrapping
+    current_time = now.strftime("%H:%M:%S")
+    print(f'At time : {current_time} IST')
+
+    response = requests.get('https://coinmarketcap.com/')
+    text = response.text
+    html_data = BeautifulSoup(text, 'html.parser')
+    headings = html_data.find_all('tr')[0]
+    headings_list = []
+    for x in headings:
+        headings_list.append(x.text)
+    headings_list = headings_list[:10]
+
+    data = []
+
+    for x in range(1, 6):
+        row = html_data.find_all('tr')[x]
+        column_value = row.find_all('td')
+        dict = {}
+
+        for i in range(10):
+            dict[headings_list[i]] = column_value[i].text
+        data.append(dict)
+
+    for values in data:
+        print(values)
+    print('')
+    time.sleep(600)
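The scraping loop in PATCH 1/3 calls requests.get() with no timeout, custom headers, or status check, so a single network hiccup or non-200 response will raise and stop the bot. A minimal sketch of a more defensive fetch, assuming the same coinmarketcap.com page; fetch_rows is a hypothetical helper name, not part of the patch:

    import requests
    from bs4 import BeautifulSoup

    def fetch_rows(url='https://coinmarketcap.com/'):
        # Bound the wait and identify the client instead of hanging forever.
        try:
            response = requests.get(url, timeout=30,
                                    headers={'User-Agent': 'web-scrapping-bot'})
            response.raise_for_status()
        except requests.RequestException as err:
            print(f'Request failed: {err}')
            return []
        # Parse once and hand back every table row for the caller to slice.
        return BeautifulSoup(response.text, 'html.parser').find_all('tr')

The while loop could then call fetch_rows() once per cycle, skip the cycle when the list comes back empty, and reuse the returned rows for both the headings and the coin data instead of re-running find_all('tr') on every iteration.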
From 9f1e4439a4631036d24a7b08d40b4c28469bdb76 Mon Sep 17 00:00:00 2001
From: Roopak Mallik <70304320+RoopakMallik@users.noreply.github.com>
Date: Sun, 28 May 2023 13:14:49 +0530
Subject: [PATCH 2/3] Added a Excel Automation Bot

---
 Excel_Automation_Python/main.py | 56 +++++++++++++++++++++++++++++++++++
 1 file changed, 56 insertions(+)
 create mode 100644 Excel_Automation_Python/main.py

diff --git a/Excel_Automation_Python/main.py b/Excel_Automation_Python/main.py
new file mode 100644
index 0000000..00487d0
--- /dev/null
+++ b/Excel_Automation_Python/main.py
@@ -0,0 +1,56 @@
+from openpyxl import Workbook, load_workbook
+from openpyxl.utils import get_column_letter
+from openpyxl.styles import Font
+
+data = {
+    "James": {
+        "English": 65,
+        "Physics": 78,
+        "Computer": 98,
+        "History": 89
+    },
+    "Rhea": {
+        "English": 55,
+        "Physics": 77,
+        "Computer": 87,
+        "History": 95
+    },
+    "Harsh": {
+        "English": 100,
+        "Physics": 45,
+        "Computer": 75,
+        "History": 92
+    },
+    "Suman": {
+        "English": 30,
+        "Physics": 25,
+        "Computer": 45,
+        "History": 100
+    },
+    "Ryan": {
+        "English": 90,
+        "Physics": 100,
+        "Computer": 92,
+        "History": 60
+    }
+}
+
+wb = Workbook()
+ws = wb.active
+ws.title = "Student Marks"
+
+headings = ['Name'] + list(data['James'].keys())
+ws.append(headings)
+
+for person in data:
+    marks = list(data[person].values())
+    ws.append([person] + marks)
+
+for col in range(2, len(data['James']) + 2):
+    char = get_column_letter(col)
+    ws[char + "7"] = f"=SUM({char + '2'}:{char + '6'})/{len(data)}"
+
+for col in range(1, 6):
+    ws[get_column_letter(col) + '1'].font = Font(bold=True, color="0099CCFF")
+
+wb.save("StudentMarks.xlsx")

From 01a12bb89e48895fd2f3e9557c3ee30b121c11ec Mon Sep 17 00:00:00 2001
From: Roopak Mallik <70304320+RoopakMallik@users.noreply.github.com>
Date: Sun, 28 May 2023 18:30:11 +0530
Subject: [PATCH 3/3] Added Requirements file

---
 Web Scrapper Bot/Readme.md        |   9 ---------
 Web Scrapper Bot/requirements.txt | Bin 0 -> 6662 bytes
 2 files changed, 9 deletions(-)
 delete mode 100644 Web Scrapper Bot/Readme.md
 create mode 100644 Web Scrapper Bot/requirements.txt

diff --git a/Web Scrapper Bot/Readme.md b/Web Scrapper Bot/Readme.md
deleted file mode 100644
index b540bce..0000000
--- a/Web Scrapper Bot/Readme.md
+++ /dev/null
@@ -1,9 +0,0 @@
-# This is a Web Scrapping Bot
-
-# Language, Libraries and Modules used-
-- requests
-- BeautifulSoup
-- datetime
-
-# Explanation
-- The requests module gets the raw HTML data from websites and the Beautiful Soup library is used to parse the information to get the exact data we require.
\ No newline at end of file
diff --git a/Web Scrapper Bot/requirements.txt b/Web Scrapper Bot/requirements.txt
new file mode 100644
index 0000000000000000000000000000000000000000..218ead7144dae3cf0ec3f2f72556c5c4be50a992
GIT binary patch
literal 6662
[base85-encoded binary payload of requirements.txt, 6662 bytes]
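PATCH 2/3 imports load_workbook but never uses it, and the per-subject averages it writes into row 7 are Excel formulas, which openpyxl stores as text rather than evaluating. A short sketch, assuming the StudentMarks.xlsx produced by Excel_Automation_Python/main.py sits in the working directory, that reads the sheet back and recomputes each subject's average as a quick sanity check:

    from openpyxl import load_workbook

    wb = load_workbook("StudentMarks.xlsx")
    ws = wb["Student Marks"]

    # Row 1 holds the headings: Name followed by the four subjects.
    headings = next(ws.iter_rows(min_row=1, max_row=1, values_only=True))

    for col, subject in enumerate(headings[1:], start=2):
        # Rows 2-6 hold the five students' marks for this subject.
        marks = [ws.cell(row=r, column=col).value for r in range(2, 7)]
        print(f"{subject}: average = {sum(marks) / len(marks)}")

Passing data_only=True to load_workbook would instead return the cached formula results, but only after the file has been opened and saved once in a spreadsheet application, since openpyxl itself never computes formula values.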