Skip to content

Commit

Permalink
Fixing ImageRaider (#28) and BrokenPipe error
Browse files Browse the repository at this point in the history
  • Loading branch information
ThoughtfulDev committed Jul 15, 2018
1 parent 456c77d commit 739191b
Show file tree
Hide file tree
Showing 3 changed files with 56 additions and 14 deletions.
4 changes: 2 additions & 2 deletions config.json
@@ -1,6 +1,6 @@
{
"DEFAULTS": {
"SLEEP_DELAY": "5",
"SLEEP_DELAY": "7",
"GOOGLE_IMG_PAGES": "3"
},
"WEBDRIVER": {
Expand All @@ -14,4 +14,4 @@
"plus.google.com"
],
"INSTA_VALIDATION_MAX_IMAGES": "5"
}
}
62 changes: 52 additions & 10 deletions grabber/google.py
Expand Up @@ -2,6 +2,9 @@
import os
from pathlib import Path
from selenium.common.exceptions import NoSuchElementException
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.keys import Keys
import utils.config as cfg
import utils.console as console
Expand Down Expand Up @@ -38,16 +41,32 @@ def collectLinks(self, img_url):
input.send_keys(img_url)
input.send_keys(Keys.RETURN)
console.subtask('Searching for Image...')
time.sleep(cfg.timeout())
time.sleep(cfg.timeout() * 2)
pred_error = False
try:
pred = driver.find_element_by_xpath("/html/body/div[5]/div[3]/div[3]/div[1]/div[2]/div/div[2]/div[1]/div/div[2]/a")
pred = pred.text
except NoSuchElementException:
console.subfailure('No Prediction given sry...')
pred = None
pred_error = True
except BrokenPipeError:
#just try again...
try:
pred = driver.find_element_by_xpath("/html/body/div[5]/div[3]/div[3]/div[1]/div[2]/div/div[2]/div[1]/div/div[2]/a")
except NoSuchElementException:
console.subfailure('Broken pipe Error. This is not a Problem...moving on!')
console.subfailure('No Prediction given sry...')
pred = None
pred_error = True

if not pred_error:
pred = pred.text
self.predictions.append(pred)

link_name=driver.find_elements_by_xpath(".//h3[@class='r']/a")
try:
link_name=driver.find_elements_by_xpath(".//h3[@class='r']/a")
except BrokenPipeError:
link_name=driver.find_elements_by_xpath(".//h3[@class='r']/a")
console.subtask("Collecting Links...(Page 1)")
for link in link_name:
href = link.get_attribute('href')
Expand All @@ -61,8 +80,11 @@ def collectLinks(self, img_url):
page_n = driver.find_element_by_link_text(str(num))
page_n.click()
time.sleep(cfg.timeout())
console.subtask("Collecting Links...(Page {0})".format(num))
link_name=driver.find_elements_by_xpath(".//h3[@class='r']/a")
console.subtask("Collecting Links...(Page {0})".format(num))
try:
link_name=driver.find_elements_by_xpath(".//h3[@class='r']/a")
except BrokenPipeError:
link_name=driver.find_elements_by_xpath(".//h3[@class='r']/a")
for link in link_name:
href = link.get_attribute('href')
if filterLink(href):
Expand All @@ -88,21 +110,38 @@ def collectLinksLocal(self):
elems.click()
time.sleep(1)
elems = driver.find_element_by_xpath('/html/body/div[1]/div[3]/div[3]/div/div[2]/form/div[1]/div/a')

elems.click()
time.sleep(1)
console.subtask("Inserting Path")
input_box = driver.find_element_by_xpath('//*[@id="qbfile"]')
p_i = os.path.join(os.getcwd(), str_p)
input_box.send_keys(p_i)
time.sleep(cfg.timeout() * 2)
pred_error = False
try:
pred = driver.find_element_by_xpath("/html/body/div[6]/div[3]/div[3]/div[1]/div[2]/div/div[2]/div[1]/div/div[2]/a")
pred = pred.text
pred = driver.find_element_by_xpath("/html/body/div[5]/div[3]/div[3]/div[1]/div[2]/div/div[2]/div[1]/div/div[2]/a")
except NoSuchElementException:
console.subfailure('No Prediction given sry...')
pred = None
pred_error = True
except BrokenPipeError:
#just try again...
try:
pred = driver.find_element_by_xpath("/html/body/div[5]/div[3]/div[3]/div[1]/div[2]/div/div[2]/div[1]/div/div[2]/a")
except NoSuchElementException:
console.subfailure('Broken pipe Error. This is not a Problem...moving on!')
console.subfailure('No Prediction given sry...')
pred = None
pred_error = True

if not pred_error:
pred = pred.text
self.predictions.append(pred)
link_name=driver.find_elements_by_xpath(".//h3[@class='r']/a")
try:
link_name=driver.find_elements_by_xpath(".//h3[@class='r']/a")
except BrokenPipeError:
link_name=driver.find_elements_by_xpath(".//h3[@class='r']/a")
console.subtask("Collecting Links...(Page 1)")
for link in link_name:
href = link.get_attribute('href')
Expand All @@ -116,8 +155,11 @@ def collectLinksLocal(self):
page_n = driver.find_element_by_link_text(str(num))
page_n.click()
time.sleep(cfg.timeout())
console.subtask("Collecting Links...(Page {0})".format(num))
link_name=driver.find_elements_by_xpath(".//h3[@class='r']/a")
console.subtask("Collecting Links...(Page {0})".format(num))
try:
link_name=driver.find_elements_by_xpath(".//h3[@class='r']/a")
except BrokenPipeError:
link_name=driver.find_elements_by_xpath(".//h3[@class='r']/a")
for link in link_name:
href = link.get_attribute('href')
if filterLink(href):
Expand Down
4 changes: 2 additions & 2 deletions grabber/imageraider.py
Expand Up @@ -30,15 +30,15 @@ def insertImageLinks(self, images):
input.send_keys(i)
input.send_keys(Keys.RETURN)
console.subtask('Submitting...')
btn = self.driver.find_elements_by_xpath('/html/body/div[3]/div/div/article/div/div[1]/form/span/input')[0]
btn = self.driver.find_elements_by_xpath('/html/body/div[4]/div/div/article/div/div[1]/form/span/input')[0]
btn.click()

def uploadLocalImage(self, img):
self.driver.get("https://www.imageraider.com/")
input = self.driver.find_elements_by_xpath('//*[@id="file"]')[0]
p_i = os.path.join(os.getcwd(), img)
input.send_keys(p_i)
btn = self.driver.find_elements_by_xpath('/html/body/div[3]/div/div/article/div/div[1]/span/form/input[3]')[0]
btn = self.driver.find_elements_by_xpath('/html/body/div[4]/div/div/article/div/div[1]/span/form/input[3]')[0]
btn.click()

def downloadCSV(self):
Expand Down

0 comments on commit 739191b

Please sign in to comment.