Minor fixes in offline doc scripts
yorikvanhavre committed Mar 8, 2015
1 parent 0f562ed commit f3922e5
Showing 2 changed files with 3 additions and 3 deletions.
2 changes: 1 addition & 1 deletion src/Tools/offlinedoc/buildwikiindex.py
@@ -38,7 +38,7 @@

 URL = "http://www.freecadweb.org/wiki" #default URL if no URL is passed
 INDEX = "Online_Help_Toc" # the start page from where to crawl the wiki
-NORETRIEVE = ['Manual','Developer_hub','Power_users_hub','Users_hub','Source_documentation', 'User_hub','Main_Page','About_this_site','Interesting_links','Syndication_feeds'] # pages that won't be fetched (kept online)
+NORETRIEVE = ['Manual','Developer_hub','Power_users_hub','Users_hub','Source_documentation', 'User_hub','Main_Page','About_this_site','Interesting_links','Syndication_feeds','FreeCAD:General_disclaimer','FreeCAD:About','FreeCAD:Privacy_policy','Introduction_to_python'] # pages that won't be fetched (kept online)
 GETTRANSLATIONS = False # Set true if you want to get the translations too.
 MAXFAIL = 3 # max number of retries if download fails
 VERBOSE = True # to display what's going on. Otherwise, runs totally silent.
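Both scripts share this NORETRIEVE constant, and the same additions (the FreeCAD: meta pages and Introduction_to_python) are applied to downloadwiki.py below. The code that consults the list is outside this hunk; a minimal sketch of how such a blocklist is typically checked before fetching (hypothetical helper, not the script's actual code):

    NORETRIEVE = ['Manual', 'Main_Page', 'FreeCAD:About']  # abbreviated for the example

    def should_fetch(page):
        "Hypothetical filter: skip pages that must stay online-only."
        base = page.split('#')[0].split('?')[0]  # drop anchors/queries before comparing
        return base not in NORETRIEVE

    links = ['Draft_Workbench', 'FreeCAD:About', 'Arch_Wall']
    print([p for p in links if should_fetch(p)])  # ['Draft_Workbench', 'Arch_Wall']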
4 changes: 2 additions & 2 deletions src/Tools/offlinedoc/downloadwiki.py
@@ -37,7 +37,7 @@

 DEFAULTURL = "http://www.freecadweb.org/wiki" #default URL if no URL is passed
 INDEX = "Online_Help_Toc" # the start page from where to crawl the wiki
-NORETRIEVE = ['Manual','Developer_hub','Power_users_hub','Users_hub','Source_documentation', 'User_hub','Main_Page','About_this_site'] # pages that won't be fetched (kept online)
+NORETRIEVE = ['Manual','Developer_hub','Power_users_hub','Users_hub','Source_documentation', 'User_hub','Main_Page','About_this_site','FreeCAD:General_disclaimer','FreeCAD:About','FreeCAD:Privacy_policy','Introduction_to_python'] # pages that won't be fetched (kept online)
 GETTRANSLATIONS = False # Set true if you want to get the translations too.
 MAXFAIL = 3 # max number of retries if download fails
 VERBOSE = True # to display what's going on. Otherwise, runs totally silent.
@@ -137,7 +137,7 @@ def crawl():

 def get(page):
     "downloads a single page, returns the other pages it links to"
-    if page[-4:] in [".png",".jpg",".svg",".gif","jpeg"]:
+    if page[-4:] in [".png",".jpg",".svg",".gif","jpeg",".PNG",".JPG"]:
         fetchimage(page)
     elif not exists(page):
         html = fetchpage(page)
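Note that page[-4:] compares the last four characters literally: "jpeg" (no dot) matches the tail of ".jpeg", and each uppercase variant needs its own list entry, which is why ".PNG" and ".JPG" are added here. A case-insensitive check would cover all variants at once (a sketch, not the committed code):

    def is_image(page):
        "Case-insensitive test for the image extensions handled by the script."
        return page.lower().endswith((".png", ".jpg", ".jpeg", ".svg", ".gif"))

    print(is_image("Screenshot.PNG"))   # True
    print(is_image("diagram.jpeg"))     # True
    print(is_image("Draft_Workbench"))  # False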
