Skip to content

Commit

Permalink
Removing wrong configs from the file
Browse files Browse the repository at this point in the history
  • Loading branch information
Elias Medawar committed Mar 24, 2023
1 parent 45d48f8 commit d5818c1
Show file tree
Hide file tree
Showing 2 changed files with 11 additions and 10 deletions.
16 changes: 8 additions & 8 deletions admin_ui/backend.yml
Expand Up @@ -8,16 +8,16 @@ services:
- ${WEBAPP_STORAGE_HOME}/uploads:/directus/uploads
- ${WEBAPP_STORAGE_HOME}/database:/directus/database
environment:
KEY: '255d861b-5ea1-5996-9aa3-922530ec40b1'
SECRET: '6116487b-cda1-52c2-b5b5-c8022c45e263'
KEY: '<Random UID>'
SECRET: '<Random UID>'

DB_CLIENT: 'mysql'
DB_HOST: 'directus.mysql.database.azure.com'
DB_PORT: '3306'
DB_DATABASE: 'directus'
DB_HOST: '<your host>'
DB_PORT: '<your port>'
DB_DATABASE: '<your db>'
DB_USER: 'directus'
DB_PASSWORD: 'assatusd1r3ctu5#'
DB_PASSWORD: '<Your password>'


ADMIN_EMAIL: 'admin@example.com'
ADMIN_PASSWORD: 'd1r3ctu5'
ADMIN_EMAIL: '<the email address used to log in to the GUI>'
ADMIN_PASSWORD: '<the default password; change it after the first deployment>'
5 changes: 3 additions & 2 deletions scraping/web_crawler.py
Expand Up @@ -8,7 +8,7 @@
import csv
import time

def get_text_from_url(url):
def get_source_code_from_url(url):
response = requests.get(url)
if response.status_code == 200:
soup = BeautifulSoup(response.content, 'html.parser')
Expand All @@ -20,11 +20,12 @@ def get_text_from_url(url):

def get_question_answers_from_url(url, qa):
    """Fetch the page at *url* and feed it to the question/answer parser.

    Parameters:
        url: address of the page to crawl.
        qa: accumulator collection — presumably the parser appends extracted
            question/answer pairs to it (TODO confirm against the parser).

    Side effects: prints a progress line for each visited URL.
    """
    print(f"Visiting: {url}")
    # NOTE(review): the pre-rename call `soup = get_text_from_url(url)` was a
    # leftover diff line; after this commit's rename that name no longer
    # exists and calling it would raise NameError. Only the renamed helper
    # is used here.
    soup = get_source_code_from_url(url)
    if soup:  # helper yields a falsy value when the fetch/parse failed
        parse_question_answer_from_html(soup, qa, url)


# This method can be adapted by yourself to have your own logic
def parse_question_answer_from_html (web_response,qa,url):
domain_eak ="eak.admin.ch";
domain_ahv_iv = "ahv-iv.ch";
Expand Down

0 comments on commit d5818c1

Please sign in to comment.