This section installs all the dependencies required for setting up the outreach portal 2.0.
We use the
setuptools
module to create a
setup.py
file, which installs all the Python library dependencies.
# setup.py for the Outreach portal: declares the third-party Python
# packages the application needs and installs them via setuptools.
from setuptools import setup

# Libraries the portal depends on at runtime and in tests.
dependencies = [
    'flask',
    'Flask-SQLAlchemy',
    'oursql',
    'flask-cors',
    'flask-testing',
    'requests',
    'Flask-OAuthlib',
]

setup(
    name='Outreach',
    version='2.0',
    install_requires=dependencies,
)
from src.app import create_app
from src.db import *
import src.config as config
def create_roles():
    """Seed the roles table, one user per real role, and a placeholder
    nodal centre.

    Roles are saved in a fixed order so their auto-assigned ids stay
    stable (other scripts look roles up by id).
    """
    roles = []
    for role_name in ["admin", "OC", "NC", "dummyrole"]:
        role = Role(name=Name(role_name))
        role.save()
        roles.append(role)

    # (name, email, role) for the three seed users; the admin user also
    # owns the placeholder centre created below.
    seed_users = [
        ("admin", "outreach-admin@vlabs.ac.in", roles[0]),
        ("dummyuseroc", "dummyuseroc@vlabs.ac.in", roles[1]),
        ("dummyusernc", "dummyusernc@vlabs.ac.in", roles[2]),
    ]
    admin_user = None
    for user_name, user_email, user_role in seed_users:
        user = User(name=Name(user_name), email=Email(user_email), role=user_role)
        user.save()
        if admin_user is None:
            admin_user = user

    centre = NodalCentre(name="dummycentre", location="dummylocation",
                         created_by=admin_user)
    centre.save()
def create_status():
    """Seed the workshop life-cycle statuses.

    Saved in a fixed order so the auto-assigned ids stay stable; the
    migration scripts reference these statuses by id.
    """
    status_names = ["Upcoming", "Pending for Approval", "Approved",
                    "Rejected", "Pending for Upload", "Cancel"]
    for status_name in status_names:
        status = Status(name=Name(status_name))
        status.save()
if __name__ == "__main__":
    # Create the schema, then seed roles/users and workshop statuses.
    db.create_all(app=create_app(config))
    create_roles()
    create_status()
#!/usr/bin/python
import MySQLdb
import sys
from src.app import create_app
from src.db import *
import src.config as config
import requests
def populate_users():
    """Copy rows from the legacy va_users table into the new users table.

    Relies on the module-level ``cursor1`` (connection to the legacy
    database) being set up in the ``__main__`` block below.
    """
    print "Populating users table.."
    cursor1.execute("select * from va_users")
    data = cursor1.fetchall()
    for row in data:
        # Column meanings inferred from usage: row[1]=name, row[2]=email,
        # row[5]=legacy role id, row[14]=created, row[15]=last_active
        # -- TODO confirm against the legacy schema.
        if row[1] == "Prof. (Dr.) K. Kamal":
            # Normalise this one name to the spelling used elsewhere.
            name = Name("Prof.Dr.K. Kamal")
        else:
            name = Name(row[1].strip())
        email = Email(row[2].strip())
        # Map the legacy role id to a seeded Role (ids assigned by
        # create_roles: 1=admin, 2=OC, 3=NC, 4=dummyrole); any other
        # role id means the row is skipped.
        if row[5] == 1:
            role = Role.get_by_id(2)
        elif row[5] == 2:
            role = Role.get_by_id(3)
        elif row[5] == None:
            role = Role.get_by_id(4)
        else:
            continue
        created = str(row[14])
        # Legacy NULL last-active timestamps become the literal "Null".
        if row[15] == None:
            last_active = "Null"
        else:
            last_active = str(row[15])
        new_user = User(name=name, email=email, role=role, created=created,
                        last_active=last_active)
        # print new_user.name, new_user.email
        new_user.save()
def populate_workshops():
print "Populating workshops table.."
cursor1.execute("select * from workshop")
data = cursor1.fetchall()
for row in data:
name = row[1].strip()
location = row[2].strip()
participants_attended = int(row[5])
no_of_sessions = 0
date = ''
labs_planned = len(row[9].split(","))
if row[12] == 3 or row[12] == 4 or row[12] == 5:
no_of_participants_expected = 1000
elif row[12] == 13:
no_of_participants_expected = 100
else:
no_of_participants_expected = 0
if row[12] == 3:
user = User.query.filter_by(name="Rakesh T").first()
status = Status.get_by_id(3)
elif row[12] == 5:
user = User.query.filter_by(name="P Saikumar").first()
status = Status.get_by_id(3)
elif row[12] == 4:
user = User.query.filter_by(name="M Srinath Reddy").first()
status = Status.get_by_id(3)
elif row[12] == 13:
user = User.get_by_id(4)
status = Status.get_by_id(3)
else:
continue
if row[0] == 3 or row[0] == 4 or row[0] == 5:
experiments_conducted = 10000
elif row[0] == 35 or row[0] == 41 or row[0] == 46:
experiments_conducted = 22
elif row[0] == 32 or row[0] == 33:
experiments_conducted = 2000
elif row[0] == 11 or row[0] == 28:
experiments_conducted = 3000
elif row[0] == 20 or row[0] == 27:
experiments_conducted = 6000
elif row[0] == 12 or row[0] == 15:
experiments_conducted = 10
elif row[0] == 13:
experiments_conducted = 30
elif row[0] == 10:
experiments_conducted = 5000
elif row[0] == 19:
experiments_conducted = 2973
elif row[0] == 21:
experiments_conducted = 1080
elif row[0] == 22:
experiments_conducted = 3750
elif row[0] == 23:
experiments_conducted = 800
elif row[0] == 24:
experiments_conducted = 2880
elif row[0] == 25:
experiments_conducted = 2700
elif row[0] == 26:
experiments_conducted = 584
elif row[0] == 34:
experiments_conducted = 12
elif row[0] == 36:
experiments_conducted = 37
elif row[0] == 39:
experiments_conducted = 23
elif row[0] == 42:
experiments_conducted = 66
else:
experiments_conducted = 0
participating_institutes = row[3].strip()
if row[4] == '' or row[5] == '' or row[6] == '' or row[9] == '':
pass
else:
no_of_sessions = int(row[6])
date = str(row[4])
new_workshop = Workshop(name=name, location=location, user=user,
participating_institutes=participating_institutes,
participants_attended = participants_attended,
no_of_sessions=no_of_sessions,
no_of_participants_expected = no_of_participants_expected,
experiments_conducted=experiments_conducted,
labs_planned=labs_planned,
status=status, date=date)
new_workshop.save()
def populate_nodal_centres():
print "Populating nodal centres table.."
cursor1.execute("select * from va_users")
data = cursor1.fetchall()
for row in data:
name = ''
location = ''
# user = User.query.filter_by(name=row[1]).first()
if row[8] == 1:
user = User.query.filter_by(name="sripathi").first()
elif row[8] == 2:
user = User.query.filter_by(name="Geeta Bose").first()
elif row[8] == 6:
user = User.query.filter_by(name="karunakar").first()
elif row[8] == 8:
user = User.query.filter_by(name="Kantesh Balani").first()
elif row[8] == 9:
user = User.query.filter_by(name="Pushpdeep Mishra").first()
elif row[8] == 16:
user = User.query.filter_by(name="Yogesh").first()
elif row[8] == 20:
user = User.query.filter_by(name="Sanjeet Kumar").first()
elif row[8] == 22:
user = User.query.filter_by(name="Dr Vinod Kumar").first()
else:
continue
if row[6] == 'TASK':
name = 'TASK'
location = 'Null'
elif row[6] == None:
name = 'Null'
location = 'Null'
elif ',' in row[6]:
name_location = row[6].split(",")
name = name_location[0].strip()
location = name_location[1].strip()
else:
name = row[6]
location = 'Null'
#new_centre.save()
if user == None:
user = User.query.filter_by(name="dummyuseroc").first()
new_centre = NodalCentre(name=name, location=location, created_by=user)
new_centre.save()
# print new_centre.name, new_centre.location, new_centre.created_by.name
def populate_nodal_coordinator_details():
    """Migrate per-coordinator targets from the legacy va_users table.

    For each legacy row this links a coordinator (user1), the OC who
    created the record (user2) and a nodal centre, falling back to the
    seeded dummy records when any lookup fails. Relies on the
    module-level ``cursor1`` (legacy database).
    """
    print "Populating nodal coordinator details table.."
    cursor1.execute("select * from va_users")
    data = cursor1.fetchall()
    for row in data:
        # Targets default to 0 when the legacy columns are NULL/empty.
        if row[9] == None:
            target_workshops = 0
        else:
            target_workshops = int(row[9])
        if row[10] == None or row[10] == '':
            target_participants = 0
        else:
            target_participants = int(row[10])
        if row[11] == None:
            target_experiments = 0
        else:
            target_experiments = int(row[11])
        # Default centre (id 1); possibly overridden by the name matches
        # below, and replaced by "dummycentre" if still None at the end.
        nodal_centre = NodalCentre.get_by_id(1)
        # user1 = User.query.filter_by(name=row[1]).first()
        # user2 = User.get_by_id(2)
        # Skip the legacy test account; otherwise look the coordinator up
        # by name (as migrated by populate_users).
        if row[1] == 'test':
            continue
        else:
            user1 = User.query.filter_by(name=row[1]).first()
        # if row[8] == None:
        # continue
        # Map the legacy OC id (row[8]) to the OC's user record; rows
        # with an unknown OC id are skipped.
        if row[8] == 1:
            user2 = User.query.filter_by(name="sripathi").first()
        elif row[8] == 2:
            user2 = User.query.filter_by(name="Geeta Bose").first()
        elif row[8] == 6:
            user2 = User.query.filter_by(name="karunakar").first()
        elif row[8] == 8:
            user2 = User.query.filter_by(name="Kantesh Balani").first()
        elif row[8] == 9:
            user2 = User.query.filter_by(name="Pushpdeep Mishra").first()
        elif row[8] == 16:
            user2 = User.query.filter_by(name="Yogesh").first()
        elif row[8] == 20:
            user2 = User.query.filter_by(name="Sanjeet Kumar").first()
        elif row[8] == 22:
            user2 = User.query.filter_by(name="Dr Vinod Kumar").first()
        else:
            continue
        # row[6] holds "<institute>, <city>"; populate_nodal_centres
        # stored the centres without the trailing city, so match the
        # known full strings onto their truncated centre names.
        if row[6] == "Modern Institute of Technology and Research Centre, Alwar":
            nodal_centre = NodalCentre.query.filter_by(name="Modern Institute of Technology and Research Centre").first()
        if row[6] == "Noida Institute of Engineering and Technology, Greater Noida":
            nodal_centre = NodalCentre.query.filter_by(name="Noida Institute of Engineering and Technology").first()
        if row[6] == "Asia Pacific Institute of Information Technology, Panipat":
            nodal_centre = NodalCentre.query.filter_by(name="Asia Pacific Institute of Information Technology").first()
        if row[6] == "Meerut Institute of Technology, Meerut":
            nodal_centre = NodalCentre.query.filter_by(name="Meerut Institute of Technology").first()
        if row[6] == "Desh Bhagat University, Punjab":
            nodal_centre = NodalCentre.query.filter_by(name="Desh Bhagat University").first()
        if row[6] == "Lingaya's University, Faridabad":
            nodal_centre = NodalCentre.query.filter_by(name="Lingaya's University").first()
        if row[6] == "Krishna Institute of Engineering and Technology, Ghaziabad":
            nodal_centre = NodalCentre.query.filter_by(name="Krishna Institute of Engineering and Technology").first()
        if row[6] == "Dronacharya College of Engineering, Gurgaon":
            nodal_centre = NodalCentre.query.filter_by(name="Dronacharya College of Engineering").first()
        if row[6] == "Dronacharya Group of Institution, Greater Noida":
            nodal_centre = NodalCentre.query.filter_by(name="Dronacharya Group of Institution").first()
        if row[6] == "IIMT College of Engineering, Greater Noida":
            nodal_centre = NodalCentre.query.filter_by(name="IIMT College of Engineering").first()
        if row[6] == "Chandigarh University, Gauruan, Punjab.":
            nodal_centre = NodalCentre.query.filter_by(name="Chandigarh University, Gauruan").first()
        if row[6] == "Swami Vivekanand College of Engineering, Indore":
            nodal_centre = NodalCentre.query.filter_by(name="Swami Vivekanand College of Engineering").first()
        if row[6] == "Global Group of Institutions, Lucknow":
            nodal_centre = NodalCentre.query.filter_by(name="Global Group of Institutions").first()
        if row[6] == "Dr. Ambedkar Institute of Technology for Handicapped, Kanpur":
            nodal_centre = NodalCentre.query.filter_by(name="Dr. Ambedkar Institute of Technology for Handicapped").first()
        if row[6] == "Hindustan Institute of Technology and Management, Agra":
            nodal_centre = NodalCentre.query.filter_by(name="Hindustan Institute of Technology and Management").first()
        if row[6] == "Pranveer Singh Institute of Technology, Kanpur":
            nodal_centre = NodalCentre.query.filter_by(name="Pranveer Singh Institute of Technology").first()
        if row[6] == "Saraswati Gyan Mandir Inter College, Indira Nagar, Kanpur":
            nodal_centre = NodalCentre.query.filter_by(name="Saraswati Gyan Mandir Inter College, Indira Nagar").first()
        if row[6] == "Kendriya Vidyalaya, IIT Kanpur":
            nodal_centre = NodalCentre.query.filter_by(name="Kendriya Vidyalaya").first()
        if row[6] == "Babu Banarasi Das University, Lucknow":
            nodal_centre = NodalCentre.query.filter_by(name="Babu Banarasi Das University").first()
        if row[6] == "Krishna Engineering College, Ghaziabad":
            nodal_centre = NodalCentre.query.filter_by(name="Krishna Engineering College").first()
        if row[6] == "Bharat Institute of Technology, Meerut":
            nodal_centre = NodalCentre.query.filter_by(name="Bharat Institute of Technology").first()
        if row[6] == "JSSATE, Noida":
            nodal_centre = NodalCentre.query.filter_by(name="JSSATE").first()
        if row[6] == "Seth Anandram Jaipuria, Kanpur":
            nodal_centre = NodalCentre.query.filter_by(name="Seth Anandram Jaipuria").first()
        if row[6] == "KV Cant, Kanpur":
            nodal_centre = NodalCentre.query.filter_by(name="KV Cant").first()
        # Fall back to the seeded placeholder records when any lookup
        # above failed to find a match.
        if nodal_centre == None:
            nodal_centre = NodalCentre.query.filter_by(name="dummycentre").first()
        if user2 == None:
            user2 = User.query.filter_by(name="dummyuseroc").first()
        if user1 == None:
            user1 = User.query.filter_by(name="dummyusernc").first()
        nodal_coordinator_detail = NodalCoordinatorDetail(user=user1,
                                                          target_workshops=target_workshops,
                                                          created_by=user2,
                                                          nodal_centre=nodal_centre,
                                                          target_participants=target_participants,
                                                          target_experiments=target_experiments)
        # Trailing commas keep all six fields on one output line.
        print nodal_coordinator_detail.user.name, nodal_coordinator_detail.target_workshops,
        print nodal_coordinator_detail.created_by.name, nodal_coordinator_detail.nodal_centre.name,
        print nodal_coordinator_detail.target_participants, nodal_coordinator_detail.target_experiments
        nodal_coordinator_detail.save()
def populate_workshop_reports():
print "Populating workshop reports table.."
cursor1.execute("select * from workshop_report")
data = cursor1.fetchall()
for row in data:
if row[1] == 11:
workshop = Workshop.get_by_id(6)
elif row[1] == 12:
workshop = Workshop.get_by_id(7)
else:
workshop = Workshop.get_by_id(row[1])
if row[5] != None:
name = "Attendance Sheet"
path = "/static/uploads/" + row[5]
new_workshop_report = WorkshopReport(name=name, workshop=workshop,
path=path)
new_workshop_report.save()
if row[8] != None:
name = "Workshop Photos"
path = "/static/uploads/" + row[8]
new_workshop_report = WorkshopReport(name=name, workshop=workshop,
path=path)
new_workshop_report.save()
if row[7] != None:
name = "College Report"
path = "/static/uploads/" + row[7]
new_workshop_report = WorkshopReport(name=name, workshop=workshop,
path=path)
new_workshop_report.save()
def populate_reference_documents():
print "Populating reference documents table.."
user = User.query.filter_by(name="admin").first()
name1 = Name("Prerequisites of Workshop")
path1 = "/static/uploads/1440048323-17.pdf"
new_reference_document1 = ReferenceDocument(name=name1, user=user,
path=path1)
new_reference_document1.save()
name2 = Name("Eligibility System Configuration Infrastructure")
path2 = "/static/uploads/1440048369-12.doc"
new_reference_document2 = ReferenceDocument(name=name2, user=user,
path=path2)
new_reference_document2.save()
name3 = Name("Virtual Labs Handout")
path3 = "/static/uploads/1440044856-79.pdf"
new_reference_document3 = ReferenceDocument(name=name3, user=user,
path=path3)
new_reference_document3.save()
name4 = Name("Feedback Form")
path4 = "/static/uploads/1440045014-23.pdf"
new_reference_document4 = ReferenceDocument(name=name4, user=user,
path=path4)
new_reference_document4.save()
name5 = Name("Attendance Sheet")
path5 = "/static/uploads/1440045279-57.pdf"
new_reference_document5 = ReferenceDocument(name=name5, user=user,
path=path5)
new_reference_document5.save()
name6 = Name("College Report Format")
path6 = "/static/uploads/1437747400-13.docx"
new_reference_document6 = ReferenceDocument(name=name6, user=user,
path=path6)
new_reference_document6.save()
user7 = User.get_by_id(4)
name7 = Name("Attendance Sheet")
path7 = "/static/uploads/Screenshot-1.png"
new_reference_document7 = ReferenceDocument(name=name7, user=user7,
path=path7)
new_reference_document7.save()
user8 = User.get_by_id(4)
name8 = Name("College Report")
path8 = "/static/uploads/APSEC2015-Visa-Invitation letter to Dongjin Yu.pdf"
new_reference_document8 = ReferenceDocument(name=name8, user=user8,
path=path8)
new_reference_document8.save()
user9 = User.get_by_id(6)
name9 = Name("College Report")
path9 = "/static/uploads/IMG_20151003_213714039_HDR.jpg"
new_reference_document9 = ReferenceDocument(name=name9, user=user9,
path=path9)
new_reference_document9.save()
user10 = User.get_by_id(6)
name10 = Name("College Report")
path10 = "/static/uploads/IMG_20151003_213448124_HDR.jpg"
new_reference_document10 = ReferenceDocument(name=name10, user=user10,
path=path10)
new_reference_document10.save()
user11 = User.get_by_id(6)
name11 = Name("College Report")
path11 = "/static/uploads/Bits Clg report.pdf"
new_reference_document11 = ReferenceDocument(name=name11, user=user11,
path=path11)
new_reference_document11.save()
user12 = User.get_by_id(15)
name12 = Name("College Report")
path12 = "/static/uploads/APSEC2015-Visa-Invitation letter to Dongjin Yu.pdf"
new_reference_document12 = ReferenceDocument(name=name12, user=user12,
path=path12)
new_reference_document12.save()
if __name__ == "__main__":
    # connection1/cursor1: the legacy database that the populate_*
    # functions read from (they use cursor1 as a module-level global).
    connection1 = MySQLdb.connect(host="localhost", user="root",
                                  passwd="root", db="old_outreach")
    cursor1 = connection1.cursor()
    # connection2/cursor2: the new database; opened here but never used
    # by the populate_* functions (writes go through the ORM .save()).
    connection2 = MySQLdb.connect(host="localhost", user="root",
                                  passwd="root", db="outreach")
    cursor2 = connection2.cursor()
    # Make sure all tables exist before migrating data into them.
    db.create_all(app=create_app(config))
    populate_users()
    populate_workshops()
    populate_nodal_centres()
    populate_nodal_coordinator_details()
    populate_workshop_reports()
    populate_reference_documents()
    cursor1.close()
    connection1.close()
    cursor2.close()
    connection2.close()
import requests
import json
import yaml
# One-off sync: copy every nodal centre from the production map service
# to the test instance, preserving record ids.
#
# Fixes over the original draft:
# - dropped the pointless yaml.safe_load(json.dumps(...)) round-trip;
#   requests' .json() already returns a plain dict;
# - stopped rebinding the loop variable (the list item was shadowed by
#   the detail record fetched inside the loop);
# - the constant auth headers are built once, outside the loop.
src_url = "http://outreach-map.base4.vlabs.ac.in/nodal_centres"
src_url_by_id = src_url + "/"
dest_url = "http://outreach-map-test.base4.vlabs.ac.in/nodal_centres/"

# Credentials expected by the destination service.
headers = {'email': 'dummyuser@gmail.com', 'key': 'vlead123'}

for centre in requests.get(src_url).json():
    # Fetch the full record for this centre from the detail endpoint.
    detail = requests.get(src_url_by_id + str(centre['id'])).json()
    # NOTE: 'lattitude' (sic) is the field name the service actually uses.
    payload = {'name': detail['name'],
               'location': detail['location'],
               'pincode': detail['pincode'],
               'lattitude': detail['lattitude'],
               'longitude': detail['longitude']}
    response = requests.put(dest_url + str(detail['id']),
                            data=json.dumps(payload),
                            headers=headers)
    print(detail['id'])
    print(response.status_code)
#!/usr/bin/python
import MySQLdb
from src.app import create_app
from src.db import *
import src.config as config
def populate_users():
    """Schema migration: add a 10-character phone column to users.

    NOTE(review): despite the name (kept for consistency with the other
    scripts), this alters the table rather than populating it. Uses the
    module-level ``cursor1`` set up in the ``__main__`` block below.
    """
    print "Populating users table.."
    cursor1.execute("alter table users add phone varchar(10)")
if __name__ == "__main__":
    # Open the new outreach database; populate_users() reads cursor1 as
    # a module-level global.
    connection1 = MySQLdb.connect(host="localhost", user="root",
                                  passwd="root", db="outreach")
    cursor1 = connection1.cursor()
    db.create_all(app=create_app(config))
    populate_users()
    cursor1.close()
    connection1.close()
#!/usr/bin/python
import MySQLdb
from src.app import create_app
from src.db import *
import src.config as config
def populate_analytics():
print "Populating analytics table.."
cursor1.execute("
CREATE TABLE analytics (
id INT PRIMARY KEY AUTO_INCREMENT,
name VARCHAR(128) NOT NULL,
total_value INT NOT NULL
);")
cursor1.execute("INSERT INTO analytics SELECT 1, "WORKSHOPS RUN", COUNT(*) FROM workshops where status_id=3;")
cursor1.execute("INSERT INTO analytics SELECT 2, "UPCOMING WORKSHOPS", COUNT(*) FROM workshops where status_id=1;")
cursor1.execute("INSERT INTO analytics SELECT 3, "NODAL CENTRES", COUNT(*) FROM nodal_centres;")
cursor1.execute("INSERT INTO analytics SELECT 4, "PARTICIPANTS ATTENDED", SUM(participants_attended) FROM workshops;")
cursor1.execute("INSERT INTO analytics SELECT 5, "USAGE", SUM(experiments_conducted) FROM workshops;")
cursor1.execute("
DROP PROCEDURE refresh_analytics_now;
DELIMITER $$
CREATE PROCEDURE refresh_analytics_now (
OUT rc INT
)
BEGIN
TRUNCATE TABLE analytics;
INSERT INTO analytics SELECT 1, "WORKSHOPS RUN", COUNT(*) FROM workshops where status_id=3;
INSERT INTO analytics SELECT 2, "UPCOMING WORKSHOPS", COUNT(*) FROM workshops where status_id=1;
INSERT INTO analytics SELECT 3, "NODAL CENTRES", COUNT(*) FROM nodal_centres;
INSERT INTO analytics SELECT 4, "PARTICIPANTS ATTENDED", SUM(participants_attended) FROM workshops;
INSERT INTO analytics SELECT 5, "USAGE", SUM(experiments_conducted) FROM workshops;
SET rc = 0;
END;
$$
DELIMITER ;
")
cursor1.execute("CALL refresh_analytics_now(@rc);")
if __name__ == "__main__":
    # Open the outreach database; populate_analytics() reads cursor1 as
    # a module-level global.
    connection1 = MySQLdb.connect(host="localhost", user="root",
                                  passwd="root", db="outreach")
    cursor1 = connection1.cursor()
    db.create_all(app=create_app(config))
    populate_analytics()
    cursor1.close()
    connection1.close()
The following shell script executes the
refresh_analytics_now
procedure, which is defined in the
section above, to update the analytics.
#!/bin/bash
# update_analytics.sh: refresh the three analytics summary tables by
# calling their stored procedures. Invoked periodically from cron.
mysql --user="root" --password="root" --database="outreach" --execute="CALL refresh_institute_analytics_now(@rc);CALL refresh_analytics_now(@rc);CALL refresh_nc_analytics_now(@rc);"
The following commands set up a cron job that updates the analytics every 4 hours.
cd /var/www/ chmod 755 update_analytics.sh crontab -l crontab -e * */4 * * * /var/www/update_analytics.sh
Install all dependencies, including the OS related packages, Python packages, setup the database, configure the webserver, and finally deploy the application.
#!/bin/bash
# Shell script to install deb package dependencies as well as python package
# dependencies for dataservice.
#
# Fixes over the original:
# - when db_pass.txt already exists (re-run), $DBPASS is now read back
#   from it; previously it stayed empty and debconf seeded an empty
#   MySQL root password;
# - the script returns to its starting directory after each source
#   build; previously it stayed inside build/pip/pip-9.0.1, so the
#   oursql build tree ended up nested inside the pip source tree.

# if any proxy server
#PROXY=""

# file to store the generated password
DB_PASS_FILE="db_pass.txt"

if [[ `id -u` -ne 0 ]]; then
    echo "You have to execute this script as super user!"
    exit 1;
fi

# Remember where we started so the build steps below can return here.
START_DIR=$(pwd)

# Update the packages
echo "Updating package cache.."
apt-get -y update
if [[ $? -ne 0 ]]; then
    echo "Updating package cache failed!"
    exit 1;
fi

echo "Installing MySQL database.."
if [ ! -f $DB_PASS_FILE ]; then
    # generate a random password for the database and store it in the
    # $DB_PASS_FILE file
    # DBPASS=$(date +%s | sha256sum | head -c 32)
    DBPASS="root"
    echo $DBPASS > $DB_PASS_FILE
else
    # Re-run: read the previously stored password back.
    DBPASS=$(cat $DB_PASS_FILE)
fi

# Install MySQL Server in a Non-Interactive mode.
echo "mysql-server mysql-server/root_password password $DBPASS" | sudo debconf-set-selections
echo "mysql-server mysql-server/root_password_again password $DBPASS" | sudo debconf-set-selections
apt-get install -y mysql-server
if [[ $? -ne 0 ]]; then
    echo "FATAL: MySQL installation failed!"
    exit 1;
fi

# Install pre-requsite dependencies: python-dev, mysqld-dev, setuptools,
# apache, mod_wsgi etc.
echo "Installing PIP"
apt-get remove -y python-pip
apt-get purge python-pip
mkdir -p build/pip
cd build/pip
wget https://pypi.python.org/packages/11/b6/abcb525026a4be042b486df43905d6893fb04f05aac21c32c638e939e447/pip-9.0.1.tar.gz
tar xvf pip-9.0.1.tar.gz
cd pip-9.0.1
python setup.py install
if [ $? -ne 0 ]; then
    echo "FATAL: PIP installation failed!"
    exit 1;
fi
cd "$START_DIR"

echo "Installing pre-requisite dependencies.."
apt-get install -y python-dev libmysqld-dev python-setuptools apache2 libapache2-mod-wsgi python-mysqldb
if [[ $? -ne 0 ]]; then
    echo "FATAL: Installing pre-requisite dependencies failed!"
    exit 1;
fi

echo "Enabling the mod WSGI on apache"
a2enmod wsgi
if [[ $? -ne 0 ]]; then
    echo "FATAL: Unable to enable mod wsgi!"
    exit 1;
fi

# Installing python dependencies
echo "Installing dependencies.."
#export http_proxy=$PROXY
#export https_proxy=$PROXY
#python setup.py install
mkdir -p build/oursql
cd build/oursql
wget https://pypi.python.org/packages/8c/88/9f53a314a2af6f56c0a1249c5673ee384b85dc791bac5c1228772ced3502/oursql-0.9.3.2.tar.gz#md5=ade5959a6571b1626966d47f3ab2d315
tar xvf oursql-0.9.3.2.tar.gz
cd oursql-0.9.3.2
python setup.py install
cd "$START_DIR"
pip install Flask Flask-SQLAlchemy oursql requests flask-cors flask-testing Flask-OAuthlib
if [[ $? -ne 0 ]]; then
    echo "FATAL: Installation failed!"
    exit 1;
fi
exit 0
The following program configures the application, configures the web server to use WSGI and use the application scripts, and finally calls the database setup scripts to actually setup the database with tables.
#!/bin/bash
# Configure the application in the deployment environment
# 1. Update the config.py file with appropriate values
# 2. Update the apache config to serve via WSGI
# 3. Run the database setup scripts to setup the database
if [[ `id -u` -ne 0 ]]; then
    echo "You have to execute this script as super user!"
    exit 1;
fi
# Absolute path of the directory containing this script.
ABS_PATH_DS=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
update_app_config () {
    # Fill in the <placeholder> values in src/config.py with the
    # deployment database credentials and disable SQLAlchemy echo.
    # The DB_* variables set here are also used later by setup_db
    # (bash variables assigned in a function are global by default).
    CONFIG_FILE="src/config.py"
    DB_USER="root"
    DB_PASS=$(cat db_pass.txt)
    DB_NAME="outreach"
    DB_SERVER="localhost"
    # the list of white-listed IPs for POST/PUT requests to data service
    WHITELIST_IPS="['127.0.0.1']"
    # the list of allowed domains for CORS
    ALLOWED_ORIGINS="['*']"
    echo "Updating config.py.."
    # Update parts of the DB URI
    sed -i "s/<userid>/$DB_USER/" $ABS_PATH_DS/$CONFIG_FILE
    sed -i "s/<password>/$DB_PASS/" $ABS_PATH_DS/$CONFIG_FILE
    sed -i "s/<servername>/$DB_SERVER/" $ABS_PATH_DS/$CONFIG_FILE
    sed -i "s/<db_name>/$DB_NAME/" $ABS_PATH_DS/$CONFIG_FILE
    # update SQLALCHEMY_ECHO
    sed -i "s/^SQLALCHEMY_ECHO.*$/SQLALCHEMY_ECHO = False/" $ABS_PATH_DS/$CONFIG_FILE
    # update WHITELIST_IPS
    #sed -i "s/^WHITELIST_IPS.*$/WHITELIST_IPS = $WHITELIST_IPS/" $CONFIG_FILE
    # update ALLOWED_ORIGINS
    #sed -i "s/^ALLOWED_ORIGINS.*$/ALLOWED_ORIGINS = $ALLOWED_ORIGINS/" $CONFIG_FILE
    # NOTE: this is hardcoded now..somehow the log file when dynamically created
    # is owned by root. then the app fails to run.. hence the following is
    # necessary
    touch /var/www/outreach.log
    chmod a+w /var/www/outreach.log
}
update_apache_config() {
    # Register the portal with Apache: insert a WSGIScriptAlias just
    # before the closing </VirtualHost> tag of the default vhost so the
    # app is served at the site root via mod_wsgi.
    PROC_NAME="outreach"
    WSGI_SCRIPT="outreach.wsgi"
    APACHE_VHOST_FILE="/etc/apache2/sites-available/000-default.conf"
    # NOTE: the continuation lines below are part of the quoted sed
    # argument; their leading whitespace is inserted verbatim into the
    # vhost file, so they are intentionally left unindented.
    sed -i "/<\/VirtualHost>/i \
WSGIScriptAlias / $ABS_PATH_DS/$WSGI_SCRIPT
" $APACHE_VHOST_FILE
    #sed -i '/<\/VirtualHost>/i \
    # WSGIDaemonProcess $PROC_NAME user=www-data group=www-data threads=5
    # WSGIScriptAlias / $ABS_PATH_DS/$WSGI_SCRIPT
    # <Directory $ABS_PATH_DS>
    # WSGIProcessGroup $PROC_NAME
    # WSGIApplicationGroup %{GLOBAL}
    # Order deny,allow
    # Allow from all
    # </Directory>
    #' $APACHE_VHOST_FILE
}
setup_db() {
    # Create the application database. Relies on DB_USER/DB_PASS/DB_NAME
    # having been set (globally) by update_app_config, which the driver
    # below always runs first.
    echo "Creating database: $DB_NAME"
    mysql -u $DB_USER -p$DB_PASS -Bse "create database $DB_NAME;"
    if [[ $? -ne 0 ]]; then
        echo "Failed to create database $DB_NAME"
        exit 1;
    fi
}
# Run the configuration steps in order, aborting on the first failure.
update_app_config
if [[ $? -ne 0 ]]; then
    echo "FATAL: Failed to update application config.py"
    exit 1;
fi
update_apache_config
if [[ $? -ne 0 ]]; then
    echo "FATAL: Failed to update apache config"
    exit 1;
fi
# Pick up the new vhost configuration, then create the database.
service apache2 restart
setup_db
exit 0;
To protect the server from attackers, the following rules have been set on the Apache server. The steps were taken from the reference below.
- Disable server version
- Deny unwanted http methods requests
- Mitigate clickjacking.
- Set CORS to only allowed domains
- Enable XSS protection.
- Install LibreOffice and use the following commands to convert files from doc and docx to pdf.
sudo apt-get install libreoffice-writer lowriter --headless --convert-to pdf *.doc lowriter --headless --convert-to pdf *.docx rm -rf *.doc rm -rf *.docx
- Also Update the files path in database using following commands.
mysql -u root -proot use outreach; update workshop_reports set path='%.pdf' where path like '%.doc'; update workshop_reports set path='%.pdf' where path like'%.docx'; update reference_documents set path='%.pdf' where path like '%.doc'; update reference_documents set path='%.pdf' where path like '%.docx';
sudo apt-get intall apache2 -Done sudo apt-get install make -Done sudo apt-get install emacs - Done git clone https://github.com/vlead/outreach-portal.git - Done cd outreach-portal/scripts rsync -avz apache2.conf /etc/apache2/ rsync -avz security.conf /etc/apache2/conf-available/security.conf apt-get install libapache2-modsecurity -y a2enmod headers a2enmod rewrite service apache2 restart apt-get install -y python-dev libmysqld-dev python-setuptools apache2 libapache2-mod-wsgi python-mysqldb sudo apt-get install python-pip -Done pip install Flask Flask-SQLAlchemy oursql requests flask-cors flask-testing Flask-OAuthlib - Done sudo apt-get install mysql-server cd .. make cp -R build/code/* /var/www/ chmod -R 755 /var/www mkdir /var/www/logs chmod -R 777 /var/www/logs mkdir /var/www/src/static/uploads chmod -R 777 /var/www/src/static/uploads
emacs /etc/apache2/sites-available/000-default.conf search for virtual host and add below line WSGIScriptAlias / /var/www/outreach.wsgi </VirtualHost> Also update the document root if /var/www/html to /var/www
cd /var/www/src/ vim config.py take a copy of config cp config.py ../../
mysql -u root -proot create database outreach; mysql -u root -proot outreach < outreach-25-02-2019.sql
# Edit this file to introduce tasks to be run by cron.
#
# Each task to run has to be defined through a single line
# indicating with different fields when the task will be run
# and what command to run for the task
#
# To define the time you can provide concrete values for
# minute (m), hour (h), day of month (dom), month (mon),
# and day of week (dow) or use '*' in these fields (for 'any').#
# Notice that tasks will be started based on the cron's system
# daemon's notion of time and timezones.
#
# Output of the crontab jobs (including errors) is sent through
# email to the user the crontab file belongs to (unless redirected).
#
# For example, you can run a backup of all your user accounts
# at 5 a.m every week with:
# 0 5 * * 1 tar -zcf /var/backups/home.tgz /home/
#
# For more information see the manual pages of crontab(5) and cron(8)
#
# m h dom mon dow command
# FIX: the minute field was '*', which runs each job once per minute for
# the entire trigger hour; '0' runs it once at the top of that hour.
0 */6 * * * sh /root/labs/outreach-portal/scripts/backup
*/30 * * * * sh /var/www/update_analytics.sh
0 */12 * * * sh /root/disk-space.sh
0 */4 * * * aws s3 sync /var/www/src/static/uploads s3://outreachuploads/
Unlike 14.04, 16.04 now splits up the configuration so it
is not all in the /etc/mysql/my.cnf
file, but most of it is
in directories that are included as shown in the default
my.cnf
file below:
#
# The MySQL database server configuration file.
#
# You can copy this to one of:
# - "/etc/mysql/my.cnf" to set global options,
# - "~/.my.cnf" to set user-specific options.
#
# One can use all long options that the program supports.
# Run program with --help to get a list of available options and with
# --print-defaults to see which it would actually understand and use.
#
# For explanations see
# http://dev.mysql.com/doc/mysql/en/server-system-variables.html
#
# * IMPORTANT: Additional settings that can override those from this file!
# The files must end with '.cnf', otherwise they'll be ignored.
#
!includedir /etc/mysql/conf.d/
!includedir /etc/mysql/mysql.conf.d/
Most of the settings you are looking for are now found in
/etc/mysql/mysql.conf.d/mysqld.cnf
. The contents are shown
below:
#
# The MySQL database server configuration file.
#
# You can copy this to one of:
# - "/etc/mysql/my.cnf" to set global options,
# - "~/.my.cnf" to set user-specific options.
#
# One can use all long options that the program supports.
# Run program with --help to get a list of available options and with
# --print-defaults to see which it would actually understand and use.
#
# For explanations see
# http://dev.mysql.com/doc/mysql/en/server-system-variables.html
# This will be passed to all mysql clients
# It has been reported that passwords should be enclosed with ticks/quotes
# escpecially if they contain "#" chars...
# Remember to edit /etc/mysql/debian.cnf when changing the socket location.
# Here is entries for some specific programs
# The following values assume you have at least 32M ram
[mysqld_safe]
socket = /var/run/mysqld/mysqld.sock
nice = 0
[mysqld]
#
# * Basic Settings
#
user = mysql
pid-file = /var/run/mysqld/mysqld.pid
socket = /var/run/mysqld/mysqld.sock
port = 3306
basedir = /usr
datadir = /var/lib/mysql
tmpdir = /tmp
lc-messages-dir = /usr/share/mysql
skip-external-locking
#
# Instead of skip-networking the default is now to listen only on
# localhost which is more compatible and is not less secure.
bind-address = 127.0.0.1
#
# * Fine Tuning
#
key_buffer_size = 16M
max_allowed_packet = 16M
thread_stack = 192K
thread_cache_size = 8
# This replaces the startup script and checks MyISAM tables if needed
# the first time they are touched
myisam-recover-options = BACKUP
#max_connections = 100
#table_cache = 64
#thread_concurrency = 10
#
# * Query Cache Configuration
#
query_cache_limit = 1M
query_cache_size = 16M
#
# * Logging and Replication
#
# Both location gets rotated by the cronjob.
# Be aware that this log type is a performance killer.
# As of 5.1 you can enable the log at runtime!
#general_log_file = /var/log/mysql/mysql.log
#general_log = 1
#
# Error log - should be very few entries.
#
log_error = /var/log/mysql/error.log
#
# Here you can see queries with especially long duration
#log_slow_queries = /var/log/mysql/mysql-slow.log
#long_query_time = 2
#log-queries-not-using-indexes
#
# The following can be used as easy to replay backup logs or for replication.
# note: if you are setting up a replication slave, see README.Debian about
# other settings you may need to change.
#server-id = 1
#log_bin = /var/log/mysql/mysql-bin.log
expire_logs_days = 10
max_binlog_size = 100M
#binlog_do_db = include_database_name
#binlog_ignore_db = include_database_name
#
# * InnoDB
#
# InnoDB is enabled by default with a 10MB datafile in /var/lib/mysql/.
# Read the manual for more InnoDB related options. There are many!
#
# * Security Features
#
# Read the manual, too, if you want chroot!
# chroot = /var/lib/mysql/
#
# For generating SSL certificates I recommend the OpenSSL GUI "tinyca".
#
# ssl-ca=/etc/mysql/cacert.pem
# ssl-cert=/etc/mysql/server-cert.pem
# ssl-key=/etc/mysql/server-key.pem
- Install awscli
sudo apt-get install awscli
- Copy IAM user credentials who has read-write access to s3 bucket.
Access Key Id Secret Access Key <key id> <secret key>
- configure AWS credentials as follows
emacs ~/.aws/credentials
copy the below content
[default] aws_access_key_id=AKIAIOSFODNN7EXAMPLE aws_secret_access_key=wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY
- configure AWS config as follows
~/.aws/config
[default] region=us-west-2 output=json
- Run
aws configure
and enter the above credentials. For region, use 'us-east-1'. - Check that you have access to the s3 bucket with
aws s3 ls
. - Copy the entire directory. We recommend using
sync
rather than cp
.
aws s3 sync /var/www/src/static/uploads s3://outreachuploads/