Skip to content

Commit b86fe26

Browse files
author
NevmerzhitskyYura
authored
Merge 467f41e into b256ea9
2 parents b256ea9 + 467f41e commit b86fe26

File tree

9 files changed

+97
-11
lines changed

9 files changed

+97
-11
lines changed

docker-compose.yml

Lines changed: 14 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -52,6 +52,7 @@ services:
5252
condition: service_healthy
5353
scheduler:
5454
condition: service_started
55+
5556
env_file:
5657
- docker.env
5758
ports:
@@ -93,3 +94,16 @@ services:
9394
environment:
9495
LOGGING_CONF: /worker/utils/logging.conf
9596
restart: on-failure
97+
98+
nginx:
99+
build:
100+
context: .
101+
dockerfile: nginx.Dockerfile
102+
container_name: nginx_server
103+
ports:
104+
- 8080:80
105+
106+
restart: on-failure
107+
108+
109+

nginx.Dockerfile

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,6 @@
1+
FROM nginx:latest
2+
3+
COPY ./stock_dashboard_client/build /var/www
4+
COPY nginx.conf /etc/nginx/nginx.conf
5+
EXPOSE 80
6+
ENTRYPOINT ["nginx","-g","daemon off;"]

nginx.conf

Lines changed: 48 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,48 @@
1+
# auto detects a good number of processes to run
2+
worker_processes auto;
3+
4+
#Provides the configuration file context in which the directives that affect connection processing are specified.
5+
events {
6+
# Sets the maximum number of simultaneous connections that can be opened by a worker process.
7+
worker_connections 8;
8+
# Tells the worker to accept multiple connections at a time
9+
multi_accept on;
10+
}
11+
12+
13+
http {
14+
server {
15+
# listen on port 80
16+
listen 80;
17+
# the document root directory to serve files from
18+
root /var/www;
19+
# which files to serve as the index
20+
index index.html index.htm;
21+
22+
location / {
23+
# First attempt to serve request as file, then
24+
# as directory, then fall back to redirecting to index.html
25+
try_files $uri $uri/ /index.html;
26+
}
27+
28+
# Media: images, icons, video, audio, HTC
29+
location ~* \.(?:jpg|jpeg|gif|png|ico|cur|gz|svg|svgz|mp4|ogg|ogv|webm|htc)$ {
30+
expires 1M;
31+
access_log off;
32+
add_header Cache-Control "public";
33+
}
34+
35+
# Javascript and CSS files
36+
location ~* \.(?:css|js)$ {
37+
try_files $uri =404;
38+
expires 1y;
39+
access_log off;
40+
add_header Cache-Control "public";
41+
}
42+
43+
# Any route containing a file extension (e.g. /devicesfile.js)
44+
location ~ ^.+\..+$ {
45+
try_files $uri =404;
46+
}
47+
}
48+
}

stock_dashboard_api/logging.conf

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -23,6 +23,12 @@ handlers=consoleHandler
2323
qualname=workers
2424
propagate=0
2525

26+
[logger_scheduler]
27+
level=INFO
28+
handlers=consoleHandler
29+
qualname=scheduler
30+
propagate=0
31+
2632
[logger_views]
2733
level=INFO
2834
handlers=consoleHandler

stock_dashboard_api/models/stock_model.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -296,7 +296,7 @@ def _fill_gaps_in_data(self, datetime_from, datetime_to, stock_data):
296296
self.name,
297297
gap[datetime_from_index].isoformat(),
298298
gap[datetime_to_index].isoformat())
299-
publish_task(rmq_task.historical_data_task())
299+
scheduler_publish_task(rmq_task.historical_data_task())
300300

301301
@classmethod
302302
def _get_gaps_in_data(cls, datetime_from: datetime, datetime_to: datetime, stock_data: list):

stock_dashboard_api/utils/pool.py

Lines changed: 0 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -32,7 +32,6 @@ def __init__(self):
3232
self.cursor = None
3333

3434
def __enter__(self):
35-
logger.info('Get connection from pool %s', id(self.conn))
3635
for _ in range(TRAILS):
3736
try:
3837
self.conn = Connection.connection_pool.getconn()
@@ -51,11 +50,9 @@ def __exit__(self, exc_type, exc_val, exc_tb):
5150
self.conn.rollback()
5251
self.cursor.close()
5352
Connection.connection_pool.putconn(self.conn)
54-
logger.info('All changes was commited')
5553
self.conn.commit()
5654
self.cursor.close()
5755
Connection.connection_pool.putconn(self.conn)
58-
logger.info('Put connection to pool %s', id(self.conn))
5956

6057

6158
def pool_manager():

workers/scheduler.py

Lines changed: 10 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,7 @@
1010
from utils.check_new_stocks import check_new_stocks
1111
from utils.constants import FETCH_DATA_FOR_PERIOD_TASK, FETCH_NEW_STOCK_TASK, FETCH_HISTORICAL_DATA_TASK
1212
from utils.db_service import get_all_stocks_in_use, get_stocks_data_last_record, stock_get_id
13-
from utils.logger import workers_logger as logger
13+
from utils.logger import scheduler_logger as logger
1414
from utils.worker_queue import worker_publish_task
1515
from utils.worker_task import Task
1616

@@ -32,7 +32,7 @@ def connect_rmq():
3232
channel.queue_declare(queue='scheduler_queue', durable=True)
3333
channel.queue_declare(queue='worker_queue', durable=True)
3434
channel.basic_consume(queue='scheduler_queue', on_message_callback=scheduler_function)
35-
logger.info('Sheduler connect was created')
35+
logger.info('Sheduler connection was created')
3636
channel.start_consuming()
3737

3838

@@ -73,6 +73,14 @@ def fetch_historical_data(data: dict):
7373
data['to'] = (start + datetime.timedelta(days=remainder)).isoformat()
7474
logger.info(f'Send task {data} to workers')
7575
worker_publish_task(json.dumps(data))
76+
data['from'] = start.isoformat()
77+
data['to'] = finish.isoformat()
78+
logger.info(f'Send task {data} to workers')
79+
worker_publish_task(json.dumps(data))
80+
81+
82+
83+
7684

7785

7886
def scheduler_function(ch, method, properties, body): # pylint: disable=C0103, W0613

workers/utils/logger.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@
33
from logging.config import fileConfig
44

55
LOGGING_CONF = os.getenv('LOGGING_CONF')
6-
print(LOGGING_CONF)
76
fileConfig(LOGGING_CONF, disable_existing_loggers=True)
87
pool_logger = logging.getLogger('pool')
98
workers_logger = logging.getLogger('workers')
9+
scheduler_logger = logging.getLogger('scheduler')

workers/worker.py

Lines changed: 11 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,18 +1,24 @@
1+
import datetime
12
import json
23
import os
3-
import time
44

55
import pika
66

7-
from utils.db_service import insert_stock_data
7+
from utils.db_service import insert_stock_data, stock_get_id
8+
from utils.logger import workers_logger as logger
89
from utils.yahoo_finance import data_for_stocks_data_update
910

1011

1112
def worker_function(ch, method, properties, body): # pylint: disable=C0103, W0613
1213
body = json.loads(body)
13-
data = data_for_stocks_data_update(body["stock_name"], body["from"], body["to"])
14+
logger.info(f'Worker Task {body} was received')
15+
stock_id = stock_get_id(body['stock_name'])
16+
start = datetime.datetime.fromisoformat(body['from'])
17+
finish = datetime.datetime.fromisoformat(body['to'])
18+
data = data_for_stocks_data_update(body["stock_name"], start, finish)
1419
for stock in data:
15-
insert_stock_data(stock["stock_id"], stock["price"], stock["created_at"])
20+
insert_stock_data(stock_id, stock["price"], stock["created_at"])
21+
logger.info(f'Worker Task {body} was succeseful done')
1622
ch.basic_ack(delivery_tag=method.delivery_tag)
1723

1824

@@ -23,4 +29,5 @@ def worker_function(ch, method, properties, body): # pylint: disable=C0103, W0
2329
channel = connection.channel()
2430
channel.queue_declare(queue='worker_queue', durable=True)
2531
channel.basic_consume(queue='worker_queue', on_message_callback=worker_function)
32+
logger.info('Worker connection was created')
2633
channel.start_consuming()

0 commit comments

Comments (0)