-
Notifications
You must be signed in to change notification settings - Fork 841
Closed as not planned
Description
Problem 1: main_counter does not update at http://localhost:8001/metrics when the script is run in a local environment (Windows). However, the worker counters are updated correctly.
Problem 2: http://localhost:8001/metrics is empty when the script runs as a Docker service with the port forwarded; the request returns an empty response instead of an error.
The latest version of prometheus_client is used.
#main.py
import shutil
import multiprocessing
import threading
import time
import os
import sys
import logging

# PROMETHEUS_MULTIPROC_DIR must be exported *before* prometheus_client is
# imported: the library selects its value backend (in-memory mutex value vs.
# multiprocess mmap files) once, at import time.  Setting the variable after
# the import leaves every metric created in *this* process on the in-memory
# backend, invisible to MultiProcessCollector — which is why main_counter
# never showed up while the worker counters (whose processes inherit the
# already-set variable before their own import) updated correctly.
tmp_db_dir = os.path.join(
    os.path.dirname(__file__), "prometheus_tmp_db")
os.environ["PROMETHEUS_MULTIPROC_DIR"] = tmp_db_dir

from prometheus_client import multiprocess, CollectorRegistry, start_http_server, Counter, REGISTRY

# Port the metrics HTTP endpoint listens on (override with PROMETHEUS_PORT).
port = int(os.getenv("PROMETHEUS_PORT", 8001))

# Dedicated registry fed by the multiprocess collector, which aggregates
# samples from the mmap files written by every process.
registry = CollectorRegistry()
multiprocess.MultiProcessCollector(registry)

main_counter = Counter("show_start", "show start in main")
worker_counter = Counter("worker_counter", "worker's counter to inc")
def test_counter(worker):
    """Increment ``worker_counter`` forever, logging once per iteration.

    Runs inside a worker thread; it never returns.

    :param worker: label used to name the logger and tag log lines.
    """
    # Use getLogger (not the Logger constructor): loggers created via the
    # constructor are detached from the logging hierarchy, and a fresh
    # handler would be attached on every call.
    logger = logging.getLogger(f"test_mp{worker}")
    logger.setLevel(logging.DEBUG)
    if not logger.handlers:
        logger.addHandler(logging.StreamHandler(sys.stdout))
    while True:
        worker_counter.inc()
        time.sleep(5)
        logger.debug(f"[worker_{worker}] inc()")
def producer(process_name: str):
    """Process entry point: run ``test_counter`` in a thread and wait on it.

    ``test_counter`` never returns, so the join blocks for the lifetime of
    the process.
    """
    # Pass the argument via args= instead of capturing it in a lambda.
    producer_thread = threading.Thread(
        target=test_counter, args=(process_name,))
    producer_thread.start()
    producer_thread.join()
def consumer(process_name: str):
    """Process entry point: run ``test_counter`` in a thread and wait on it.

    Identical to ``producer`` apart from the name; ``test_counter`` never
    returns, so the join blocks for the lifetime of the process.
    """
    # Pass the argument via args= instead of capturing it in a lambda.
    consumer_thread = threading.Thread(
        target=test_counter, args=(process_name,))
    consumer_thread.start()
    consumer_thread.join()
if __name__ == '__main__':
    # getLogger keeps the logger in the hierarchy; the constructor form
    # creates a detached logger (and would re-add handlers on re-runs).
    logger = logging.getLogger("test_mp_main")
    logger.setLevel(logging.DEBUG)
    logger.addHandler(logging.StreamHandler(sys.stdout))

    # Start from an empty multiprocess DB directory so stale mmap files
    # from a previous run do not leak dead-process samples into /metrics.
    if os.path.exists(tmp_db_dir):
        shutil.rmtree(tmp_db_dir)
    os.makedirs(tmp_db_dir)

    # Expose the multiprocess registry over HTTP; start_http_server binds
    # all interfaces, so Docker port forwarding can reach it.
    start_http_server(port, registry=registry)

    # Number of worker processes of each kind (env-configurable).
    num_processes_producer = int(os.getenv("PROD_PROCESS", 1))
    num_processes_consumer = int(os.getenv("CONS_PROCESS", 1))

    processes = []
    for i in range(num_processes_producer):
        process = multiprocessing.Process(
            target=producer, args=(f"p{i}",))
        processes.append(process)
        process.start()
    for i in range(num_processes_consumer):
        process = multiprocessing.Process(
            target=consumer, args=(f"c{i}",))
        processes.append(process)
        process.start()

    # Bump the main-process counter ten times, once per second.
    for _ in range(10):
        main_counter.inc()
        logger.debug("main show cnt")
        time.sleep(1)

    # The workers loop forever, so these joins block indefinitely; the
    # program is expected to be stopped externally (Ctrl-C / docker stop).
    for process in processes:
        process.join()
        # If a worker does exit, drop its mmap files so its series are
        # not reported forever.  NOTE: must be the *child's* pid, not
        # multiprocessing.current_process().pid (the parent's).
        multiprocess.mark_process_dead(process.pid)
#dockerfile
# Container image for the prometheus_client multiprocess test script.
FROM python:3
WORKDIR /test
# Copy the application source and the dependency manifest into the image.
COPY . .
RUN pip install -r requirements.txt
# Run the test script as the container's main process.
CMD [ "python", "main.py"]
Metadata
Metadata
Assignees
Labels
No labels