From db6e0fcbaa20784d9aab53ef7b44bb59a5b26256 Mon Sep 17 00:00:00 2001
From: mrbreo
Date: Sat, 21 Dec 2024 16:18:59 +0000
Subject: [PATCH 1/3] =?UTF-8?q?chore:=20=E2=99=BB=EF=B8=8F=20=20updated=20?=
 =?UTF-8?q?bittensor=20version?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 logicnet/base/validator.py     |  7 +++++--
 logicnet/utils/regex_helper.py | 10 ++++++++++
 logicnet/validator/rewarder.py | 13 +++++++++++--
 neurons/validator/validator.py |  6 ++++--
 requirements.txt               |  6 +++---
 5 files changed, 33 insertions(+), 9 deletions(-)
 create mode 100644 logicnet/utils/regex_helper.py

diff --git a/logicnet/base/validator.py b/logicnet/base/validator.py
index 2163139d..2d33db13 100644
--- a/logicnet/base/validator.py
+++ b/logicnet/base/validator.py
@@ -27,7 +27,7 @@ def __init__(self, config=None):
 
         # Set up initial scoring weights for validation
         bt.logging.info("\033[1;32m⚖️ Building validation weights.\033[0m")
-        self.scores = torch.zeros_like(self.metagraph.S, dtype=torch.float32)
+        self.scores = torch.zeros_like(self.metagraph.S.clone().detach(), dtype=torch.float32)
 
         # Init sync with the network. Updates the metagraph.
         self.resync_metagraph()
@@ -205,12 +205,15 @@ def set_weights(self):
         bt.logging.trace("top10 values", raw_weights.sort()[0])
         bt.logging.trace("top10 uids", raw_weights.sort()[1])
 
+        # Convert uids to a PyTorch tensor before processing
+        uids_tensor = self.metagraph.uids.clone().detach()
+
         # Process the raw weights to final_weights via subtensor limitations.
         (
             processed_weight_uids,
             processed_weights,
         ) = bt.utils.weight_utils.process_weights_for_netuid(
-            uids=self.metagraph.uids.to("cpu"),
+            uids=uids_tensor.to("cpu"),
             weights=raw_weights.to("cpu"),
             netuid=self.config.netuid,
             subtensor=self.subtensor,
diff --git a/logicnet/utils/regex_helper.py b/logicnet/utils/regex_helper.py
new file mode 100644
index 00000000..3308724d
--- /dev/null
+++ b/logicnet/utils/regex_helper.py
@@ -0,0 +1,10 @@
+import re
+
+def extract_numerical_part(text):
+    # Use regex to find the first occurrence of a number
+    match = re.search(r'[-+]?\d*\.?\d+|\d+', text)
+    if match:
+        return match.group(0)
+    else:
+        # Return a specific message or None if no numerical value is found
+        return "No numerical value found"
\ No newline at end of file
diff --git a/logicnet/validator/rewarder.py b/logicnet/validator/rewarder.py
index 1010dad4..a1f3d07b 100644
--- a/logicnet/validator/rewarder.py
+++ b/logicnet/validator/rewarder.py
@@ -6,6 +6,7 @@
 from logicnet.protocol import LogicSynapse
 from sentence_transformers import SentenceTransformer
 from logicnet.utils.model_selector import model_selector
+from logicnet.utils.regex_helper import extract_numerical_part
 
 SIMILARITY_WEIGHT = 0.2
 CORRECTNESS_WEIGHT = 0.8
@@ -235,8 +236,16 @@ def _compare_numerical_answers(self, ground_truth: str, miner_answer: str):
         for char in formatting_chars:
             ground_truth = ground_truth.replace(char, '')
             miner_answer = miner_answer.replace(char, '')
-        gt_value = sympy.sympify(ground_truth.strip())
-        miner_value = sympy.sympify(miner_answer.strip())
+
+        # Extract numerical values
+        gt_value_str = extract_numerical_part(ground_truth)
+        miner_value_str = extract_numerical_part(miner_answer)
+
+        if gt_value_str is None or miner_value_str is None:
+            raise ValueError("No numerical value found in one of the answers.")
+
+        gt_value = sympy.sympify(gt_value_str)
+        miner_value = sympy.sympify(miner_value_str)
 
         abs_difference = abs(gt_value - miner_value)
         epsilon = 1e-8
diff --git a/neurons/validator/validator.py b/neurons/validator/validator.py
index fe3077f1..b326620c 100644
--- a/neurons/validator/validator.py
+++ b/neurons/validator/validator.py
@@ -1,4 +1,6 @@
 import os
+from dotenv import load_dotenv
+load_dotenv()
 import time
 import threading
 import datetime
@@ -193,7 +195,7 @@ def async_query_and_reward(
         )
         if not synapse:
             continue
-        base_synapse = synapse.copy()
+        base_synapse = synapse.model_copy()
         synapse = synapse.miner_synapse()
         bt.logging.info(f"\033[1;34m🧠 Synapse to be sent to miners: {synapse}\033[0m")
         axons = [self.metagraph.axons[int(uid)] for uid in uids]
@@ -378,7 +380,7 @@ def load_state(self):
         bt.logging.info(
             "\033[1;32m🧠 Loading validator state from: " + path + "\033[0m"
         )
-        state = torch.load(path)
+        state = torch.load(path, weights_only=True)  # Set weights_only=True
         self.step = state["step"]
         all_uids_info = state["all_uids_info"]
         for k, v in all_uids_info.items():
diff --git a/requirements.txt b/requirements.txt
index 54752a79..2a703f44 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,11 +1,11 @@
-bittensor==6.9.4
+bittensor==8.5.1
 Pillow==10.2.0
 PyYAML==6.0.1
-setuptools==68.0.0
+setuptools==70.0.0
 slowapi==0.1.8
 tqdm==4.65.0
 httpx==0.26.0
-numpy==1.26.4
+numpy==2.0.1
 openai==1.35.14
 sentence-transformers==3.0.1
 python-dotenv==1.0.1

From 8a5fec66001b63453d7d2fcbb8f75bef73a668c0 Mon Sep 17 00:00:00 2001
From: mrbreo
Date: Sat, 21 Dec 2024 16:25:26 +0000
Subject: [PATCH 2/3] =?UTF-8?q?docs:=20=E2=99=BB=EF=B8=8F=20=20updated=20v?=
 =?UTF-8?q?alidator=20readme?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 docs/VALIDATOR.md | 1 +
 1 file changed, 1 insertion(+)

diff --git a/docs/VALIDATOR.md b/docs/VALIDATOR.md
index 2f2c4f75..252f6777 100644
--- a/docs/VALIDATOR.md
+++ b/docs/VALIDATOR.md
@@ -107,6 +107,7 @@ Using Together AI and Open AI simplifies setup and reduces local resource requir
    echo "TOGETHERAI_API_KEY=your_together_ai_api_key" > .env
    echo "OPENAI_API_KEY=your_openai_api_key" >> .env
    echo "HF_TOKEN=your_hugging_face_token" >> .env (needed for some vLLM model)
+   echo "USE_TORCH=1" >> .env
    ```
 
 ### Step 3: Run the Validator

From dd3dbf0b0f81cc84de6f408b0b597b9266133dd2 Mon Sep 17 00:00:00 2001
From: LVH-Tony
Date: Thu, 2 Jan 2025 22:21:04 +0000
Subject: [PATCH 3/3] model_copy -> copy

---
 neurons/validator/validator.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/neurons/validator/validator.py b/neurons/validator/validator.py
index b326620c..724bc11f 100644
--- a/neurons/validator/validator.py
+++ b/neurons/validator/validator.py
@@ -195,7 +195,7 @@ def async_query_and_reward(
         )
         if not synapse:
             continue
-        base_synapse = synapse.model_copy()
+        base_synapse = synapse.copy()
         synapse = synapse.miner_synapse()
         bt.logging.info(f"\033[1;34m🧠 Synapse to be sent to miners: {synapse}\033[0m")
         axons = [self.metagraph.axons[int(uid)] for uid in uids]