+import time
 import uuid
 from typing import Any
 
-from fastapi import APIRouter, Depends, Header, HTTPException, Request
+from fastapi import APIRouter, Depends, HTTPException, Request
 from loguru import logger
 
 from prompting.datasets.random_website import DDGDatasetEntry
 from shared import settings
 from shared.base import DatasetEntry
 from shared.dendrite import DendriteResponseEvent
-from shared.epistula import SynapseStreamResult
+from shared.epistula import SynapseStreamResult, verify_signature
+from shared.settings import shared_settings
 
 router = APIRouter()
 
 
-def validate_scoring_key(api_key: str = Header(...)):
-    if api_key != settings.shared_settings.SCORING_KEY:
+async def verify_scoring_signature(request: Request):
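+    """Verify that the request is Epistula-signed for this validator by the expected API hotkey."""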
+    signed_by = request.headers.get("Epistula-Signed-By")
+    signed_for = request.headers.get("Epistula-Signed-For")
+    if signed_for != shared_settings.WALLET.hotkey.ss58_address:
+        raise HTTPException(status_code=400, detail="Bad Request, message is not intended for self")
+    if signed_by != shared_settings.API_HOTKEY:
+        raise HTTPException(status_code=401, detail="Signer not the expected ss58 address")
+
+    body = await request.body()
+    now = time.time()
+    err = verify_signature(
+        request.headers.get("Epistula-Request-Signature"),
+        body,
+        request.headers.get("Epistula-Timestamp"),
+        request.headers.get("Epistula-Uuid"),
+        signed_for,
+        signed_by,
+        now,
+    )
+    if err:
+        logger.error(err)
+        raise HTTPException(status_code=400, detail=err)
+
+
+def validate_scoring_key(request: Request):
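+    # Static API-key check against the validator's configured SCORING_KEY.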
+    if request.headers.get("api_key") != settings.shared_settings.SCORING_KEY:
         raise HTTPException(status_code=403, detail="Invalid API key")
 
 
@@ -27,54 +53,62 @@ def get_task_scorer(request: Request):
 
 @router.post("/scoring")
 async def score_response(
-    request: Request, api_key_data: dict = Depends(validate_scoring_key), task_scorer=Depends(get_task_scorer)
+    request: Request, api_key_data: dict = Depends(verify_scoring_signature), task_scorer=Depends(get_task_scorer)
 ):
+    logger.debug("Scoring request received")
     model = None
+    logger.debug("Set model to None")
     payload: dict[str, Any] = await request.json()
+    logger.debug(f"Awaited body: {payload}")
     body = payload.get("body")
-    timeout = payload.get("timeout", settings.shared_settings.NEURON_TIMEOUT)
-    uids = payload.get("uid", [])
+    timeout = payload.get("timeout", shared_settings.NEURON_TIMEOUT)
+    uids = payload.get("uids", [])
     chunks = payload.get("chunks", {})
+    timings = payload.get("timings", {})
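+    # `chunks` and `timings` are expected to be dicts keyed by stringified uid.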
+    logger.debug("About to check chunks and uids")
     if not uids or not chunks:
         logger.error(f"Either uids: {uids} or chunks: {chunks} is not valid, skipping scoring")
         return
     uids = [int(uid) for uid in uids]
     model = body.get("model")
-    if model:
-        try:
-            llm_model = ModelZoo.get_model_by_id(model)
-        except Exception:
-            logger.warning(
-                f"Organic request with model {body.get('model')} made but the model cannot be found in model zoo. Skipping scoring."
-            )
+    logger.debug("About to check model")
+    if model and model != shared_settings.LLM_MODEL:
+        logger.error(f"Model {model} not available for scoring on this validator.")
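+        # Skip scoring responses produced by a model this validator does not serve.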
         return
-    else:
-        llm_model = None
+    logger.debug("Model has been checked")
+    llm_model = ModelZoo.get_model_by_id(model)
+    logger.debug("Got LLM Model from ModelZoo")
     task_name = body.get("task")
+    logger.debug(f"Task name set: {task_name}")
+    logger.debug(f"Length pre-insertion: {len(task_scorer.scoring_queue)}")
     if task_name == "InferenceTask":
         logger.info(f"Received Organic InferenceTask with body: {body}")
         logger.info(f"With model of type {type(body.get('model'))}")
         organic_task = InferenceTask(
             messages=body.get("messages"),
             llm_model=llm_model,
-            llm_model_id=body.get("model"),
+            llm_model_id=llm_model,
             seed=int(body.get("seed", 0)),
-            sampling_params=body.get("sampling_parameters", settings.shared_settings.SAMPLING_PARAMS),
+            sampling_params=body.get("sampling_parameters", shared_settings.SAMPLING_PARAMS),
             query=body.get("messages"),
+            organic=True,
         )
         logger.info(f"Task created: {organic_task}")
+
         task_scorer.add_to_queue(
             task=organic_task,
             response=DendriteResponseEvent(
                 uids=uids,
                 stream_results=[SynapseStreamResult(accumulated_chunks=chunks.get(str(uid), None)) for uid in uids],
                 timeout=timeout,
+                stream_results_all_chunks_timings=[timings.get(str(uid), None) for uid in uids],
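+                # Forward per-uid chunk timings alongside the accumulated chunks.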
             ),
             dataset_entry=DatasetEntry(),
-            block=settings.shared_settings.METAGRAPH.block,
+            block=shared_settings.METAGRAPH.block,
             step=-1,
             task_id=str(uuid.uuid4()),
         )
+
     elif task_name == "WebRetrievalTask":
         logger.info(f"Received Organic WebRetrievalTask with body: {body}")
         try:
@@ -91,15 +125,14 @@ async def score_response(
                 query=search_term,
             ),
             response=DendriteResponseEvent(
-                uids=[uids],
-                stream_results=[
-                    SynapseStreamResult(accumulated_chunks=[chunk for chunk in chunks if chunk is not None])
-                ],
-                timeout=body.get("timeout", settings.shared_settings.NEURON_TIMEOUT),
+                uids=uids,
+                stream_results=[SynapseStreamResult(accumulated_chunks=chunks.get(str(uid), [])) for uid in uids],
+                timeout=body.get("timeout", shared_settings.NEURON_TIMEOUT),
             ),
             dataset_entry=DDGDatasetEntry(search_term=search_term),
-            block=settings.shared_settings.METAGRAPH.block,
+            block=shared_settings.METAGRAPH.block,
             step=-1,
             task_id=str(uuid.uuid4()),
         )
+    logger.debug(f"Length post-insertion: {len(task_scorer.scoring_queue)}")
     logger.info("Organic task appended to scoring queue")