General improvements: extended the exposed latency metrics and allowed filtering benchmark runs by client count. #101

Open · wants to merge 3 commits into master (diff shown: changes from 1 commit)
10 changes: 9 additions & 1 deletion benchmark/dataset.py
@@ -26,6 +26,12 @@ class DatasetConfig:
READER_TYPE = {"h5": AnnH5Reader, "jsonl": JSONReader, "tar": AnnCompoundReader}


# Progress-bar hook passed to urllib.request.urlretrieve
def show_progress(block_num, block_size, total_size):
percent = round(block_num * block_size / total_size * 100, 2)
print(f"{percent} %", end="\r")


class Dataset:
def __init__(self, config: dict):
self.config = DatasetConfig(**config)
@@ -39,7 +45,9 @@ def download(self):

if self.config.link:
print(f"Downloading {self.config.link}...")
-tmp_path, _ = urllib.request.urlretrieve(self.config.link)
+tmp_path, _ = urllib.request.urlretrieve(
+    self.config.link, None, show_progress
+)

if self.config.link.endswith(".tgz") or self.config.link.endswith(
".tar.gz"
15 changes: 14 additions & 1 deletion engine/base_client/client.py
@@ -60,6 +60,7 @@ def run_experiment(
skip_upload: bool = False,
skip_search: bool = False,
skip_if_exists: bool = True,
parallels: List[int] = [],
):
execution_params = self.configurator.execution_params(
distance=dataset.config.distance, vector_size=dataset.config.vector_size
@@ -96,7 +97,6 @@ def run_experiment(
if not skip_search:
print("Experiment stage: Search")
for search_id, searcher in enumerate(self.searchers):

if skip_if_exists:
glob_pattern = (
f"{self.name}-{dataset.config.name}-search-{search_id}-*.json"
@@ -110,12 +110,25 @@ def run_experiment(
continue

search_params = {**searcher.search_params}
ef = "default"
if "search_params" in search_params:
ef = search_params["search_params"].get("ef", "default")
client_count = search_params.get("parallel", 1)
filter_client_count = len(parallels) > 0
if filter_client_count and (client_count not in parallels):
print(f"\tSkipping ef runtime: {ef}; #clients {client_count}")
continue
print(f"\tRunning ef runtime: {ef}; #clients {client_count}")
Review comment (Member):
Could you please explain what you are trying to do here?

Reply (Contributor, author):
This is to allow running only specific client configurations; the rest is just extra logging.


search_stats = searcher.search_all(
dataset.config.distance, reader.read_queries()
)
# ensure we specify the client count in the results
search_params["parallel"] = client_count
self.save_search_results(
dataset.config.name, search_stats, search_id, search_params
)

print("Experiment stage: Done")
print("Results saved to: ", RESULTS_DIR)

3 changes: 1 addition & 2 deletions engine/base_client/search.py
@@ -52,7 +52,6 @@ def _search_one(cls, query, top: Optional[int] = None):
if query.expected_result:
ids = set(x[0] for x in search_res)
precision = len(ids.intersection(query.expected_result[:top])) / top

return precision, end - start

def search_all(
@@ -62,7 +61,6 @@ def search_all(
):
parallel = self.search_params.pop("parallel", 1)
top = self.search_params.pop("top", None)

# setup_search may require initialized client
self.init_client(
self.host, distance, self.connection_params, self.search_params
@@ -108,6 +106,7 @@ def search_all(
"min_time": np.min(latencies),
"max_time": np.max(latencies),
"rps": len(latencies) / total_time,
"p50_time": np.percentile(latencies, 50),
"p95_time": np.percentile(latencies, 95),
"p99_time": np.percentile(latencies, 99),
"precisions": precisions,
15 changes: 10 additions & 5 deletions engine/base_client/upload.py
@@ -53,12 +53,15 @@ def upload(
self.upload_params,
),
) as pool:
-latencies = list(
-    pool.imap(
-        self.__class__._upload_batch,
-        iter_batches(tqdm.tqdm(records), batch_size),
-    )
-)
+try:
+    latencies = list(
+        pool.imap(
+            self.__class__._upload_batch,
+            iter_batches(tqdm.tqdm(records), batch_size),
+        )
+    )
+except Exception as e:
+    raise e

upload_time = time.perf_counter() - start

@@ -77,6 +80,8 @@ def upload(
"upload_time": upload_time,
"total_time": total_time,
"latencies": latencies,
"parallel": parallel,
"batch_size": batch_size,
}

@classmethod
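
With the two new keys recorded, the upload stats dict has roughly this shape (all values illustrative):

    upload_stats = {
        "upload_time": 12.3,            # seconds spent uploading batches
        "total_time": 15.7,             # upload plus any post-upload work
        "latencies": [0.8, 0.9, 0.7],   # per-batch upload times
        "parallel": 4,                  # newly recorded: upload worker count
        "batch_size": 64,               # newly recorded: records per batch
    }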