import os

# Environment config: limit OpenMP to 2 threads. This must be set before
# TensorFlow is imported so the OpenMP runtime picks up the value.
os.environ['OMP_NUM_THREADS'] = '2'

import tensorflow as tf
from transformers import BertTokenizer, TFBertModel
from time import time

tokenizer = BertTokenizer.from_pretrained('bert-base-uncased')
model = TFBertModel.from_pretrained('bert-base-uncased')

text = "Replace me by any text you'd like."
encoded_input = tokenizer(text, return_tensors='tf')

# Time the forward pass and report the per-iteration average.
num_iterations = 1
start_time = time()
for _ in range(num_iterations):
    output = model(encoded_input)
end_time = time()

average_time = (end_time - start_time) / num_iterations
print("Average time (seconds) for BERT inference:", average_time)

# Clean up the environment override.
del os.environ['OMP_NUM_THREADS']
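
# Hedged sketch (an addition to the script above, not part of the original):
# the first forward pass typically pays one-time tracing/initialization costs,
# so a warmup call before the timed loop usually gives a steadier measurement.
# The helper name and default iteration count are illustrative assumptions.
def timed_average(model, encoded_input, num_iterations=10):
    model(encoded_input)                      # warmup run, excluded from timing
    start = time()
    for _ in range(num_iterations):
        model(encoded_input)
    return (time() - start) / num_iterations

print("Warmed-up average time (seconds):", timed_average(model, encoded_input))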