import time

import av

# First pass: decode the whole file with threading disabled.
container = av.open(r"E:\ori_videos\463.mp4")
# container.streams.video[0].codec_context.skip_frame = "NONKEY"
container.streams.video[0].codec_context.thread_type = "NONE"

start_time = time.time()
for packet in container.demux():
    for frame in packet.decode():
        pass
no_thread_time = time.time() - start_time
container.close()

# Second pass: same file, but with frame-level threading enabled.
container = av.open(r"E:\ori_videos\463.mp4")
# container.streams.video[0].codec_context.skip_frame = "NONKEY"
container.streams.video[0].codec_context.thread_type = "FRAME"

start_time = time.time()
for packet in container.demux():
    for frame in packet.decode():
        pass
frame_thread_time = time.time() - start_time
container.close()

print("Decoded with no threading in {:.2f}s.".format(no_thread_time))
print("Decoded with frame threading in {:.2f}s.".format(frame_thread_time))
Decoded with no threading in 94.13s.
Decoded with frame threading in 19.67s.
This works fine. But when I try to decode only the keyframes, there is almost no difference in decoding time or CPU usage.
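For the keyframe-only numbers below, the only change is uncommenting the skip_frame line, so each pass looks roughly like this (same file; thread_type set to "NONE" or "FRAME" depending on the run):

container = av.open(r"E:\ori_videos\463.mp4")
# Ask the decoder to skip everything except keyframes.
container.streams.video[0].codec_context.skip_frame = "NONKEY"
container.streams.video[0].codec_context.thread_type = "FRAME"  # or "NONE"

start_time = time.time()
for packet in container.demux():
    for frame in packet.decode():
        pass
keyframe_time = time.time() - start_time
container.close()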
Decoded with no threading in 10.71s.
Decoded with frame threading in 10.82s.
Is this intended, or am I using it wrong?