fix spellings in comments
Christian-B committed Feb 29, 2024
1 parent 500626c commit d4b7ede
Showing 7 changed files with 25 additions and 24 deletions.
2 changes: 1 addition & 1 deletion spinn_front_end_common/interface/config_handler.py
@@ -183,7 +183,7 @@ def _remove_excess_folders(
pass

def _set_up_report_specifics(self) -> None:
-# clear and clean out folders considered not useful anymore
+# clear and clean out folders considered not useful any more
report_dir_path = self._data_writer.get_report_dir_path()
if os.listdir(report_dir_path):
self._remove_excess_folders(
4 changes: 2 additions & 2 deletions spinn_front_end_common/interface/java_caller.py
@@ -53,9 +53,9 @@ class JavaCaller(object):
"""
__slots__ = (
"_chip_by_ethernet",
-# The call to get java to work. Including the path if required.
+# The call to get Java to work. Including the path if required.
"_java_call",
-# The location of the java jar file
+# The location of the Java jar file
"_jar_file",
# The location where the machine json is written
"_machine_json_path",
4 changes: 2 additions & 2 deletions spinn_front_end_common/interface/spinnaker.cfg
@@ -219,7 +219,7 @@ n_samples_per_recording_entry = 100

[Java]
use_java = False
-# call to start a java process.
+# call to start a Java process.
# If there is no jdk../bin in your class path this must include the full path
java_call = java

@@ -236,7 +236,7 @@ java_spinnaker_path = None
# Only use this if you have not built JavaSpiNNaker
java_jar_path = None

-# Properties flag to be passed into every java call.
+# Properties flag to be passed into every Java call.
# Default logging level is info so NO properties needed
java_properties = None
# Each Property must start with the -D flag
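For illustration, a hedged sketch of how this [Java] section might be filled in on a machine where Java is not on the PATH; the install path and the -D property below are invented placeholders, not values shipped with the tool:

    [Java]
    use_java = True
    # Hypothetical full path, needed when jdk../bin is not on the class path
    java_call = /opt/jdk-17/bin/java
    # Hypothetical -D property; the default logging level is info, so None is fine
    java_properties = -Dlogging.level=DEBUG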
2 changes: 1 addition & 1 deletion spinn_front_end_common/utilities/helpful_functions.py
@@ -206,7 +206,7 @@ def determine_flow_states(
expected_start_states[ExecutableType.SYNC] = (CPUState.SYNC0,)
expected_end_states[ExecutableType.SYNC] = (CPUState.FINISHED,)

-# cores that use our sim interface
+# cores that use our simulation interface
elif start_type == ExecutableType.USES_SIMULATION_INTERFACE:
if no_sync_changes % 2 == 0:
expected_start_states[start_type] = (CPUState.SYNC0,)
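The alternation in this hunk is a parity check on the number of synchronisation changes so far. A minimal sketch of that selection logic, with a locally defined stand-in for the CPUState enum (illustrative only, not the project's own class):

    from enum import Enum

    class CPUState(Enum):
        # only the two states the parity check chooses between
        SYNC0 = 0
        SYNC1 = 1

    def expected_sync_start_state(no_sync_changes: int) -> CPUState:
        # an even count of sync changes means cores wait in SYNC0,
        # an odd count means they wait in SYNC1
        return CPUState.SYNC0 if no_sync_changes % 2 == 0 else CPUState.SYNC1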
@@ -76,8 +76,8 @@
SDP_RETRANSMISSION_HEADER_SIZE = 2

#: size of config region in bytes
-#: 1.new seq key, 2.first data key, 3. transaction id key 4.end flag key,
-# 5.base key, 6.iptag tag
+#: 1.new sequence key, 2.first data key, 3. transaction id key
+# 4.end flag key, 5.base key, 6.iptag tag
CONFIG_SIZE = 6 * BYTES_PER_WORD

#: items of data a SDP packet can hold when SCP header removed
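As a rough companion to the six-word config region documented above, a minimal sketch of packing it with the standard-library struct module; the little-endian layout is an assumption, only the field order and total size come from the comment:

    import struct

    BYTES_PER_WORD = 4
    CONFIG_SIZE = 6 * BYTES_PER_WORD
    # assumed little-endian; order taken from the comment: new sequence key,
    # first data key, transaction id key, end flag key, base key, iptag tag
    _CONFIG = struct.Struct("<6I")
    assert _CONFIG.size == CONFIG_SIZE

    def pack_config(new_seq_key, first_data_key, transaction_id_key,
                    end_flag_key, base_key, iptag_tag):
        # pack the six 32-bit words in the documented order
        return _CONFIG.pack(new_seq_key, first_data_key, transaction_id_key,
                            end_flag_key, base_key, iptag_tag)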
@@ -92,10 +92,10 @@
#: the size in words of the command flag
COMMAND_SIZE_IN_ITEMS = 1

-#: offset for missing seq starts in first packet
+#: offset for missing sequence starts in first packet
WORDS_FOR_COMMAND_N_MISSING_TRANSACTION = 3

-#: offset for missing seq starts in more packet
+#: offset for missing sequence starts in more packet
WORDS_FOR_COMMAND_TRANSACTION = (
COMMAND_SIZE_IN_ITEMS + TRANSACTION_ID_SIZE_IN_ITEMS)

@@ -193,7 +193,7 @@ def ceildiv(dividend, divisor) -> int:
return int(q) + (r != 0)


-# SDRAM requirement for storing missing SDP packets seq numbers
+# SDRAM requirement for storing missing SDP packets sequence numbers
SDRAM_FOR_MISSING_SDP_SEQ_NUMS = ceildiv(
120.0 * 1024 * BYTES_PER_KB,
WORDS_PER_FULL_PACKET_WITH_SEQUENCE_NUM * BYTES_PER_WORD)
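The tail of ceildiv shown above rounds the quotient up whenever there is a remainder; the divmod line sits outside the hunk, so the following is a self-contained sketch of the same helper rather than a verbatim copy, with two worked calls:

    def ceildiv(dividend, divisor) -> int:
        # integer division rounded up: add one whenever a remainder is left
        q, r = divmod(dividend, divisor)
        return int(q) + (r != 0)

    assert ceildiv(7, 2) == 4   # 3 remainder 1, so round up to 4
    assert ceildiv(8, 2) == 4   # exact division, nothing to round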
@@ -228,9 +228,9 @@ class DataSpeedUpPacketGatherMachineVertex(
"_ip_address",
# store for the last reinjection status
"_last_status",
-# the max seq number expected given a data retrieval
+# the max sequence number expected given a data retrieval
"_max_seq_num",
-# holder for missing seq numbers for data in
+# holder for missing sequence numbers for data in
"_missing_seq_nums_data_in",
# holder of data from out
"_output",
@@ -276,10 +276,10 @@ class DataSpeedUpPacketGatherMachineVertex(
_TIMEOUT_PER_RECEIVE_IN_SECONDS = 2
_TIMEOUT_FOR_SENDING_IN_SECONDS = 0.01

-# end flag for missing seq numbers
+# end flag for missing sequence numbers
_MISSING_SEQ_NUMS_END_FLAG = 0xFFFFFFFF

-# flag for saying missing all SEQ numbers
+# flag for saying missing all sequence numbers
FLAG_FOR_MISSING_ALL_SEQUENCES = 0xFFFFFFFE

_ADDRESS_PACKET_BYTE_FORMAT = struct.Struct(
@@ -766,7 +766,7 @@ def _outgoing_retransmit_missing_seq_nums(
missing_seqs_as_list = list(missing)
missing_seqs_as_list.sort()

-# send seq data
+# send sequence data
for missing_seq_num in missing_seqs_as_list:
message, _length = self.__make_data_in_stream_message(
data_to_write, missing_seq_num, None)
@@ -866,14 +866,14 @@ def _send_all_data_based_packets(

# send rest of data
for seq_num in range(self._max_seq_num or 0):
-# put in command flag and seq number
+# put in command flag and sequence number
message, length_to_send = self.__make_data_in_stream_message(
data_to_write, seq_num, position_in_data)
position_in_data += length_to_send

# send the message
self.__throttled_send(message, connection)
log.debug("sent seq {} of {} bytes", seq_num, length_to_send)
log.debug("sent sequence {} of {} bytes", seq_num, length_to_send)

# check for end flag
self.__send_tell_flag(connection)
@@ -1307,7 +1307,7 @@ def _process_data(
is_end_of_stream = (
first_packet_element & self._LAST_MESSAGE_FLAG_BIT_MASK) != 0

-# check seq number not insane
+# check sequence number not insane
if seq_num > self._max_seq_num:
raise ValueError(
f"got an insane sequence number. got {seq_num} when "
@@ -1319,7 +1319,7 @@

# write data

-# read offset from data is at byte 8. as first 4 is seq number,
+# read offset from data is at byte 8. as first 4 is sequence number,
# second 4 is transaction id
true_data_length = (
offset + length_of_data - BYTES_FOR_SEQ_AND_TRANSACTION_ID)
@@ -1329,7 +1329,7 @@
offset, true_data_length, data,
BYTES_FOR_SEQ_AND_TRANSACTION_ID, length_of_data)

-# add seq number to list
+# add sequence number to list
seq_nums.add(seq_num)

# if received a last flag on its own, its during retransmission.
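The comments in this hunk describe a payload whose first word is the sequence number (with its top bit doubling as the last-message flag) and whose second word is the transaction id. A hedged sketch of splitting such a packet apart; the mask value and little-endian layout are assumptions read off the comments, not the vertex's real constants:

    import struct

    _SEQ_AND_TRANSACTION = struct.Struct("<II")   # two 32-bit words, assumed little-endian
    _LAST_MESSAGE_FLAG_BIT_MASK = 0x80000000      # assumed: top bit marks the final packet
    BYTES_FOR_SEQ_AND_TRANSACTION_ID = _SEQ_AND_TRANSACTION.size

    def split_data_out_packet(data: bytes):
        first_word, transaction_id = _SEQ_AND_TRANSACTION.unpack_from(data, 0)
        is_end_of_stream = (first_word & _LAST_MESSAGE_FLAG_BIT_MASK) != 0
        seq_num = first_word & 0x7FFFFFFF          # strip the flag bit
        payload = data[BYTES_FOR_SEQ_AND_TRANSACTION_ID:]
        return seq_num, transaction_id, is_end_of_stream, payload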
@@ -54,8 +54,8 @@
log = FormatAdapter(logging.getLogger(__name__))

_CONFIG_REGION_REINJECTOR_SIZE_IN_BYTES = 5 * BYTES_PER_WORD
-#: 1.new seq key, 2.first data key, 3. transaction id key 4.end flag key,
-# 5.base key
+#: 1.new sequence key, 2.first data key, 3. transaction id key
+# 4.end flag key, 5.base key
_CONFIG_DATA_SPEED_UP_SIZE_IN_BYTES = 5 * BYTES_PER_WORD
_CONFIG_MAX_EXTRA_SEQ_NUM_SIZE_IN_BYTES = 460 * BYTES_PER_KB
_CONFIG_DATA_IN_KEYS_SDRAM_IN_BYTES = 3 * BYTES_PER_WORD
@@ -81,7 +81,8 @@

# The microseconds per timestep will be divided by this for the max offset
_MAX_OFFSET_DENOMINATOR = 10
-# The max offset modulo to stop spikes in simple cases moving to the next ts
+# The max offset modulo to stop spikes in simple cases
+# moving to the next timestep
_MAX_OFFSET_MODULO = 1000


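A rough reading of the two constants in this hunk, assuming they combine as a division followed by a modulo; the real expression lives outside the hunk and may well differ:

    # values copied from the hunk above; the combination below is an assumption
    _MAX_OFFSET_DENOMINATOR = 10
    _MAX_OFFSET_MODULO = 1000

    def assumed_max_offset(us_per_timestep: int) -> int:
        # cap the offset at a tenth of the timestep, then wrap it so that in
        # simple cases a spike is not pushed into the next timestep
        return (us_per_timestep // _MAX_OFFSET_DENOMINATOR) % _MAX_OFFSET_MODULO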
