Merge pull request #58598 from code-dot-org/datablock/enforce
Enforce MAX_TABLE_ROW_COUNT, MAX_TABLE_COUNT, MAX_RECORD_LENGTH, MAX_NUM_KVPS and MAX_VALUE_LENGTH
snickell committed May 15, 2024
2 parents e06a3db + b062751 commit 1488421
Showing 5 changed files with 71 additions and 38 deletions.
4 changes: 0 additions & 4 deletions dashboard/app/controllers/datablock_storage_controller.rb
@@ -61,7 +61,6 @@ def index
##########################################################

def set_key_value
raise StudentFacingError, "The value is too large. The maximum allowable size is #{DatablockStorageKvp::MAX_VALUE_LENGTH} bytes" if params[:value].length > DatablockStorageKvp::MAX_VALUE_LENGTH
value = JSON.parse params[:value]
DatablockStorageKvp.set_kvp @project_id, params[:key], value
render json: {key: params[:key], value: value}
@@ -219,7 +218,6 @@ def get_columns_for_table
##########################################################

def create_record
raise StudentFacingError, "The record is too large. The maximum allowable size is #{DatablockStorageRecord::MAX_RECORD_LENGTH} bytes" if params[:record_json].length > DatablockStorageRecord::MAX_RECORD_LENGTH
record_json = JSON.parse params[:record_json]
raise "record must be a hash" unless record_json.is_a? Hash

@@ -237,8 +235,6 @@ def read_records
end

def update_record
raise StudentFacingError, "The record is too large. The maximum allowable size is #{DatablockStorageRecord::MAX_RECORD_LENGTH} bytes" if params[:record_json].length > DatablockStorageRecord::MAX_RECORD_LENGTH

table = find_table
record_json = table.update_record params[:record_id], JSON.parse(params[:record_json])
table.save!
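The three raises deleted above checked params[...].length, the character length of the raw request string; the model validations added below measure the serialized value in bytes (to_json.bytesize) instead, and they run no matter which controller action writes the data. A minimal sketch (not part of this commit) of how a model-raised StudentFacingError likely reaches the client as the :bad_request JSON the tests assert on — assuming the controller's existing rescue_from handler (referenced by the "see description in datablock_storage_controller.rb" comment in datablock_storage_table.rb) and assuming the error exposes the type symbol passed to StudentFacingError.new:

rescue_from DatablockStorageTable::StudentFacingError do |exception|
  # Hypothetical handler: the tests read JSON.parse(@response.body)['type'],
  # so render the error's type and message with HTTP 400.
  render json: {type: exception.type, msg: exception.message}, status: :bad_request
end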
28 changes: 20 additions & 8 deletions dashboard/app/models/datablock_storage_kvp.rb
@@ -17,17 +17,14 @@ class DatablockStorageKvp < ApplicationRecord

self.primary_keys = :project_id, :key

validate :max_kvp_count, on: :create
validate :max_value_length, on: [:create, :update]

StudentFacingError = DatablockStorageTable::StudentFacingError

# TODO: #56999, implement enforcement of MAX_VALUE_LENGTH, we already have
# a test for it, but we're skipping it until this is implemented.
MAX_VALUE_LENGTH = 4096

# TODO: #57000, implement encforement of MAX_NUM_KVPS. We didn't find enfocrement
# of this in Firebase in our initial exploration, so we may need to partly pick
# a value here and start enforcing it, previous exploration:
# https://github.com/code-dot-org/code-dot-org/issues/55554#issuecomment-1876143286
MAX_NUM_KVPS = 20000 # does firebase already have a limit? this matches max num table rows
MAX_NUM_KVPS = 20000

def self.get_kvps(project_id)
where(project_id: project_id).
@@ -37,7 +34,9 @@ def self.get_kvps(project_id)

def self.set_kvps(project_id, key_value_hashmap, upsert: true)
kvps = key_value_hashmap.map do |key, value|
{project_id: project_id, key: key, value: value.to_json}
kvp_attr = {project_id: project_id, key: key, value: value.to_json}
DatablockStorageKvp.new(kvp_attr).valid?
kvp_attr
end

if upsert
@@ -56,4 +55,17 @@ def self.set_kvp(project_id, key, value)
DatablockStorageKvp.set_kvps(project_id, {key => value}, upsert: true)
end
end

private def max_kvp_count
current_count = DatablockStorageKvp.where(project_id: project_id).count
if current_count >= MAX_NUM_KVPS
raise StudentFacingError.new(:MAX_KVPS_EXCEEDED), "Cannot have more than #{MAX_NUM_KVPS} key-value pairs per project"
end
end

private def max_value_length
if value.to_json.bytesize > MAX_VALUE_LENGTH
raise StudentFacingError.new(:MAX_VALUE_LENGTH_EXCEEDED), "Value data cannot exceed #{MAX_VALUE_LENGTH} bytes"
end
end
end
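Because set_kvps writes through a bulk upsert, which bypasses per-record validation callbacks, the loop above validates each row by hand with DatablockStorageKvp.new(kvp_attr).valid?; and because the validators raise StudentFacingError rather than populating errors, that one call is enough to abort the write. A minimal usage sketch (not part of this commit; project_id is a placeholder):

oversized = 'b' * (DatablockStorageKvp::MAX_VALUE_LENGTH + 1)
begin
  # set_kvp funnels through set_kvps, so max_value_length runs before any SQL
  DatablockStorageKvp.set_kvp(project_id, 'my_key', oversized)
rescue DatablockStorageKvp::StudentFacingError => exception
  exception.message  # e.g. "Value data cannot exceed 4096 bytes"
end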
13 changes: 10 additions & 3 deletions dashboard/app/models/datablock_storage_record.rb
@@ -20,8 +20,15 @@ class DatablockStorageRecord < ApplicationRecord
# Composite primary key:
self.primary_keys = :project_id, :table_name, :record_id

# TODO: #57001, implement enforcement of MAX_RECORD_LENGTH, we already have
# a test for this, but we're skipping it until this is implemented. This
# should ensure the string form of .json is less than 4096 bytes.
validate :max_record_length

StudentFacingError = DatablockStorageTable::StudentFacingError

MAX_RECORD_LENGTH = 4096

private def max_record_length
if record_json.to_json.bytesize > MAX_RECORD_LENGTH
raise StudentFacingError.new(:MAX_RECORD_LENGTH_EXCEEDED), "The record is too large. The maximum allowable size is #{DatablockStorageRecord::MAX_RECORD_LENGTH} bytes"
end
end
end
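The new validation measures the serialized record (record_json.to_json), so the cap counts UTF-8 bytes rather than characters, and multibyte input reaches it sooner. A minimal sketch with placeholder ids (not part of this commit):

record = DatablockStorageRecord.new(
  project_id: 42, table_name: 'mytable', record_id: 1,
  record_json: {'name' => 'b' * DatablockStorageRecord::MAX_RECORD_LENGTH}
)
# The JSON syntax around the 4096 'b's pushes the serialized form past the cap:
record.valid?  # raises StudentFacingError with type :MAX_RECORD_LENGTH_EXCEEDED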
17 changes: 12 additions & 5 deletions dashboard/app/models/datablock_storage_table.rb
@@ -41,6 +41,8 @@ class DatablockStorageTable < ApplicationRecord

after_initialize -> {self.columns ||= ['id']}, if: :new_record?

validate :validate_max_table_count, on: :create

# These are errors that should show up in the Applab Debug Console
# see description in datablock_storage_controller.rb.
class StudentFacingError < StandardError
@@ -58,12 +60,7 @@ def initialize(type = nil)
# the special project_id to indicate it's a shared table.
SHARED_TABLE_PROJECT_ID = 0

# TODO: #57003, implement enforcement of MAX_TABLE_COUNT, we already have
# a test for it but we're skipping it until this is implemented.
MAX_TABLE_COUNT = 10

# TODO: #57002, enforce MAX_TABLE_ROW_COUNT, we already have a test for it
# but we're skipping it until this is implemented.
MAX_TABLE_ROW_COUNT = 20000

def self.get_table_names(project_id)
@@ -159,6 +156,10 @@ def if_shared_table_copy_on_write
def create_records(record_jsons)
if_shared_table_copy_on_write

if records.count + record_jsons.count > MAX_TABLE_ROW_COUNT
raise StudentFacingError.new(:MAX_ROWS_EXCEEDED), "Cannot add more than #{MAX_TABLE_ROW_COUNT} rows to a table"
end

DatablockStorageRecord.transaction do
# Because we're using a composite primary key for records: (project_id, table_name, record_id)
# and we want an incrementing record_id unique to that (project_id, table_name), we lock
@@ -332,4 +333,10 @@ def coerce_column(column_name, column_type)
end
end
end

private def validate_max_table_count
if DatablockStorageTable.where(project_id: project_id).count >= MAX_TABLE_COUNT
raise StudentFacingError.new(:MAX_TABLES_EXCEEDED), "Cannot create more than #{MAX_TABLE_COUNT} tables"
end
end
end
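create_records now checks the existing row count plus the size of the incoming batch before inserting anything, so a single bulk write cannot leap past MAX_TABLE_ROW_COUNT; validate_max_table_count applies the same idea to the number of tables per project at create time. A minimal sketch (not part of this commit), where table is an existing DatablockStorageTable already at the row limit:

begin
  table.create_records([{'name' => 'bob'}])  # would exceed MAX_TABLE_ROW_COUNT
rescue DatablockStorageTable::StudentFacingError => exception
  exception.message  # e.g. "Cannot add more than 20000 rows to a table"
end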
47 changes: 29 additions & 18 deletions dashboard/test/controllers/datablock_storage_controller_test.rb
@@ -64,6 +64,24 @@ def set_and_get_key_value(key, value)
set_and_get_key_value('👁️👄👁️', 'value is: 🎉')
end

test "set_key_value can't create more than MAX_NUM_KVPS kvps" do
# Lower the max KVP count to 3 so it's easy to test
original_max_num_kvps = DatablockStorageKvp::MAX_NUM_KVPS
DatablockStorageKvp.const_set(:MAX_NUM_KVPS, 3)

# Create 3 kvps so we're right at the limit...
set_and_get_key_value('key1', 'val1')
set_and_get_key_value('key2', 'val2')
set_and_get_key_value('key3', 'val3')

post _url(:set_key_value), params: {key: 'key4', value: 'val4'.to_json}

assert_response :bad_request
assert_equal 'MAX_KVPS_EXCEEDED', JSON.parse(@response.body)['type']
ensure
DatablockStorageKvp.const_set(:MAX_NUM_KVPS, original_max_num_kvps)
end
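The test above lowers MAX_NUM_KVPS with const_set and restores it in an ensure block; note that Ruby prints an "already initialized constant" warning on each such reassignment, which is tolerable in tests. A hypothetical helper (not part of this commit) packaging the same pattern:

def with_const(mod, name, value)
  # Temporarily override a constant, restoring it even if the block raises
  original = mod.const_get(name)
  mod.const_set(name, value)
  yield
ensure
  mod.const_set(name, original)
end

# e.g. with_const(DatablockStorageKvp, :MAX_NUM_KVPS, 3) { exercise_the_limit }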

test "set_key_value should enforce MAX_VALUE_LENGTH" do
too_many_bees = 'b' * (DatablockStorageKvp::MAX_VALUE_LENGTH + 1) # 1 more 'b' char than max
post _url(:set_key_value), params: {
@@ -72,8 +90,6 @@ def set_and_get_key_value(key, value)
}
assert_response :bad_request

skip "FIXME: controller bug, test will fail, because enforcing DatablockStorageTable::MAX_VALUE_LENGTH_EXCEEDED is not yet implemented, see #57002"
# Is MAX_VALUE_LENGTH_EXCEEDED the right error? check the JS
assert_equal 'MAX_VALUE_LENGTH_EXCEEDED', JSON.parse(@response.body)['type']
end
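One byte-accounting subtlety, shown with illustrative arithmetic (not part of this commit): the model validates value.to_json.bytesize, and JSON encoding wraps a string in two quote bytes, so the MAX_VALUE_LENGTH + 1 characters sent above serialize to three bytes over the limit:

s = 'b' * (DatablockStorageKvp::MAX_VALUE_LENGTH + 1)  # 4097 characters
s.to_json.bytesize  # => 4099; the quotes add 2 bytes
s.to_json.bytesize > DatablockStorageKvp::MAX_VALUE_LENGTH  # => true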

@@ -136,7 +152,6 @@ def set_and_get_key_value(key, value)
assert_response :success

post _url(:create_record), params: {table_name: 'table4', record_json: {'name' => 'bob'}.to_json}
skip "FIXME: controller bug, test will fail, because enforcing DatablockStorageTable::MAX_TABLE_COUNT is not yet implemented so we get :success when :bad_request is desired, see #57003"
assert_response :bad_request
assert_equal 'MAX_TABLES_EXCEEDED', JSON.parse(@response.body)['type']
ensure
@@ -155,9 +170,7 @@ def set_and_get_key_value(key, value)

post _url(:create_record), params: {table_name: 'mytable', record_json: {'name' => 'bob'}.to_json}

skip "FIXME: controller bug, test will fail, because enforcing DatablockStorageTable::MAX_TABLE_ROW_COUNT is not yet implemented so we get :success when :bad_request is desired, see #57002"
assert_response :bad_request
# Is MAX_ROWS_EXCEEDED the right error? check the JS
assert_equal 'MAX_ROWS_EXCEEDED', JSON.parse(@response.body)['type']
ensure
DatablockStorageTable.const_set(:MAX_TABLE_ROW_COUNT, original_max_table_row_count)
@@ -172,8 +185,6 @@ def set_and_get_key_value(key, value)
post _url(:create_record), params: {table_name: 'mytable', record_json: {'name' => too_many_bees}.to_json}
assert_response :bad_request

skip "FIXME: controller bug, test will fail, because enforcing DatablockStorageRecord::MAX_RECORD_LENGTH is not yet implemented at the DatablockStorage level, see #57001"
# Is MAX_RECORD_LENGTH_EXCEEDED the right error? check the JS
assert_equal 'MAX_RECORD_LENGTH_EXCEEDED', JSON.parse(@response.body)['type']
end

@@ -621,26 +632,26 @@ def create_shared_table
end

test "import_csv" do
CSV_DATA = <<~CSV
csv_data = <<~CSV
id,name,age,male
4,alice,7,false
5,bob,8,true
6,charlie,9,true
CSV

EXPECTED_RECORDS = [
expected_records = [
{"id" => 1, "name" => "alice", "age" => 7, "male" => false},
{"id" => 2, "name" => "bob", "age" => 8, "male" => true},
{"id" => 3, "name" => "charlie", "age" => 9, "male" => true},
]

post _url(:import_csv), params: {
table_name: 'mytable',
table_data_csv: CSV_DATA,
table_data_csv: csv_data,
}
assert_response :success

assert_equal EXPECTED_RECORDS, read_records
assert_equal expected_records, read_records
end

test "import_csv overwrites existing data" do
@@ -650,31 +661,31 @@ def create_shared_table
}
assert_response :success

CSV_DATA = <<~CSV
csv_data = <<~CSV
id,name
1,alice
2,bob
3,charlie
CSV

EXPECTED_RECORDS = [
expected_records = [
{"id" => 1, "name" => "alice"},
{"id" => 2, "name" => "bob"},
{"id" => 3, "name" => "charlie"},
]

post _url(:import_csv), params: {
table_name: 'mytable',
table_data_csv: CSV_DATA,
table_data_csv: csv_data,
}
assert_response :success

# Tim, age 2 record should be gone:
assert_equal EXPECTED_RECORDS, read_records
assert_equal expected_records, read_records
end

test "export csv" do
CSV_DATA = <<~CSV
csv_data = <<~CSV
id,name,age,male
1,alice,7,false
2,bob,8,true
@@ -683,7 +694,7 @@ def create_shared_table

post _url(:import_csv), params: {
table_name: 'mytable',
table_data_csv: CSV_DATA,
table_data_csv: csv_data,
}
assert_response :success

@@ -692,7 +703,7 @@ def create_shared_table
}
assert_response :success

assert_equal CSV_DATA, @response.body
assert_equal csv_data, @response.body
end

test "project_has_data" do