From 4626bdd242dd22b6a25c4cee98c4f2ab3bc0c9a3 Mon Sep 17 00:00:00 2001
From: Oscar Dowson
Date: Thu, 18 Sep 2025 16:46:17 +1200
Subject: [PATCH 1/2] [FileFormats.LP] Add some keywords without surrounding
 new lines

---
 src/FileFormats/LP/read.jl | 32 ++++++++++++++++++++++++++++++++
 test/FileFormats/LP/LP.jl  | 39 ++++++++++++++++++++-------------------
 2 files changed, 52 insertions(+), 19 deletions(-)

diff --git a/src/FileFormats/LP/read.jl b/src/FileFormats/LP/read.jl
index 6dc74a3cb1..48a0da2893 100644
--- a/src/FileFormats/LP/read.jl
+++ b/src/FileFormats/LP/read.jl
@@ -82,6 +82,38 @@ function Base.read!(io::IO, model::Model{T}) where {T}
                 "No file contents are allowed after `end`.",
             )
         else
+            if token.kind == _TOKEN_IDENTIFIER
+                # We didn't identify the token as a keyword during lexing. But
+                # it might be one that is missing surrounding `\n`. Since our
+                # alternative at this point is to throw an error, we might as
+                # well attempt to see if it can be interpreted as one.
+                kw = _case_insenstive_identifier_to_keyword(token.value)
+                if kw !== nothing
+                    _ = read(state, _Token, _TOKEN_IDENTIFIER)
+                    keyword = Symbol(kw)
+                    continue
+                elseif _compare_case_insenstive(token.value, "subject")
+                    p = peek(state, _Token, 2)
+                    if p !== nothing && p.kind == _TOKEN_IDENTIFIER
+                        if _compare_case_insenstive(p.value, "to")
+                            _ = read(state, _Token, _TOKEN_IDENTIFIER)
+                            _ = read(state, _Token, _TOKEN_IDENTIFIER)
+                            keyword = :CONSTRAINTS
+                            continue
+                        end
+                    end
+                elseif _compare_case_insenstive(token.value, "such")
+                    p = peek(state, _Token, 2)
+                    if p !== nothing && p.kind == _TOKEN_IDENTIFIER
+                        if _compare_case_insenstive(p.value, "that")
+                            _ = read(state, _Token, _TOKEN_IDENTIFIER)
+                            _ = read(state, _Token, _TOKEN_IDENTIFIER)
+                            keyword = :CONSTRAINTS
+                            continue
+                        end
+                    end
+                end
+            end
             _expect(state, token, _TOKEN_KEYWORD)
         end
     end
diff --git a/test/FileFormats/LP/LP.jl b/test/FileFormats/LP/LP.jl
index 02c183fff8..8b9a6c7b1e 100644
--- a/test/FileFormats/LP/LP.jl
+++ b/test/FileFormats/LP/LP.jl
@@ -1491,7 +1491,6 @@ function test_parse_term()
         @test term == MOI.ScalarAffineTerm(-coef, x)
     end
     for (input, reason) in [
-        "subject to" => "Got a keyword defining a new section with value `CONSTRAINTS`.",
         ">= 1" => "Got the symbol `>=`.",
     ]
         io = IOBuffer(input)
@@ -1545,14 +1544,14 @@ function test_parse_set_prefix()
         state = LP._LexerState(io)
         @test LP._parse_set_prefix(state, cache) == set
     end
-    io = IOBuffer("->")
+    io = IOBuffer("1.0 ->")
     state = LP._LexerState(io)
     @test_parse_error(
         """
         Error parsing LP file on line 1:
-        ->
-        ^
-        Got the symbol `->`. We expected this token to be a number.""",
+        1.0 ->
+            ^
+        Got the symbol `->`. We expected this to be an inequality like `>=`, `<=`, or `==`.""",
         LP._parse_set_prefix(state, cache),
     )
     return
@@ -1642,7 +1641,7 @@ function test_new_line_edge_cases_sos()
     return
 end
 
-function test_new_line_edge_case_fails()
+function test_missing_new_line_edge_cases()
     for input in [
         # No newline between objective sense and objective
         "minimize x",
@@ -1655,7 +1654,8 @@
     ]
         io = IOBuffer(input)
        model = LP.Model()
-        @test_throws LP.ParseError MOI.read!(io, model)
+        MOI.read!(io, model)
+        @test MOI.get(model, MOI.VariableIndex, "x") isa MOI.VariableIndex
     end
     return
 end
@@ -1680,7 +1680,7 @@ function test_parse_keyword_edge_cases_identifier_is_keyword()
 end
 
 function test_parse_keyword_subject_to_errors()
-    for line in ["subject", "subject too", "subject to a:"]
+    for line in ["subject", "subject too"]
         io = IOBuffer("""
         maximize
         obj: x
@@ -1755,17 +1755,18 @@ function test_parse_quadratic_expr_eof()
 end
 
 function test_ambiguous_case_1()
-    # Xpress allows this. We currently don't.
-    io = IOBuffer("maximize obj: x subject to c: x <= 1 end")
-    model = LP.Model()
-    @test_parse_error(
-        """
-        Error parsing LP file on line 1:
-        maximize obj: x subject to c: x <= 1 end
-        ^
-        Got an identifier with value `maximize`. We expected this token to be a keyword defining a new section.""",
-        MOI.read!(io, model),
-    )
+    # Xpress allows this. We currently do too.
+    for kw in ("subject to", "such that", "st")
+        io = IOBuffer("maximize obj: x $kw c: x <= 1\nend")
+        model = LP.Model()
+        MOI.read!(io, model)
+        @test MOI.get(model, MOI.ObjectiveSense()) == MOI.MAX_SENSE
+        F, S = MOI.ScalarAffineFunction{Float64}, MOI.LessThan{Float64}
+        @test isa(
+            MOI.get(model, MOI.ConstraintIndex, "c"),
+            MOI.ConstraintIndex{F,S},
+        )
+    end
     return
 end
 

From 0dec45b7b50ae5fc1b8621a28ac82bc5a6faace9 Mon Sep 17 00:00:00 2001
From: Oscar Dowson
Date: Thu, 18 Sep 2025 17:12:13 +1200
Subject: [PATCH 2/2] Fix formatting

---
 test/FileFormats/LP/LP.jl | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/test/FileFormats/LP/LP.jl b/test/FileFormats/LP/LP.jl
index 8b9a6c7b1e..5699b18c20 100644
--- a/test/FileFormats/LP/LP.jl
+++ b/test/FileFormats/LP/LP.jl
@@ -1490,9 +1490,7 @@ function test_parse_term()
         term = LP._parse_term(state, cache, -1.0)
         @test term == MOI.ScalarAffineTerm(-coef, x)
     end
-    for (input, reason) in [
-        ">= 1" => "Got the symbol `>=`.",
-    ]
+    for (input, reason) in [">= 1" => "Got the symbol `>=`."]
         io = IOBuffer(input)
         state = LP._LexerState(io)
         @test_parse_error(