diff --git a/Makefile b/Makefile
index 6d09420c1f..8579fb5db1 100644
--- a/Makefile
+++ b/Makefile
@@ -361,7 +361,7 @@ pedantic_gcc:
 		-Wunused-const-variable=2 \
 		-Wunused-function \
 		-Wunused-label \
-		-Wunused-local-typedefs \
+		-Wno-unused-local-typedefs \
 		-Wunused-macros \
 		-Wunused-parameter \
 		-Wunused-result \
diff --git a/include/nlohmann/detail/input/binary_reader.hpp b/include/nlohmann/detail/input/binary_reader.hpp
index e859c7d12d..93667e8f66 100644
--- a/include/nlohmann/detail/input/binary_reader.hpp
+++ b/include/nlohmann/detail/input/binary_reader.hpp
@@ -54,6 +54,8 @@ class binary_reader
     using string_t = typename BasicJsonType::string_t;
     using binary_t = typename BasicJsonType::binary_t;
     using json_sax_t = SAX;
+    using char_type = typename InputAdapterType::char_type;
+    using char_int_type = typename std::char_traits<char_type>::int_type;

   public:
     /*!
@@ -122,7 +124,7 @@ class binary_reader
             get();
         }

-        if (JSON_HEDLEY_UNLIKELY(current != std::char_traits<char>::eof()))
+        if (JSON_HEDLEY_UNLIKELY(current != std::char_traits<char_type>::eof()))
         {
             return sax->parse_error(chars_read, get_token_string(),
                                     parse_error::create(110, chars_read, exception_message(format, "expected end of input; last byte: 0x" + get_token_string(), "value")));
@@ -180,7 +182,7 @@ class binary_reader
             {
                 return true;
             }
-            *out++ = static_cast<char>(current);
+            *out++ = static_cast<char_type>(current);
         }

         return true;
@@ -206,7 +208,7 @@ class binary_reader
             return sax->parse_error(chars_read, last_token, parse_error::create(112, chars_read, exception_message(input_format_t::bson, "string length must be at least 1, is " + std::to_string(len), "string")));
         }

-        return get_string(input_format_t::bson, len - static_cast<NumberType>(1), result) and get() != std::char_traits<char>::eof();
+        return get_string(input_format_t::bson, len - static_cast<NumberType>(1), result) and get() != std::char_traits<char_type>::eof();
     }

     /*!
@@ -245,7 +247,7 @@ class binary_reader
              Unsupported BSON record type 0x...
     @return whether a valid BSON-object/array was passed to the SAX parser
     */
-    bool parse_bson_element_internal(const int element_type,
+    bool parse_bson_element_internal(const char_int_type element_type,
                                      const std::size_t element_type_parse_position)
     {
         switch (element_type)
@@ -327,7 +329,7 @@ class binary_reader
     {
         string_t key;

-        while (int element_type = get())
+        while (auto element_type = get())
         {
             if (JSON_HEDLEY_UNLIKELY(not unexpect_eof(input_format_t::bson, "element list")))
             {
@@ -395,7 +397,7 @@ class binary_reader
         switch (get_char ? get() : current)
         {
             // EOF
-            case std::char_traits<char>::eof():
+            case std::char_traits<char_type>::eof():
                 return unexpect_eof(input_format_t::cbor, "value");

             // Integer 0x00..0x17 (0..23)
@@ -690,12 +692,12 @@ class binary_reader

             case 0xF9: // Half-Precision Float (two-byte IEEE 754)
             {
-                const int byte1_raw = get();
+                const auto byte1_raw = get();
                 if (JSON_HEDLEY_UNLIKELY(not unexpect_eof(input_format_t::cbor, "number")))
                 {
                     return false;
                 }
-                const int byte2_raw = get();
+                const auto byte2_raw = get();
                 if (JSON_HEDLEY_UNLIKELY(not unexpect_eof(input_format_t::cbor, "number")))
                 {
                     return false;
@@ -1048,7 +1050,7 @@ class binary_reader
         switch (get())
         {
             // EOF
-            case std::char_traits<char>::eof():
+            case std::char_traits<char_type>::eof():
                 return unexpect_eof(input_format_t::msgpack, "value");

             // positive fixint
@@ -1825,7 +1827,7 @@ class binary_reader

     @return whether pair creation completed
     */
-    bool get_ubjson_size_type(std::pair<std::size_t, int>& result)
+    bool get_ubjson_size_type(std::pair<std::size_t, char_int_type>& result)
     {
         result.first = string_t::npos; // size
         result.second = 0; // type
@@ -1866,11 +1868,11 @@ class binary_reader
     @param prefix  the previously read or set type prefix
     @return whether value creation completed
     */
-    bool get_ubjson_value(const int prefix)
+    bool get_ubjson_value(const char_int_type prefix)
     {
         switch (prefix)
         {
-            case std::char_traits<char>::eof(): // EOF
+            case std::char_traits<char_type>::eof(): // EOF
                 return unexpect_eof(input_format_t::ubjson, "value");

             case 'T': // true
@@ -1935,7 +1937,7 @@ class binary_reader
                     auto last_token = get_token_string();
                     return sax->parse_error(chars_read, last_token, parse_error::create(113, chars_read, exception_message(input_format_t::ubjson, "byte after 'C' must be in range 0x00..0x7F; last byte: 0x" + last_token, "char")));
                 }
-                string_t s(1, static_cast<char>(current));
+                string_t s(1, static_cast<char_type>(current));
                 return sax->string(s);
             }
@@ -1964,7 +1966,7 @@ class binary_reader
     */
     bool get_ubjson_array()
     {
-        std::pair<std::size_t, int> size_and_type;
+        std::pair<std::size_t, char_int_type> size_and_type;
         if (JSON_HEDLEY_UNLIKELY(not get_ubjson_size_type(size_and_type)))
         {
             return false;
@@ -2026,7 +2028,7 @@ class binary_reader
     */
     bool get_ubjson_object()
     {
-        std::pair<std::size_t, int> size_and_type;
+        std::pair<std::size_t, char_int_type> size_and_type;
         if (JSON_HEDLEY_UNLIKELY(not get_ubjson_size_type(size_and_type)))
         {
             return false;
@@ -2108,11 +2110,11 @@ class binary_reader

     This function provides the interface to the used input adapter. It does
     not throw in case the input reached EOF, but returns a -'ve valued
-    `std::char_traits<char>::eof()` in that case.
+    `std::char_traits<char_type>::eof()` in that case.

     @return character read from the input
     */
-    int get()
+    char_int_type get()
     {
         ++chars_read;
         return current = ia.get_character();
@@ -2121,7 +2123,7 @@ class binary_reader

     /*!
     @return character read from the input after ignoring all 'N' entries
     */
-    int get_ignore_noop()
+    char_int_type get_ignore_noop()
     {
         do
         {
@@ -2201,7 +2203,7 @@ class binary_reader
             {
                 success = false;
             }
-            return static_cast<char>(current);
+            return std::char_traits<char_type>::to_char_type(current);
         });
         return success;
     }
@@ -2246,7 +2248,7 @@ class binary_reader
     JSON_HEDLEY_NON_NULL(3)
     bool unexpect_eof(const input_format_t format, const char* context) const
     {
-        if (JSON_HEDLEY_UNLIKELY(current == std::char_traits<char>::eof()))
+        if (JSON_HEDLEY_UNLIKELY(current == std::char_traits<char_type>::eof()))
         {
             return sax->parse_error(chars_read, "",
                                     parse_error::create(110, chars_read, exception_message(format, "unexpected end of input", context)));
@@ -2306,7 +2308,7 @@ class binary_reader
     InputAdapterType ia;

     /// the current character
-    int current = std::char_traits<char>::eof();
+    char_int_type current = std::char_traits<char_type>::eof();

     /// the number of characters read
     std::size_t chars_read = 0;
diff --git a/include/nlohmann/detail/input/input_adapters.hpp b/include/nlohmann/detail/input/input_adapters.hpp
index 725ff1fe49..0a10847613 100644
--- a/include/nlohmann/detail/input/input_adapters.hpp
+++ b/include/nlohmann/detail/input/input_adapters.hpp
@@ -189,23 +189,23 @@ struct wide_string_input_helper
         }
         else if (wc <= 0x7FF)
         {
-            utf8_bytes[0] = static_cast<std::char_traits<char>::int_type>(0xC0u | ((wc >> 6u) & 0x1Fu));
-            utf8_bytes[1] = static_cast<std::char_traits<char>::int_type>(0x80u | (wc & 0x3Fu));
+            utf8_bytes[0] = static_cast<std::char_traits<char>::int_type>(0xC0u | ((static_cast<unsigned int>(wc) >> 6u) & 0x1Fu));
+            utf8_bytes[1] = static_cast<std::char_traits<char>::int_type>(0x80u | (static_cast<unsigned int>(wc) & 0x3Fu));
             utf8_bytes_filled = 2;
         }
         else if (wc <= 0xFFFF)
         {
-            utf8_bytes[0] = static_cast<std::char_traits<char>::int_type>(0xE0u | ((wc >> 12u) & 0x0Fu));
-            utf8_bytes[1] = static_cast<std::char_traits<char>::int_type>(0x80u | ((wc >> 6u) & 0x3Fu));
-            utf8_bytes[2] = static_cast<std::char_traits<char>::int_type>(0x80u | (wc & 0x3Fu));
+            utf8_bytes[0] = static_cast<std::char_traits<char>::int_type>(0xE0u | ((static_cast<unsigned int>(wc) >> 12u) & 0x0Fu));
+            utf8_bytes[1] = static_cast<std::char_traits<char>::int_type>(0x80u | ((static_cast<unsigned int>(wc) >> 6u) & 0x3Fu));
+            utf8_bytes[2] = static_cast<std::char_traits<char>::int_type>(0x80u | (static_cast<unsigned int>(wc) & 0x3Fu));
             utf8_bytes_filled = 3;
         }
         else if (wc <= 0x10FFFF)
         {
-            utf8_bytes[0] = static_cast<std::char_traits<char>::int_type>(0xF0u | ((wc >> 18u) & 0x07u));
-            utf8_bytes[1] = static_cast<std::char_traits<char>::int_type>(0x80u | ((wc >> 12u) & 0x3Fu));
-            utf8_bytes[2] = static_cast<std::char_traits<char>::int_type>(0x80u | ((wc >> 6u) & 0x3Fu));
-            utf8_bytes[3] = static_cast<std::char_traits<char>::int_type>(0x80u | (wc & 0x3Fu));
+            utf8_bytes[0] = static_cast<std::char_traits<char>::int_type>(0xF0u | ((static_cast<unsigned int>(wc) >> 18u) & 0x07u));
+            utf8_bytes[1] = static_cast<std::char_traits<char>::int_type>(0x80u | ((static_cast<unsigned int>(wc) >> 12u) & 0x3Fu));
+            utf8_bytes[2] = static_cast<std::char_traits<char>::int_type>(0x80u | ((static_cast<unsigned int>(wc) >> 6u) & 0x3Fu));
+            utf8_bytes[3] = static_cast<std::char_traits<char>::int_type>(0x80u | (static_cast<unsigned int>(wc) & 0x3Fu));
             utf8_bytes_filled = 4;
         }
         else
@@ -285,6 +285,8 @@ template<typename BaseInputAdapter, typename WideCharType>
 class wide_string_input_adapter
 {
   public:
+    using char_type = char;
+
     wide_string_input_adapter(BaseInputAdapter base)
         : base_adapter(base) {}

diff --git a/include/nlohmann/detail/input/lexer.hpp b/include/nlohmann/detail/input/lexer.hpp
index cf38a813ce..e710140b79 100644
--- a/include/nlohmann/detail/input/lexer.hpp
+++ b/include/nlohmann/detail/input/lexer.hpp
@@ -106,12 +106,14 @@ class lexer : public lexer_base<BasicJsonType>
     using number_unsigned_t = typename BasicJsonType::number_unsigned_t;
     using number_float_t = typename BasicJsonType::number_float_t;
     using string_t = typename BasicJsonType::string_t;
+    using char_type = typename InputAdapterType::char_type;
+    using char_int_type = typename std::char_traits<char_type>::int_type;

   public:
     using token_type = typename lexer_base<BasicJsonType>::token_type;

     explicit lexer(InputAdapterType&& adapter)
-        : ia(std::move(adapter)), decimal_point_char(get_decimal_point()) {}
+        : ia(std::move(adapter)), decimal_point_char(static_cast<char_type>(get_decimal_point())) {}

     // delete because of pointer members
     lexer(const lexer&) = delete;
@@ -201,7 +203,7 @@ class lexer : public lexer_base

     @return true if and only if no range violation was detected
     */
-    bool next_byte_in_range(std::initializer_list<int> ranges)
+    bool next_byte_in_range(std::initializer_list<char_int_type> ranges)
     {
         assert(ranges.size() == 2 or ranges.size() == 4 or ranges.size() == 6);
         add(current);
@@ -252,7 +254,7 @@ class lexer : public lexer_base
             switch (get())
             {
                 // end of file while parsing string
-                case std::char_traits<char>::eof():
+                case std::char_traits<char_type>::eof():
                 {
                     error_message = "invalid string: missing closing quote";
                     return token_type::parse_error;
@@ -370,28 +372,28 @@ class lexer : public lexer_base
                         if (codepoint < 0x80)
                         {
                             // 1-byte characters: 0xxxxxxx (ASCII)
-                            add(codepoint);
+                            add(static_cast<char_int_type>(codepoint));
                         }
                         else if (codepoint <= 0x7FF)
                         {
                             // 2-byte characters: 110xxxxx 10xxxxxx
-                            add(static_cast<int>(0xC0u | (static_cast<unsigned int>(codepoint) >> 6u)));
-                            add(static_cast<int>(0x80u | (static_cast<unsigned int>(codepoint) & 0x3Fu)));
+                            add(static_cast<char_int_type>(0xC0u | (static_cast<unsigned int>(codepoint) >> 6u)));
+                            add(static_cast<char_int_type>(0x80u | (static_cast<unsigned int>(codepoint) & 0x3Fu)));
                         }
                         else if (codepoint <= 0xFFFF)
                         {
                             // 3-byte characters: 1110xxxx 10xxxxxx 10xxxxxx
-                            add(static_cast<int>(0xE0u | (static_cast<unsigned int>(codepoint) >> 12u)));
-                            add(static_cast<int>(0x80u | ((static_cast<unsigned int>(codepoint) >> 6u) & 0x3Fu)));
-                            add(static_cast<int>(0x80u | (static_cast<unsigned int>(codepoint) & 0x3Fu)));
+                            add(static_cast<char_int_type>(0xE0u | (static_cast<unsigned int>(codepoint) >> 12u)));
+                            add(static_cast<char_int_type>(0x80u | ((static_cast<unsigned int>(codepoint) >> 6u) & 0x3Fu)));
+                            add(static_cast<char_int_type>(0x80u | (static_cast<unsigned int>(codepoint) & 0x3Fu)));
                         }
                         else
                         {
                             // 4-byte characters: 11110xxx 10xxxxxx 10xxxxxx 10xxxxxx
-                            add(static_cast<int>(0xF0u | (static_cast<unsigned int>(codepoint) >> 18u)));
-                            add(static_cast<int>(0x80u | ((static_cast<unsigned int>(codepoint) >> 12u) & 0x3Fu)));
-                            add(static_cast<int>(0x80u | ((static_cast<unsigned int>(codepoint) >> 6u) & 0x3Fu)));
-                            add(static_cast<int>(0x80u | (static_cast<unsigned int>(codepoint) & 0x3Fu)));
+                            add(static_cast<char_int_type>(0xF0u | (static_cast<unsigned int>(codepoint) >> 18u)));
+                            add(static_cast<char_int_type>(0x80u | ((static_cast<unsigned int>(codepoint) >> 12u) & 0x3Fu)));
+                            add(static_cast<char_int_type>(0x80u | ((static_cast<unsigned int>(codepoint) >> 6u) & 0x3Fu)));
+                            add(static_cast<char_int_type>(0x80u | (static_cast<unsigned int>(codepoint) & 0x3Fu)));
                         }

                         break;
@@ -1213,13 +1215,13 @@ class lexer : public lexer_base
     @param[in] return_type   the token type to return on success
     */
     JSON_HEDLEY_NON_NULL(2)
-    token_type scan_literal(const char* literal_text, const std::size_t length,
+    token_type scan_literal(const char_type* literal_text, const std::size_t length,
                             token_type return_type)
     {
         assert(current == literal_text[0]);
         for (std::size_t i = 1; i < length; ++i)
         {
-            if (JSON_HEDLEY_UNLIKELY(get() != literal_text[i]))
+            if (JSON_HEDLEY_UNLIKELY(std::char_traits<char_type>::to_char_type(get()) != literal_text[i]))
             {
                 error_message = "invalid literal";
                 return token_type::parse_error;
@@ -1237,7 +1239,7 @@ class lexer : public lexer_base
     {
         token_buffer.clear();
         token_string.clear();
-        token_string.push_back(std::char_traits<char>::to_char_type(current));
+        token_string.push_back(std::char_traits<char_type>::to_char_type(current));
     }

     /*
@@ -1250,7 +1252,7 @@ class lexer : public lexer_base

     @return character read from the input
     */
-    std::char_traits<char>::int_type get()
+    char_int_type get()
     {
         ++position.chars_read_total;
         ++position.chars_read_current_line;
@@ -1265,9 +1267,9 @@ class lexer : public lexer_base
             current = ia.get_character();
         }

-        if (JSON_HEDLEY_LIKELY(current != std::char_traits<char>::eof()))
+        if (JSON_HEDLEY_LIKELY(current != std::char_traits<char_type>::eof()))
         {
-            token_string.push_back(std::char_traits<char>::to_char_type(current));
+            token_string.push_back(std::char_traits<char_type>::to_char_type(current));
         }

         if (current == '\n')
@@ -1306,7 +1308,7 @@ class lexer : public lexer_base
             --position.chars_read_current_line;
         }

-        if (JSON_HEDLEY_LIKELY(current != std::char_traits<char>::eof()))
+        if (JSON_HEDLEY_LIKELY(current != std::char_traits<char_type>::eof()))
         {
             assert(not token_string.empty());
             token_string.pop_back();
@@ -1314,9 +1316,9 @@ class lexer : public lexer_base
     }

     /// add a character to token_buffer
-    void add(int c)
+    void add(char_int_type c)
     {
-        token_buffer.push_back(std::char_traits<char>::to_char_type(c));
+        token_buffer.push_back(static_cast<typename string_t::value_type>(c));
     }

   public:
@@ -1377,7 +1379,7 @@ class lexer : public lexer_base
         else
         {
             // add character as is
-            result.push_back(c);
+            result.push_back(static_cast<std::string::value_type>(c));
         }
     }

@@ -1447,11 +1449,20 @@ class lexer : public lexer_base

             // literals
             case 't':
-                return scan_literal("true", 4, token_type::literal_true);
+            {
+                std::array<char_type, 4> true_literal = {{'t', 'r', 'u', 'e'}};
+                return scan_literal(true_literal.data(), true_literal.size(), token_type::literal_true);
+            }
             case 'f':
-                return scan_literal("false", 5, token_type::literal_false);
+            {
+                std::array<char_type, 5> false_literal = {{'f', 'a', 'l', 's', 'e'}};
+                return scan_literal(false_literal.data(), false_literal.size(), token_type::literal_false);
+            }
             case 'n':
-                return scan_literal("null", 4, token_type::literal_null);
+            {
+                std::array<char_type, 4> null_literal = {{'n', 'u', 'l', 'l'}};
+                return scan_literal(null_literal.data(), null_literal.size(), token_type::literal_null);
+            }

             // string
             case '\"':
@@ -1474,7 +1485,7 @@ class lexer : public lexer_base
             // end of input (the null byte is needed when parsing from
             // string literals)
             case '\0':
-            case std::char_traits<char>::eof():
+            case std::char_traits<char_type>::eof():
                 return token_type::end_of_input;

             // error
@@ -1489,7 +1500,7 @@ class lexer : public lexer_base
     InputAdapterType ia;

     /// the current character
-    std::char_traits<char>::int_type current = std::char_traits<char>::eof();
+    char_int_type current = std::char_traits<char_type>::eof();

     /// whether the next get() call should just return current
     bool next_unget = false;
@@ -1498,7 +1509,7 @@ class lexer : public lexer_base
     position_t position {};

     /// raw input token string (for error messages)
-    std::vector<char> token_string {};
+    std::vector<char_type> token_string {};

     /// buffer for variable-length tokens (numbers, strings)
     string_t token_buffer {};
@@ -1512,7 +1523,7 @@ class lexer : public lexer_base
     number_float_t value_float = 0;

     /// the decimal point
-    const char decimal_point_char = '.';
+    const char_type decimal_point_char = '.';
 };
 }  // namespace detail
 }  // namespace nlohmann
diff --git a/include/nlohmann/json.hpp b/include/nlohmann/json.hpp
index 592d19667b..86a8616e1e 100644
--- a/include/nlohmann/json.hpp
+++ b/include/nlohmann/json.hpp
@@ -4764,7 +4764,7 @@ class basic_json
     future 4.0.0 of the library. Please use @ref items() instead;
     that is, replace `json::iterator_wrapper(j)` with `j.items()`.
     */
-    JSON_HEDLEY_DEPRECATED(3.1.0)
+    JSON_HEDLEY_DEPRECATED_FOR(3.1.0, items())
     static iteration_proxy<iterator> iterator_wrapper(reference ref) noexcept
     {
         return ref.items();
@@ -4773,7 +4773,7 @@ class basic_json

     /*!
     @copydoc iterator_wrapper(reference)
     */
-    JSON_HEDLEY_DEPRECATED(3.1.0)
+    JSON_HEDLEY_DEPRECATED_FOR(3.1.0, items())
     static iteration_proxy<const_iterator> iterator_wrapper(const_reference ref) noexcept
     {
         return ref.items();
@@ -6530,7 +6530,7 @@ class basic_json
     instead; that is, replace calls like `j >> o;` with `o << j;`.
     @since version 1.0.0; deprecated since version 3.0.0
     */
-    JSON_HEDLEY_DEPRECATED(3.0.0)
+    JSON_HEDLEY_DEPRECATED_FOR(3.0.0, operator<<(std::ostream&, const basic_json&))
     friend std::ostream& operator>>(const basic_json& j, std::ostream& o)
     {
         return o << j;
@@ -6731,8 +6731,8 @@ class basic_json
     }

     template<typename SAX>
-    JSON_HEDLEY_NON_NULL(2)
     JSON_HEDLEY_DEPRECATED_FOR(3.8.0, sax_parse(ptr, ptr + len, ...))
+    JSON_HEDLEY_NON_NULL(2)
     static bool sax_parse(detail::span_input_adapter&& i, SAX* sax,
                           input_format_t format = input_format_t::json,
                           const bool strict = true)
@@ -6743,9 +6743,6 @@ class basic_json
                : detail::binary_reader<basic_json, decltype(ia), SAX>(std::move(ia)).sax_parse(format, sax, strict);
     }

-
-
-
     /*!
     @brief deserialize from stream
     @deprecated This stream operator is deprecated and will be removed in
@@ -6754,7 +6751,7 @@ class basic_json
     instead; that is, replace calls like `j << i;` with `i >> j;`.
     @since version 1.0.0; deprecated since version 3.0.0
     */
-    JSON_HEDLEY_DEPRECATED(3.0.0)
+    JSON_HEDLEY_DEPRECATED_FOR(3.0.0, operator>>(std::istream&, basic_json&))
     friend std::istream& operator<<(basic_json& j, std::istream& i)
     {
         return operator>>(i, j);
diff --git a/single_include/nlohmann/json.hpp b/single_include/nlohmann/json.hpp
index a24a08b5ae..366a6fe84c 100644
--- a/single_include/nlohmann/json.hpp
+++ b/single_include/nlohmann/json.hpp
@@ -4611,23 +4611,23 @@ struct wide_string_input_helper
         }
         else if (wc <= 0x7FF)
         {
-            utf8_bytes[0] = static_cast<std::char_traits<char>::int_type>(0xC0u | ((wc >> 6u) & 0x1Fu));
-            utf8_bytes[1] = static_cast<std::char_traits<char>::int_type>(0x80u | (wc & 0x3Fu));
+            utf8_bytes[0] = static_cast<std::char_traits<char>::int_type>(0xC0u | ((static_cast<unsigned int>(wc) >> 6u) & 0x1Fu));
+            utf8_bytes[1] = static_cast<std::char_traits<char>::int_type>(0x80u | (static_cast<unsigned int>(wc) & 0x3Fu));
             utf8_bytes_filled = 2;
         }
         else if (wc <= 0xFFFF)
         {
-            utf8_bytes[0] = static_cast<std::char_traits<char>::int_type>(0xE0u | ((wc >> 12u) & 0x0Fu));
-            utf8_bytes[1] = static_cast<std::char_traits<char>::int_type>(0x80u | ((wc >> 6u) & 0x3Fu));
-            utf8_bytes[2] = static_cast<std::char_traits<char>::int_type>(0x80u | (wc & 0x3Fu));
+            utf8_bytes[0] = static_cast<std::char_traits<char>::int_type>(0xE0u | ((static_cast<unsigned int>(wc) >> 12u) & 0x0Fu));
+            utf8_bytes[1] = static_cast<std::char_traits<char>::int_type>(0x80u | ((static_cast<unsigned int>(wc) >> 6u) & 0x3Fu));
+            utf8_bytes[2] = static_cast<std::char_traits<char>::int_type>(0x80u | (static_cast<unsigned int>(wc) & 0x3Fu));
             utf8_bytes_filled = 3;
         }
         else if (wc <= 0x10FFFF)
         {
-            utf8_bytes[0] = static_cast<std::char_traits<char>::int_type>(0xF0u | ((wc >> 18u) & 0x07u));
-            utf8_bytes[1] = static_cast<std::char_traits<char>::int_type>(0x80u | ((wc >> 12u) & 0x3Fu));
-            utf8_bytes[2] = static_cast<std::char_traits<char>::int_type>(0x80u | ((wc >> 6u) & 0x3Fu));
-            utf8_bytes[3] = static_cast<std::char_traits<char>::int_type>(0x80u | (wc & 0x3Fu));
+            utf8_bytes[0] = static_cast<std::char_traits<char>::int_type>(0xF0u | ((static_cast<unsigned int>(wc) >> 18u) & 0x07u));
+            utf8_bytes[1] = static_cast<std::char_traits<char>::int_type>(0x80u | ((static_cast<unsigned int>(wc) >> 12u) & 0x3Fu));
+            utf8_bytes[2] = static_cast<std::char_traits<char>::int_type>(0x80u | ((static_cast<unsigned int>(wc) >> 6u) & 0x3Fu));
+            utf8_bytes[3] = static_cast<std::char_traits<char>::int_type>(0x80u | (static_cast<unsigned int>(wc) & 0x3Fu));
             utf8_bytes_filled = 4;
         }
         else
@@ -4707,6 +4707,8 @@ template<typename BaseInputAdapter, typename WideCharType>
 class wide_string_input_adapter
 {
   public:
+    using char_type = char;
+
     wide_string_input_adapter(BaseInputAdapter base)
         : base_adapter(base) {}

@@ -5786,6 +5788,8 @@ class binary_reader
     using string_t = typename BasicJsonType::string_t;
     using binary_t = typename BasicJsonType::binary_t;
     using json_sax_t = SAX;
+    using char_type = typename InputAdapterType::char_type;
+    using char_int_type = typename std::char_traits<char_type>::int_type;

   public:
     /*!
@@ -5854,7 +5858,7 @@ class binary_reader
             get();
         }

-        if (JSON_HEDLEY_UNLIKELY(current != std::char_traits<char>::eof()))
+        if (JSON_HEDLEY_UNLIKELY(current != std::char_traits<char_type>::eof()))
         {
             return sax->parse_error(chars_read, get_token_string(),
                                     parse_error::create(110, chars_read, exception_message(format, "expected end of input; last byte: 0x" + get_token_string(), "value")));
@@ -5912,7 +5916,7 @@ class binary_reader
             {
                 return true;
             }
-            *out++ = static_cast<char>(current);
+            *out++ = static_cast<char_type>(current);
         }

         return true;
@@ -5938,7 +5942,7 @@ class binary_reader
             return sax->parse_error(chars_read, last_token, parse_error::create(112, chars_read, exception_message(input_format_t::bson, "string length must be at least 1, is " + std::to_string(len), "string")));
         }

-        return get_string(input_format_t::bson, len - static_cast<NumberType>(1), result) and get() != std::char_traits<char>::eof();
+        return get_string(input_format_t::bson, len - static_cast<NumberType>(1), result) and get() != std::char_traits<char_type>::eof();
     }

     /*!
@@ -5977,7 +5981,7 @@ class binary_reader
              Unsupported BSON record type 0x...
     @return whether a valid BSON-object/array was passed to the SAX parser
     */
-    bool parse_bson_element_internal(const int element_type,
+    bool parse_bson_element_internal(const char_int_type element_type,
                                      const std::size_t element_type_parse_position)
     {
         switch (element_type)
@@ -6059,7 +6063,7 @@ class binary_reader
     {
         string_t key;

-        while (int element_type = get())
+        while (auto element_type = get())
         {
             if (JSON_HEDLEY_UNLIKELY(not unexpect_eof(input_format_t::bson, "element list")))
             {
@@ -6127,7 +6131,7 @@ class binary_reader
         switch (get_char ? get() : current)
         {
             // EOF
-            case std::char_traits<char>::eof():
+            case std::char_traits<char_type>::eof():
                 return unexpect_eof(input_format_t::cbor, "value");

             // Integer 0x00..0x17 (0..23)
@@ -6422,12 +6426,12 @@ class binary_reader

             case 0xF9: // Half-Precision Float (two-byte IEEE 754)
             {
-                const int byte1_raw = get();
+                const auto byte1_raw = get();
                 if (JSON_HEDLEY_UNLIKELY(not unexpect_eof(input_format_t::cbor, "number")))
                 {
                     return false;
                 }
-                const int byte2_raw = get();
+                const auto byte2_raw = get();
                 if (JSON_HEDLEY_UNLIKELY(not unexpect_eof(input_format_t::cbor, "number")))
                 {
                     return false;
@@ -6780,7 +6784,7 @@ class binary_reader
         switch (get())
         {
             // EOF
-            case std::char_traits<char>::eof():
+            case std::char_traits<char_type>::eof():
                 return unexpect_eof(input_format_t::msgpack, "value");

             // positive fixint
@@ -7557,7 +7561,7 @@ class binary_reader

     @return whether pair creation completed
     */
-    bool get_ubjson_size_type(std::pair<std::size_t, int>& result)
+    bool get_ubjson_size_type(std::pair<std::size_t, char_int_type>& result)
     {
         result.first = string_t::npos; // size
         result.second = 0; // type
@@ -7598,11 +7602,11 @@ class binary_reader
     @param prefix  the previously read or set type prefix
     @return whether value creation completed
     */
-    bool get_ubjson_value(const int prefix)
+    bool get_ubjson_value(const char_int_type prefix)
     {
         switch (prefix)
         {
-            case std::char_traits<char>::eof(): // EOF
+            case std::char_traits<char_type>::eof(): // EOF
                 return unexpect_eof(input_format_t::ubjson, "value");

             case 'T': // true
@@ -7667,7 +7671,7 @@ class binary_reader
                     auto last_token = get_token_string();
                     return sax->parse_error(chars_read, last_token, parse_error::create(113, chars_read, exception_message(input_format_t::ubjson, "byte after 'C' must be in range 0x00..0x7F; last byte: 0x" + last_token, "char")));
                 }
-                string_t s(1, static_cast<char>(current));
+                string_t s(1, static_cast<char_type>(current));
                 return sax->string(s);
             }
@@ -7696,7 +7700,7 @@ class binary_reader
     */
     bool get_ubjson_array()
     {
-        std::pair<std::size_t, int> size_and_type;
+        std::pair<std::size_t, char_int_type> size_and_type;
         if (JSON_HEDLEY_UNLIKELY(not get_ubjson_size_type(size_and_type)))
         {
             return false;
@@ -7758,7 +7762,7 @@ class binary_reader
     */
     bool get_ubjson_object()
     {
-        std::pair<std::size_t, int> size_and_type;
+        std::pair<std::size_t, char_int_type> size_and_type;
         if (JSON_HEDLEY_UNLIKELY(not get_ubjson_size_type(size_and_type)))
         {
             return false;
@@ -7840,11 +7844,11 @@ class binary_reader

     This function provides the interface to the used input adapter. It does
     not throw in case the input reached EOF, but returns a -'ve valued
-    `std::char_traits<char>::eof()` in that case.
+    `std::char_traits<char_type>::eof()` in that case.

     @return character read from the input
     */
-    int get()
+    char_int_type get()
     {
         ++chars_read;
         return current = ia.get_character();
@@ -7853,7 +7857,7 @@ class binary_reader

     /*!
     @return character read from the input after ignoring all 'N' entries
     */
-    int get_ignore_noop()
+    char_int_type get_ignore_noop()
     {
         do
         {
@@ -7933,7 +7937,7 @@ class binary_reader
             {
                 success = false;
             }
-            return static_cast<char>(current);
+            return std::char_traits<char_type>::to_char_type(current);
         });
         return success;
     }
@@ -7978,7 +7982,7 @@ class binary_reader
     JSON_HEDLEY_NON_NULL(3)
     bool unexpect_eof(const input_format_t format, const char* context) const
     {
-        if (JSON_HEDLEY_UNLIKELY(current == std::char_traits<char>::eof()))
+        if (JSON_HEDLEY_UNLIKELY(current == std::char_traits<char_type>::eof()))
         {
             return sax->parse_error(chars_read, "",
                                     parse_error::create(110, chars_read, exception_message(format, "unexpected end of input", context)));
@@ -8038,7 +8042,7 @@ class binary_reader
     InputAdapterType ia;

     /// the current character
-    int current = std::char_traits<char>::eof();
+    char_int_type current = std::char_traits<char_type>::eof();

     /// the number of characters read
     std::size_t chars_read = 0;
@@ -8166,12 +8170,14 @@ class lexer : public lexer_base
     using number_unsigned_t = typename BasicJsonType::number_unsigned_t;
     using number_float_t = typename BasicJsonType::number_float_t;
     using string_t = typename BasicJsonType::string_t;
+    using char_type = typename InputAdapterType::char_type;
+    using char_int_type = typename std::char_traits<char_type>::int_type;

   public:
     using token_type = typename lexer_base<BasicJsonType>::token_type;

     explicit lexer(InputAdapterType&& adapter)
-        : ia(std::move(adapter)), decimal_point_char(get_decimal_point()) {}
+        : ia(std::move(adapter)), decimal_point_char(static_cast<char_type>(get_decimal_point())) {}

     // delete because of pointer members
     lexer(const lexer&) = delete;
@@ -8261,7 +8267,7 @@ class lexer : public lexer_base

     @return true if and only if no range violation was detected
     */
-    bool next_byte_in_range(std::initializer_list<int> ranges)
+    bool next_byte_in_range(std::initializer_list<char_int_type> ranges)
     {
         assert(ranges.size() == 2 or ranges.size() == 4 or ranges.size() == 6);
         add(current);
@@ -8312,7 +8318,7 @@ class lexer : public lexer_base
             switch (get())
             {
                 // end of file while parsing string
-                case std::char_traits<char>::eof():
+                case std::char_traits<char_type>::eof():
                 {
                     error_message = "invalid string: missing closing quote";
                     return token_type::parse_error;
@@ -8430,28 +8436,28 @@ class lexer : public lexer_base
                         if (codepoint < 0x80)
                         {
                             // 1-byte characters: 0xxxxxxx (ASCII)
-                            add(codepoint);
+                            add(static_cast<char_int_type>(codepoint));
                         }
                         else if (codepoint <= 0x7FF)
                         {
                             // 2-byte characters: 110xxxxx 10xxxxxx
-                            add(static_cast<int>(0xC0u | (static_cast<unsigned int>(codepoint) >> 6u)));
-                            add(static_cast<int>(0x80u | (static_cast<unsigned int>(codepoint) & 0x3Fu)));
+                            add(static_cast<char_int_type>(0xC0u | (static_cast<unsigned int>(codepoint) >> 6u)));
+                            add(static_cast<char_int_type>(0x80u | (static_cast<unsigned int>(codepoint) & 0x3Fu)));
                         }
                         else if (codepoint <= 0xFFFF)
                         {
                             // 3-byte characters: 1110xxxx 10xxxxxx 10xxxxxx
-                            add(static_cast<int>(0xE0u | (static_cast<unsigned int>(codepoint) >> 12u)));
-                            add(static_cast<int>(0x80u | ((static_cast<unsigned int>(codepoint) >> 6u) & 0x3Fu)));
-                            add(static_cast<int>(0x80u | (static_cast<unsigned int>(codepoint) & 0x3Fu)));
+                            add(static_cast<char_int_type>(0xE0u | (static_cast<unsigned int>(codepoint) >> 12u)));
+                            add(static_cast<char_int_type>(0x80u | ((static_cast<unsigned int>(codepoint) >> 6u) & 0x3Fu)));
+                            add(static_cast<char_int_type>(0x80u | (static_cast<unsigned int>(codepoint) & 0x3Fu)));
                         }
                         else
                         {
                             // 4-byte characters: 11110xxx 10xxxxxx 10xxxxxx 10xxxxxx
-                            add(static_cast<int>(0xF0u | (static_cast<unsigned int>(codepoint) >> 18u)));
-                            add(static_cast<int>(0x80u | ((static_cast<unsigned int>(codepoint) >> 12u) & 0x3Fu)));
-                            add(static_cast<int>(0x80u | ((static_cast<unsigned int>(codepoint) >> 6u) & 0x3Fu)));
-                            add(static_cast<int>(0x80u | (static_cast<unsigned int>(codepoint) & 0x3Fu)));
+                            add(static_cast<char_int_type>(0xF0u | (static_cast<unsigned int>(codepoint) >> 18u)));
+                            add(static_cast<char_int_type>(0x80u | ((static_cast<unsigned int>(codepoint) >> 12u) & 0x3Fu)));
+                            add(static_cast<char_int_type>(0x80u | ((static_cast<unsigned int>(codepoint) >> 6u) & 0x3Fu)));
+                            add(static_cast<char_int_type>(0x80u | (static_cast<unsigned int>(codepoint) & 0x3Fu)));
                         }

                         break;
@@ -9273,13 +9279,13 @@ class lexer : public lexer_base
     @param[in] return_type   the token type to return on success
     */
     JSON_HEDLEY_NON_NULL(2)
-    token_type scan_literal(const char* literal_text, const std::size_t length,
+    token_type scan_literal(const char_type* literal_text, const std::size_t length,
                             token_type return_type)
     {
         assert(current == literal_text[0]);
         for (std::size_t i = 1; i < length; ++i)
         {
-            if (JSON_HEDLEY_UNLIKELY(get() != literal_text[i]))
+            if (JSON_HEDLEY_UNLIKELY(std::char_traits<char_type>::to_char_type(get()) != literal_text[i]))
             {
                 error_message = "invalid literal";
                 return token_type::parse_error;
@@ -9297,7 +9303,7 @@ class lexer : public lexer_base
     {
         token_buffer.clear();
         token_string.clear();
-        token_string.push_back(std::char_traits<char>::to_char_type(current));
+        token_string.push_back(std::char_traits<char_type>::to_char_type(current));
     }

     /*
@@ -9310,7 +9316,7 @@ class lexer : public lexer_base

     @return character read from the input
     */
-    std::char_traits<char>::int_type get()
+    char_int_type get()
     {
         ++position.chars_read_total;
         ++position.chars_read_current_line;
@@ -9325,9 +9331,9 @@ class lexer : public lexer_base
             current = ia.get_character();
         }

-        if (JSON_HEDLEY_LIKELY(current != std::char_traits<char>::eof()))
+        if (JSON_HEDLEY_LIKELY(current != std::char_traits<char_type>::eof()))
         {
-            token_string.push_back(std::char_traits<char>::to_char_type(current));
+            token_string.push_back(std::char_traits<char_type>::to_char_type(current));
         }

         if (current == '\n')
@@ -9366,7 +9372,7 @@ class lexer : public lexer_base
             --position.chars_read_current_line;
         }

-        if (JSON_HEDLEY_LIKELY(current != std::char_traits<char>::eof()))
+        if (JSON_HEDLEY_LIKELY(current != std::char_traits<char_type>::eof()))
         {
             assert(not token_string.empty());
             token_string.pop_back();
@@ -9374,9 +9380,9 @@ class lexer : public lexer_base
     }

     /// add a character to token_buffer
-    void add(int c)
+    void add(char_int_type c)
     {
-        token_buffer.push_back(std::char_traits<char>::to_char_type(c));
+        token_buffer.push_back(static_cast<typename string_t::value_type>(c));
     }

   public:
@@ -9437,7 +9443,7 @@ class lexer : public lexer_base
         else
         {
             // add character as is
-            result.push_back(c);
+            result.push_back(static_cast<std::string::value_type>(c));
         }
     }

@@ -9507,11 +9513,20 @@ class lexer : public lexer_base

             // literals
             case 't':
-                return scan_literal("true", 4, token_type::literal_true);
+            {
+                std::array<char_type, 4> true_literal = {{'t', 'r', 'u', 'e'}};
+                return scan_literal(true_literal.data(), true_literal.size(), token_type::literal_true);
+            }
             case 'f':
return scan_literal("false", 5, token_type::literal_false); + { + std::array false_literal = {{'f', 'a', 'l', 's', 'e'}}; + return scan_literal(false_literal.data(), false_literal.size(), token_type::literal_false); + } case 'n': - return scan_literal("null", 4, token_type::literal_null); + { + std::array null_literal = {{'n', 'u', 'l', 'l'}}; + return scan_literal(null_literal.data(), null_literal.size(), token_type::literal_null); + } // string case '\"': @@ -9534,7 +9549,7 @@ class lexer : public lexer_base // end of input (the null byte is needed when parsing from // string literals) case '\0': - case std::char_traits::eof(): + case std::char_traits::eof(): return token_type::end_of_input; // error @@ -9549,7 +9564,7 @@ class lexer : public lexer_base InputAdapterType ia; /// the current character - std::char_traits::int_type current = std::char_traits::eof(); + char_int_type current = std::char_traits::eof(); /// whether the next get() call should just return current bool next_unget = false; @@ -9558,7 +9573,7 @@ class lexer : public lexer_base position_t position {}; /// raw input token string (for error messages) - std::vector token_string {}; + std::vector token_string {}; /// buffer for variable-length tokens (numbers, strings) string_t token_buffer {}; @@ -9572,7 +9587,7 @@ class lexer : public lexer_base number_float_t value_float = 0; /// the decimal point - const char decimal_point_char = '.'; + const char_type decimal_point_char = '.'; }; } // namespace detail } // namespace nlohmann @@ -20527,7 +20542,7 @@ class basic_json future 4.0.0 of the library. Please use @ref items() instead; that is, replace `json::iterator_wrapper(j)` with `j.items()`. */ - JSON_HEDLEY_DEPRECATED(3.1.0) + JSON_HEDLEY_DEPRECATED_FOR(3.1.0, items()) static iteration_proxy iterator_wrapper(reference ref) noexcept { return ref.items(); @@ -20536,7 +20551,7 @@ class basic_json /*! @copydoc iterator_wrapper(reference) */ - JSON_HEDLEY_DEPRECATED(3.1.0) + JSON_HEDLEY_DEPRECATED_FOR(3.1.0, items()) static iteration_proxy iterator_wrapper(const_reference ref) noexcept { return ref.items(); @@ -22293,7 +22308,7 @@ class basic_json instead; that is, replace calls like `j >> o;` with `o << j;`. @since version 1.0.0; deprecated since version 3.0.0 */ - JSON_HEDLEY_DEPRECATED(3.0.0) + JSON_HEDLEY_DEPRECATED_FOR(3.0.0, operator<<(std::ostream&, const basic_json&)) friend std::ostream& operator>>(const basic_json& j, std::ostream& o) { return o << j; @@ -22494,8 +22509,8 @@ class basic_json } template - JSON_HEDLEY_NON_NULL(2) JSON_HEDLEY_DEPRECATED_FOR(3.8.0, sax_parse(ptr, ptr + len, ...)) + JSON_HEDLEY_NON_NULL(2) static bool sax_parse(detail::span_input_adapter&& i, SAX* sax, input_format_t format = input_format_t::json, const bool strict = true) @@ -22506,9 +22521,6 @@ class basic_json : detail::binary_reader(std::move(ia)).sax_parse(format, sax, strict); } - - - /*! @brief deserialize from stream @deprecated This stream operator is deprecated and will be removed in @@ -22517,7 +22529,7 @@ class basic_json instead; that is, replace calls like `j << i;` with `i >> j;`. 
     */
-    JSON_HEDLEY_DEPRECATED(3.0.0)
+    JSON_HEDLEY_DEPRECATED_FOR(3.0.0, operator>>(std::istream&, basic_json&))
     friend std::istream& operator<<(basic_json& j, std::istream& i)
     {
         return operator>>(i, j);
diff --git a/test/src/unit-class_parser.cpp b/test/src/unit-class_parser.cpp
index 5fa7b37ee9..097b306031 100644
--- a/test/src/unit-class_parser.cpp
+++ b/test/src/unit-class_parser.cpp
@@ -1587,7 +1587,7 @@ TEST_CASE("parser class")
         CHECK (j_filtered1.size() == 2);
         CHECK (j_filtered1 == json({1, {{"qux", "baz"}}}));

-        json j_filtered2 = json::parse(structured_array, [](int, json::parse_event_t e, const json & /*parsed*/)
         {
             if (e == json::parse_event_t::object_end)
             {
diff --git a/test/src/unit-user_defined_input.cpp b/test/src/unit-user_defined_input.cpp
index 86f763d5a9..f0895a5d4d 100644
--- a/test/src/unit-user_defined_input.cpp
+++ b/test/src/unit-user_defined_input.cpp
@@ -87,7 +87,6 @@ TEST_CASE("Custom iterator")
         using reference = const char&;
         using iterator_category = std::input_iterator_tag;

-
         MyIterator& operator++()
         {
             ++ptr;
@@ -117,6 +116,4 @@ TEST_CASE("Custom iterator")
     CHECK(as_json.at(3) == 4);
 }

-
-
-}
\ No newline at end of file
+} // namespace
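
Reviewer note (not part of the patch): the recurring pattern in this change is that every input adapter exposes a `char_type`, and `lexer`/`binary_reader` derive their `char_int_type` from it via `std::char_traits` instead of hard-coding `char`/`int`. A minimal, self-contained sketch of that pattern follows; the names `toy_adapter` and `count_chars` are hypothetical and chosen only for illustration, not taken from the library.

#include <cstddef>
#include <cstdio>
#include <string>

// Hypothetical adapter mirroring the shape the library's input adapters have
// after this patch: a char_type typedef plus get_character() returning
// std::char_traits<char_type>::int_type.
struct toy_adapter
{
    using char_type = char;

    explicit toy_adapter(std::string s) : data(std::move(s)) {}

    std::char_traits<char_type>::int_type get_character()
    {
        return pos < data.size()
               ? std::char_traits<char_type>::to_int_type(data[pos++])
               : std::char_traits<char_type>::eof();
    }

    std::string data;
    std::size_t pos = 0;
};

// Consumer in the style of lexer/binary_reader: the integer character type is
// derived from the adapter's char_type rather than hard-coded.
template<typename InputAdapterType>
std::size_t count_chars(InputAdapterType ia)
{
    using char_type = typename InputAdapterType::char_type;
    using char_int_type = typename std::char_traits<char_type>::int_type;

    std::size_t n = 0;
    for (char_int_type c = ia.get_character();
         c != std::char_traits<char_type>::eof();
         c = ia.get_character())
    {
        // convert back to char_type only at the point of use
        std::printf("%c", std::char_traits<char_type>::to_char_type(c));
        ++n;
    }
    return n;
}

int main()
{
    return count_chars(toy_adapter("null")) == 4 ? 0 : 1;
}

The reason both classes store `current` as `char_int_type` rather than `char_type` is the same reason the standard streams do: EOF must stay representable outside the value range of `char_type`, and `to_char_type`/`to_int_type` mark the exact points where the two representations meet.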