diff --git a/rebar.config b/rebar.config index 1bc3e28..4eb1433 100644 --- a/rebar.config +++ b/rebar.config @@ -1,7 +1,12 @@ {require_min_otp_vsn, "21"}. {deps, [ - {jsx, "3.1.0"} + {jsx, "3.1.0"}, + {euneus, "0.5.1"} +]}. + +{dialyzer, [ + {plt_extra_apps, [euneus]} ]}. {erl_opts, [ diff --git a/rebar.lock b/rebar.lock index 072c702..d5230c6 100644 --- a/rebar.lock +++ b/rebar.lock @@ -1,8 +1,11 @@ {"1.2.0", -[{<<"jsx">>,{pkg,<<"jsx">>,<<"3.1.0">>},0}]}. +[{<<"euneus">>,{pkg,<<"euneus">>,<<"0.5.1">>},0}, + {<<"jsx">>,{pkg,<<"jsx">>,<<"3.1.0">>},0}]}. [ {pkg_hash,[ + {<<"euneus">>, <<"1F3151D77B5188584BF78C8509413F5343538DE551F4954856F2C5B9F55A085B">>}, {<<"jsx">>, <<"D12516BAA0BB23A59BB35DCCAF02A1BD08243FCBB9EFE24F2D9D056CCFF71268">>}]}, {pkg_hash_ext,[ + {<<"euneus">>, <<"63FC3E670D0AF531816E6465871AB9A72C3795ABF822CA79F600229CF73319E0">>}, {<<"jsx">>, <<"0C5CC8FDC11B53CC25CF65AC6705AD39E54ECC56D1C22E4ADB8F5A53FB9427F3">>}]} ]. diff --git a/rebar.test.config b/rebar.test.config index cbc6679..943e48f 100644 --- a/rebar.test.config +++ b/rebar.test.config @@ -10,5 +10,6 @@ {deps, [ {lager, "3.8.0"}, {proper, "1.3.0"}, - {jsx, "3.1.0"} + {jsx, "3.1.0"}, + {euneus, "0.5.1"} ]}. diff --git a/src/jsxrecordx.erl b/src/jsxrecordx.erl new file mode 100644 index 0000000..7f5c033 --- /dev/null +++ b/src/jsxrecordx.erl @@ -0,0 +1,263 @@ +%% @author Marc Worrell +%% @copyright 2018-2023 Marc Worrell +%% @doc JSON with records and 'undefined'/'null' mapping. Wrapper around euneus. +%% @end + +%% Copyright 2018-2023 Marc Worrell +%% +%% Licensed under the Apache License, Version 2.0 (the "License"); +%% you may not use this file except in compliance with the License. +%% You may obtain a copy of the License at +%% +%% http://www.apache.org/licenses/LICENSE-2.0 +%% +%% Unless required by applicable law or agreed to in writing, software +%% distributed under the License is distributed on an "AS IS" BASIS, +%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and +%% limitations under the License. + +-module(jsxrecordx). + +-author('Marc Worrell '). + +-export([ + encode/1, + decode/1, + + load_records/1, + record_defs/0 +]). + +-define(RECORD_TYPE, <<"_type">>). + +-include_lib("kernel/include/logger.hrl"). + +%%==================================================================== +%% API +%%==================================================================== + +-spec encode( term() ) -> binary(). +encode(Source) -> + encode_json(Source). + +-spec decode( binary() | undefined ) -> term(). +decode(undefined) -> + undefined; +decode(Bin) when is_binary(Bin) -> + decode_json(Bin). + +%% @doc Load all record definitions. +-spec record_defs() -> map(). +record_defs() -> + try jsxrecord_defs:defs() + catch _:_ -> + _ = application:start(jsxrecord), + {ok, Ms} = application:get_env(jsxrecord, record_modules), + ok = do_load_records(Ms, #{}), + jsxrecord_defs:defs() + end. + +-spec load_records( module() | list( module( )) ) -> ok. +load_records(Module) when is_atom(Module) -> + load_records([ Module ]); +load_records(Modules) -> + do_load_records(Modules, record_defs_int()). + +%%==================================================================== +%% Internal +%%==================================================================== + +%% @doc Load all record definitions. +-spec record_defs_int() -> map(). +record_defs_int() -> + try + erlang:apply(jsxrecord_defs, defs, []) + catch _:_ -> + #{} + end. + +do_load_records(Modules, CurrRecordDefs) -> + Records = lists:flatten( lists:map( fun(M) -> extract_records(M) end, Modules ) ), + New = lists:foldl( + fun({Name, Fs}, Acc) -> + FsB = [ {atom_to_binary(F, utf8), Init} || {F,Init} <- Fs ], + Acc#{ atom_to_binary(Name, utf8) => FsB } + end, + CurrRecordDefs, + Records), + compile_module(New). 
+ +encode_json(Term) -> + Options = #{ + nulls => [undefined, null], + list_encoder => fun + ([{K, _} | _] = Proplist, Opts) + when is_binary(K); is_atom(K); is_integer(K) -> + Map = proplists:to_map(Proplist), + euneus_encoder:encode_map(Map, Opts); + (List, Opts) -> + euneus_encoder:encode_list(List, Opts) + end, + unhandled_encoder => fun + ({struct, MochiJSON}, Opts) -> + Map = mochijson_to_map(MochiJSON), + euneus_encoder:encode_map(Map, Opts); + (R, _Opts) when is_tuple(R), is_atom(element(1, R)) -> + T = atom_to_binary(element(1, R), utf8), + case maps:find(T, record_defs()) of + {ok, Def} -> + encode_json(expand_record_1( + Def, 2, R, #{ ?RECORD_TYPE => T } + )); + error -> + euneus_encoder:throw_unsupported_type_error(R) + end; + (T, _Opts) -> + euneus_encoder:throw_unsupported_type_error(T) + end, + error_handler => fun jsx_error/3 + }, + case euneus:encode_to_binary(Term, Options) of + {ok, JSON} -> + JSON; + {error, Reason} -> + error(Reason) + end. + +decode_json(<<>>) -> undefined; +decode_json(B) -> + Options = #{ + objects => fun(M1, _Opts) -> + case maps:find(?RECORD_TYPE, M1) of + {ok, Type} -> + case maps:find(Type, record_defs_int()) of + {ok, Def} -> + Rec = lists:foldl( + fun({F, Default}, Acc) -> + V1 = case maps:get(F, M1, Default) of + V when is_map(V), is_list(Default) -> + make_proplist(V); + V -> + V + end, + [ V1 | Acc ] + end, + [ binary_to_atom(Type, utf8) ], + Def), + list_to_tuple( lists:reverse(Rec) ); + error -> + M1 + end; + error -> + M1 + end + end + }, + case euneus:decode(B, Options) of + {ok, Term} -> + Term; + {error, Reason} -> + error(Reason) + end. 
+ +jsx_error(throw, {{token, Token}, Rest, Opts, Input, Pos, Buffer}, _Stacktrace) -> + ?LOG_ERROR(#{ + in => jsxrecord, + text => <<"Error mapping value to JSON">>, + result => error, + reason => json_token, + token => Token + }), + Replacement = null, + euneus_decoder:resume(Token, Replacement, Rest, Opts, Input, Pos, Buffer); +jsx_error(Class, Reason, Stacktrace) -> + euneus_decoder:handle_error(Class, Reason, Stacktrace). + +make_proplist(Map) -> + L = maps:to_list(Map), + lists:map( + fun + ({K,V}) when is_binary(K) -> + try + {binary_to_existing_atom(K, utf8), V} + catch + _:_ -> {K, V} + end; + (KV) -> + KV + end, + L). + +expand_record_1([ {F, _} | Fs ], N, R, Acc) -> + Acc1 = Acc#{ F => element(N, R) }, + expand_record_1(Fs, N+1, R, Acc1); +expand_record_1([], _N, _R, Acc) -> + Acc. + +mochijson_to_map({struct, L}) -> + maps:from_list([ mochijson_to_map(V) || V <- L ]); +mochijson_to_map({K, V}) -> + {K, mochijson_to_map(V)}; +mochijson_to_map(V) -> + V. + +%% @doc Compile the record defs to a module, for efficient caching of all definitions +-spec compile_module( map() ) -> ok. +compile_module( Defs ) -> + {ok, Module, Bin} = compile(Defs), + code:purge(Module), + {module, _} = code:load_binary(Module, "jsxrecord_defs.erl", Bin), + ok. + +-spec compile( map() ) -> {ok, atom(), binary()}. +compile(Defs) -> + ModuleAst = erl_syntax:attribute(erl_syntax:atom(module), [ erl_syntax:atom(jsxrecord_defs) ]), + ExportAst = erl_syntax:attribute( + erl_syntax:atom(export), + [ erl_syntax:list([ + erl_syntax:arity_qualifier(erl_syntax:atom(defs), erl_syntax:integer(0)) + ]) + ]), + FunAst = erl_syntax:function( + erl_syntax:atom(defs), + [ erl_syntax:clause([], none, [ erl_syntax:abstract(Defs) ]) ]), + Forms = [ erl_syntax:revert(X) || X <- [ ModuleAst, ExportAst, FunAst ] ], + {ok, Module, Bin} = compile:forms(Forms, []), + {ok, Module, Bin}. + +-spec extract_records( module() ) -> list( {atom(), list( {atom(), term()} )} ).
+extract_records(Module) -> + case code:which(Module) of + BeamFile when is_list(BeamFile) -> + case beam_lib:chunks(BeamFile, [ abstract_code ]) of + {ok, {_, [ {abstract_code, {_, AbstractCode }} ]} } -> + extract_records_abs(AbstractCode); + _ -> + [] + end; + + _Other -> + [] + end. + +%% @doc Extract all record definitions from the abstract code +extract_records_abs( AbstractCode ) -> + lists:filtermap( + fun + ({attribute, _Pos, record, {Name, Fields}}) -> + {true, {Name, to_field_names(Fields)}}; + (_) -> + false + end, + AbstractCode). + +to_field_names(Fields) -> + [ to_field_name(Field) || Field <- Fields ]. + +to_field_name({typed_record_field, RecField, _Type}) -> + to_field_name(RecField); +to_field_name({record_field, _Line, {atom, _, FieldName}}) -> + {FieldName, undefined}; +to_field_name({record_field, _Line, {atom, _, FieldName}, InitExpr}) -> + {FieldName, erl_syntax:concrete(InitExpr)}. diff --git a/test/jsxrecordx_SUITE.erl b/test/jsxrecordx_SUITE.erl new file mode 100644 index 0000000..f641758 --- /dev/null +++ b/test/jsxrecordx_SUITE.erl @@ -0,0 +1,135 @@ +-module(jsxrecordx_SUITE). + +-compile(export_all). + +-include_lib("common_test/include/ct.hrl"). + +-record(test, { + a = 1, + b = 2, + c +}). + +-record(trans, { + tr = [] +}). + +%%-------------------------------------------------------------------- +%% COMMON TEST CALLBACK FUNCTIONS +%%-------------------------------------------------------------------- +init_per_suite(Config) -> + Config. + +end_per_suite(_Config) -> + ok. + +init_per_testcase(_TestCase, Config) -> + Config. + +end_per_testcase(_TestCase, _Config) -> + ok. + +all() -> + [ + undefined_value, + records, + records_nested, + record_defaults, + dates, + times, + proplist, + record_proplist, + mixed_list + ]. 
+ +%%-------------------------------------------------------------------- +%% TEST CASES +%%-------------------------------------------------------------------- + +undefined_value(_Config) -> + <<"{\"a\":null}">> = encode( #{ a => undefined } ), + #{ <<"a">> := undefined } = decode( <<"{\"a\":null}">> ), + ok. + +records(_Config) -> + ok = jsxrecordx:load_records(?MODULE), + #test{} = decode( encode( #test{} ) ), + #test{ a = <<"a">> } = decode( encode( #test{ a = a } ) ), + #test{ a = undefined } = decode( encode( #test{ a = undefined } ) ), + ok. + +records_nested(_Config) -> + #test{ a = #test{} } = decode( encode( #test{ a = #test{} } ) ), + <<"{\"a\":{\"_type\":\"test\",\"a\":1,\"b\":2,\"c\":null}}">> = encode(#{ a => #test{} }), + #{ <<"a">> := #test{} } = decode( encode(#{ a => #test{} }) ), + ok. + +record_defaults(_Config) -> + #test{ a = 1, b = 2, c = undefined } = decode( <<"{\"_type\":\"test\"}">> ), + ok. + +dates(_Config) -> + <<"\"2008-12-10T13:30:00Z\"">> = encode({{2008, 12, 10}, {13, 30, 0}}), + {{2008, 12, 10}, {13, 30, 0}} = decode(<<"\"2008-12-10T13:30:00Z\"">>), + + <<"[\"2008-12-10T13:30:00Z\",\"2008-12-10T13:30:00Z\"]">> = + encode([{{2008, 12, 10}, {13, 30, 0}}, {{2008, 12, 10}, {13, 30, 0}}]), + [{{2008, 12, 10}, {13, 30, 0}}, {{2008, 12, 10}, {13, 30, 0}}] = + decode(<<"[\"2008-12-10T13:30:00Z\",\"2008-12-10T13:30:00Z\"]">>), + + ok. + +times(_Config) -> + <<"\"2020-06-12T14:00:11.571Z\"">> = encode({1591,970411,571321}), + % We lose a little bit of precision, but that is ok. + {1591,970411,571000} = decode( <<"\"2020-06-12T14:00:11.571Z\"">> ), + ok. + +proplist(_Config) -> + <<"{\"a\":1}">> = encode([ {a, 1} ]), + ok. + +record_proplist(_Config) -> + Tr = #trans{ tr = [ {en, <<"hello">>} ]}, + Json = encode(Tr), + Tr = decode(Json), + ok.
+ +mixed_list(_Config) -> + L = [{n,7}, + {mean,166347}, + {min,750}, + {max,828167}, + {median,880}, + {50,880}, + {75,1143}, + {90,828167}, + {95,828167}, + {99,828167}, + {999,828167}], + E = [ + "\"n\":7", + "\"mean\":166347", + "\"min\":750", + "\"max\":828167", + "\"median\":880", + "\"50\":880", + "\"75\":1143", + "\"90\":828167", + "\"95\":828167", + "\"99\":828167", + "\"999\":828167" + ], + JSON = encode(L), + [ match = re:run(JSON, RE, [{capture, none}]) || RE <- E ], + ok. + +%%-------------------------------------------------------------------- +%% SUPPORT FUNCTIONS +%%-------------------------------------------------------------------- + +encode(Source) -> + jsxrecordx:encode(Source). + +decode(Bin) -> + jsxrecordx:decode(Bin).