
Commit

Missing import.
jdunkerley committed Dec 20, 2022
1 parent ab4fa41 commit 75f3856
Showing 14 changed files with 85 additions and 53 deletions.
16 changes: 8 additions & 8 deletions distribution/lib/Standard/Base/0.0.0-dev/src/Data/Json.enso
@@ -191,26 +191,26 @@ type JS_Object

## PRIVATE
Render the JS_Object to Text with truncated depth.
render object depth=0 max_depth=5 = case object of
render object depth=0 max_depth=5 max_length=100 = case object of
_ : JS_Object ->
if depth == max_depth then '"{...}"' else
len = object.length
keys = object.field_names.take 50
children = keys.map key-> key.to_json + ': ' + (render (object.get key) (depth+1) max_depth)
'{' + (children.join ", ") + (if len > 50 then ', "...": "Another ' + (len - 50).to_text + ' items truncated."' else "") + '}'
keys = object.field_names.take max_length
children = keys.map key-> key.to_json + ': ' + (render (object.get key) (depth+1) max_depth max_length)
'{' + (children.join ", ") + (if len > max_length then ', "...": "Another ' + (len - max_length).to_text + ' items truncated."' else "") + '}'
_ : Vector ->
if depth == max_depth then '"[...]"' else
len = object.length
children = 0.up_to (50.min len) . map i-> render (object.at i) (depth+1) max_depth
"[" + (children.join ", ") + (if len > 50 then ', "... another ' + (len - 50).to_text + ' items truncated."' else "") + "]"
children = 0.up_to (max_length.min len) . map i-> render (object.at i) (depth+1) max_depth max_length
"[" + (children.join ", ") + (if len > max_length then ', "... another ' + (len - max_length).to_text + ' items truncated."' else "") + "]"
_ : Array ->
proxy = Array_Proxy.new object.length (i-> make_enso (object.at i))
@Tail_Call render (Vector.from_polyglot_array proxy) depth max_depth
@Tail_Call render (Vector.from_polyglot_array proxy) depth max_depth max_length
Nothing -> object.to_json
_ : Text -> object.to_json
_ : Boolean -> object.to_json
_ : Number -> object.to_json
_ -> @Tail_Call render (JS_Object.Value object) depth max_depth
_ -> @Tail_Call render (JS_Object.Value object) depth max_depth max_length

## PRIVATE
Internal function ensuring that a JavaScript object is presented appropriately to Enso.
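Below is a minimal usage sketch, not part of this commit, of the new max_length parameter, assuming access to the private render helper; the object and key names are illustrative only.

example_truncated_render =
    pairs = 0.up_to 150 . map i-> ["key_" + i.to_text, i]
    big = JS_Object.from_pairs pairs
    # With max_length=100 only the first 100 fields are rendered; the rest are
    # replaced by an "Another 50 items truncated." marker.
    render big max_length=100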
2 changes: 1 addition & 1 deletion distribution/lib/Standard/Geo/0.0.0-dev/src/Geo_Json.enso
@@ -23,5 +23,5 @@ geo_json_to_table geo_json fields=Nothing = case geo_json of
case features of
_ : Vector ->
feature_rows = features.map Helpers.get_feature_row
Table.from_json feature_rows fields
Table.from_objects feature_rows fields
_ -> Error.throw (Invalid_JSON_Format.Error geo_json "not having the 'features' key.")
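A hedged sketch, not from the diff, of how geo_json_to_table is typically called; the GeoJSON literal is a made-up minimal FeatureCollection and the exact feature handling is an assumption.

example_geo_table =
    geo = Json.parse '{"type": "FeatureCollection", "features": [{"type": "Feature", "properties": {"name": "A"}, "geometry": {"type": "Point", "coordinates": [10, 20]}}]}'
    # Each feature row is now turned into a table row via Table.from_objects.
    geo_json_to_table geo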
@@ -1,4 +1,5 @@
from Standard.Base import all
import Standard.Base.Error.Common.Type_Error
import Standard.Base.Error.Unimplemented.Unimplemented

import project.Data.Table.Table
@@ -17,31 +18,27 @@ Text.from (that : Table) (format:Delimited_Format = Delimited_Format.Delimited '
_ : Delimited_Format -> Delimited_Writer.write_text that format
_ -> Unimplemented.throw "Text.from is currently only implemented for Delimited_Format."

## Converts a JSON object or a Vector into a dataframe, by looking up the requested keys
from each item.
## Converts an object or a Vector of objects into a Table, by looking up the
requested fields from each item.

Arguments:
- fields: a vector of texts representing the names of fields to look up.
- value: The object or Vector of objects to convert to a table.
If a Vector or Array, then each item is added as a row.
Otherwise, treated as a single row.
- fields: a Vector of Text representing the names of fields to look up.
If `Nothing` then all fields found are added.

The function assumes the elements have one of the following structures:
- a JSON object containing the requested keys. In case an item is not an
object, or the request key does not exist, the relevant values of the table
will be set to `Nothing`.
- otherwise, the element will match the `Value` field name.

> Example Converts a JSON array containing key-value pairs into a table for the
provided headers.
> Example Converts a set of objects into a table using the provided headers.

from Standard.Table import Table
import Standard.Examples

example_to_table =
json = Examples.simple_table_json
headers = Examples.simple_table_json_headers
Table.from_json json headers
Table.from_json : Any -> Vector | Nothing -> Table
Table.from_json value fields=Nothing =
Table.from_objects json headers
Table.from_objects : Any -> Vector | Nothing -> Table ! Type_Error
Table.from_objects value fields=Nothing =
table_for_value v =
column_names = fields.if_nothing ["Value"]
column_values = column_names.map f-> if f == "Value" then v else Nothing
@@ -53,7 +50,7 @@ Table.from_json value fields=Nothing =
_ : Number -> ["Value"]
_ : Boolean -> ["Value"]
_ : Text -> ["Value"]
_ -> Error.throw (Invalid_JSON_Format.Error v "Invalid item for Table.from_json.")
_ -> Error.throw (Type_Error.Error Any "Invalid item within Vector for Table.from_objects. Currently only JS_Object, Number, Boolean, Text and Nothing are supported.")

get_value v field = case v of
_ : JS_Object -> v.get field
@@ -67,7 +64,7 @@ Table.from_json value fields=Nothing =
_ : JS_Object ->
field_names = fields.if_nothing value.field_names
values = field_names.map value.get
Table.new [["Name", field_names], ["Value", values]]
Table.from_rows field_names [values]
_ : Vector ->
len = value.length

@@ -89,5 +86,5 @@ Table.from_json value fields=Nothing =
data.to_vector

Table.new (used_fields.zip used_values)
_ : Array -> Table.from_json (Vector.from_polyglot_array value) fields
_ -> Error.throw (Invalid_JSON_Format.Error value "Invalid value for Table.from_json.")
_ : Array -> Table.from_objects (Vector.from_polyglot_array value) fields
_ -> Error.throw (Type_Error.Error Any "Invalid value for Table.from_objects. Currently only JS_Object, Vector, Array, Number, Boolean, Text and Nothing are supported.")
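A usage sketch of the renamed Table.from_objects entry point; it is not part of the commit and simply mirrors the updated Json_Spec test further down.

from Standard.Base import all
from Standard.Table import Table

example_from_objects =
    rows = [JS_Object.from_pairs [["x", 1], ["y", "a"]], JS_Object.from_pairs [["x", 2]]]
    # Fields missing from a row are filled with Nothing.
    objects_table = Table.from_objects rows ["x", "y"]
    # Plain values land in a single "Value" column.
    values_table = Table.from_objects [1, 2, 3]
    [objects_table, values_table]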
@@ -60,7 +60,7 @@ type Delimited_Format
## If the File_Format supports reading from the web response, return a configured instance.
for_web : Text -> URI -> Delimited_Format | Nothing
for_web content_type _ =
parts = content_type.split ";" . map (x->x.trim)
parts = content_type.split ";" . map .trim

charset_part = parts.find (x->x.starts_with "charset=")
encoding = if charset_part.is_error then Encoding.utf_8 else
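For clarity, a tiny sketch, not in the diff, of what the simplified mapping does; passing .trim directly is equivalent to the removed x->x.trim lambda.

example_parts =
    content_type = "text/csv; charset=utf-8"
    content_type.split ";" . map .trim
    # => ["text/csv", "charset=utf-8"]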
@@ -31,4 +31,7 @@ process_to_json_text value =
json = case value of
Table.Value _ -> json_from_table value . to_text
_ -> value.to_json

## Workaround so that the JS String is converted to a Text
https://www.pivotaltracker.com/story/show/184061302
"" + json
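The same "" + json workaround recurs in several visualization files below. A hypothetical helper, an assumption about the intent described in the linked ticket, that captures the pattern:

## Hypothetical helper, not part of the commit: concatenating with an empty
   Enso Text forces a polyglot JavaScript string into a proper Text value.
as_enso_text js_string = "" + js_string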
@@ -141,15 +141,15 @@ Vector.default_visualization self = Id.table
Vector.to_default_visualization_data : Text
Vector.to_default_visualization_data self = render_vector self

render_vector object depth=0 max_depth=5 =
render_vector object depth=0 max_depth=5 max_length=100 =
case object of
_ : Vector -> if depth == max_depth then "[...]" else
result = object.take 100 . map (elem-> if elem.is_error then elem.to_json else render_vector elem (depth+1) max_depth)
result = object.take max_length . map (elem-> if elem.is_error then elem.to_json else render_vector elem (depth+1) max_depth max_length)
"[" + result.join ", " + "]"
_ : Array ->
proxy = Array_Proxy.new object.length object.at
@Tail_Call render_vector (Vector.from_polyglot_array proxy) depth max_depth
_ : JS_Object -> render object depth max_depth
@Tail_Call render_vector (Vector.from_polyglot_array proxy) depth max_depth max_length
_ : JS_Object -> render object depth max_depth max_length
_ -> object.to_default_visualization_data

## UNSTABLE
@@ -68,4 +68,6 @@ from_value value =
- value: the value to be visualized.
process_to_json_text : Any -> Text
process_to_json_text value =
## Workaround so that the JS String is converted to a Text
https://www.pivotaltracker.com/story/show/184061302
"" + (from_value value . to_json)
@@ -5,15 +5,16 @@ import project.Helpers
Default visualization preprocessor.
default_preprocessor x =
## Workaround so that the JS String is converted to a Text
https://www.pivotaltracker.com/story/show/184061302
"" + x.to_default_visualization_data

## PRIVATE
Error visualization preprocessor.
error_preprocessor x =
ok = '{ message: ""}'
ok = JS_Object.from_pairs [['message', '']] . to_json
result = x.map_error err->
message = err.to_display_text
stack_trace = x.get_stack_trace_text.if_nothing "" . split '\n'
full_message = message + if stack_trace.length > 1 then " (" + stack_trace.at 1 . trim +")" else ""
'{ "kind": "Dataflow", "message": ' + full_message.to_json + '}'
JS_Object.from_pairs [['kind', 'Dataflow'], ['message', full_message]] . to_json
if result.is_error then result.catch else ok
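An illustrative sketch, not from the commit, of why JS_Object.from_pairs is preferable to hand-built JSON text here: quotes in the message are escaped automatically.

example_error_payload =
    message = 'Unexpected "quote" in the error text'
    # from_pairs escapes the embedded quotes; the old concatenation did not.
    JS_Object.from_pairs [["kind", "Dataflow"], ["message", message]] . to_json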
@@ -30,6 +30,9 @@ prepare_visualization x = Helpers.recover_errors <|
JS_Object.from_pairs [["value", value], ["actual_type", actual_type], ["expected_sql_type", expected_sql_type], ["expected_enso_type", expected_enso_type]]
dialect = x.connection.dialect.name
json = JS_Object.from_pairs [["dialect", dialect], ["code", code], ["interpolations", mapped]] . to_text

## Workaround so that the JS String is converted to a Text
https://www.pivotaltracker.com/story/show/184061302
"" + json

## PRIVATE
@@ -108,7 +108,7 @@ Table.point_data self =
pairs = columns.map column->
value = column.at row_n . catch_ Nothing
[column.name, value]
Json.from_pairs pairs
JS_Object.from_pairs pairs

## PRIVATE

@@ -130,7 +130,7 @@ Table.axes self =
## PRIVATE
Vector.point_data : Vector -> Vector
Vector.point_data self =
self.map_with_index i-> elem-> Json.from_pairs [[Point_Data.X.name, i], [Point_Data.Y.name, elem]]
self.map_with_index i-> elem-> JS_Object.from_pairs [[Point_Data.X.name, i], [Point_Data.Y.name, elem]]

## PRIVATE
bound_data bounds data = case bounds of
@@ -142,9 +142,8 @@ bound_data bounds data = case bounds of
max_y = bounds.at 3

data.filter <| datum->
v = Json.parse datum
x = v.get "x"
y = v.get "y"
x = datum.get "x"
y = datum.get "y"

min_x<=x && x<=max_x && min_y<=y && y<=max_y

@@ -155,8 +154,8 @@ type Extreme
limit_data limit data = case limit of
Nothing -> data
_ -> if ((data.length <= limit) || (data.length == 0)) then data else
x datum = ((Json.parse datum).get "x")
y datum = ((Json.parse datum).get "y")
x datum = datum.get "x"
y datum = datum.get "y"

update_extreme current idx point =
new_min_x = if x current.min_x.second > x point then [idx, point] else current.min_x
@@ -175,17 +174,17 @@ limit_data limit data = case limit of
extreme + data.take (Index_Sub_Range.Sample (limit - extreme.length))

## PRIVATE
json_from_table : Table -> [Int]|Nothing -> Int|Nothing -> JS_Object
json_from_table : Table -> [Int]|Nothing -> Int|Nothing -> Text
json_from_table table bounds limit =
data = table.point_data |> bound_data bounds |> limit_data limit
axes = table.axes
'{"' + data_field + '": [' + (data.join ",") + '],' + '"' + axis_field + '":' + axes.to_json + '}'
JS_Object.from_pairs [[data_field, data], [axis_field, axes]] . to_json

## PRIVATE
json_from_vector : Vector Any -> [Int]|Nothing -> Int|Nothing -> JS_Object
json_from_vector : Vector Any -> [Int]|Nothing -> Int|Nothing -> Text
json_from_vector vec bounds limit =
data = vec.point_data |> bound_data bounds |> limit_data limit
'{"' + data_field + '": [' + (data.join ",") + '],' + '"' + axis_field + '":null}'
JS_Object.from_pairs [[data_field, data], [axis_field, Nothing]] . to_json

## PRIVATE

@@ -203,4 +202,6 @@
_ : Vector -> json_from_vector value bounds limit
_ -> json_from_vector value.to_vector bounds limit

## Workaround so that the JS String is converted to a Text
https://www.pivotaltracker.com/story/show/184061302
"" + json
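A hedged sketch of the JSON now produced through JS_Object.from_pairs instead of manual concatenation; it assumes data_field and axis_field resolve to "data" and "axis", which is not shown in this diff.

example_vector_json =
    # Expected shape under that assumption:
    # '{"data":[{"x":0,"y":1},{"x":1,"y":4},{"x":2,"y":9}],"axis":null}'
    json_from_vector [1, 4, 9] Nothing Nothing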
@@ -21,14 +21,17 @@ import project.Helpers

In case of Database backed data, it materializes a fragment of the data.
prepare_visualization : Any -> Integer -> Text
prepare_visualization x max_rows=1000 = Helpers.recover_errors <| "" + case x of
prepare_visualization x max_rows=1000 = Helpers.recover_errors <| case x of
_ : Dataframe_Table ->
dataframe = x.take (First max_rows)
all_rows_count = x.row_count
included_rows = dataframe.row_count
index = dataframe.index.catch Any _->
Dataframe_Column.from_vector "" (Vector.new included_rows i->i)
make_json dataframe [index] all_rows_count

## Workaround so that the JS String is converted to a Text
https://www.pivotaltracker.com/story/show/184061302
"" + make_json dataframe [index] all_rows_count

_ : Database_Table ->
# Materialize a table with indices as normal columns (because dataframe does not support multi-indexing).
@@ -37,7 +40,10 @@ prepare_visualization x max_rows=1000 = Helpers.recover_errors <| case x of
vis_df = df.select_columns (Column_Selector.By_Name (x.columns.map .name))
indices = if x.indices.length == 0 then [] else df.select_columns (Column_Selector.By_Name (x.indices.map .name)) . columns
all_rows_count = x.row_count
make_json vis_df indices all_rows_count

## Workaround so that the JS String is converted to a Text
https://www.pivotaltracker.com/story/show/184061302
"" + make_json vis_df indices all_rows_count

# We display columns as 1-column tables.
_ : Dataframe_Column ->
8 changes: 4 additions & 4 deletions test/Table_Tests/src/IO/Json_Spec.enso
@@ -11,22 +11,22 @@ spec = Test.group 'JSON conversion' <|
clothes = enso_project.data/'clothes.csv' . read
simple_empty = enso_project.data/'simple_empty.csv' . read

Test.specify 'should convert tables to a format compatible with Table.from_json' <|
Test.specify 'should convert tables to a format compatible with Table.from_objects' <|
clothes_json = clothes.to_json
Table.from_json (Json.parse clothes_json) ['Id', 'Name', 'Quantity', 'Rating', 'Price'] . should_equal clothes
Table.from_objects (Json.parse clothes_json) ['Id', 'Name', 'Quantity', 'Rating', 'Price'] . should_equal clothes

Test.specify 'should write JSON tables to disk' <|
out = enso_project.data / 'out.json'
out.delete_if_exists
simple_empty.write_json out
Table.from_json (Json.parse out.read_text) ['a', 'b', 'c'] . should_equal simple_empty
Table.from_objects (Json.parse out.read_text) ['a', 'b', 'c'] . should_equal simple_empty
out.delete_if_exists

Test.specify "should allow converting a JSON array into a table" <|
r_1 = JS_Object.from_pairs [['foo', 20], ['bar', 'baz'], ['baz', False]]
r_2 = JS_Object.from_pairs [['bar', 'xyz'], ['baz', True]]
r_3 = JS_Object.from_pairs [['baz', False], ['foo', 13]]
t = Table.from_json [r_1, r_2, r_3] ['foo', 'bar', 'baz']
t = Table.from_objects [r_1, r_2, r_3] ['foo', 'bar', 'baz']
t.columns.map .name . should_equal ['foo', 'bar', 'baz']
t.at 'foo' . to_vector . should_equal [20, Nothing, 13]
t.at 'bar' . to_vector . should_equal ['baz', 'xyz', Nothing]
19 changes: 19 additions & 0 deletions test/Tests/src/Data/Json_Spec.enso
@@ -95,6 +95,25 @@ spec =
Number.positive_infinity.to_json . should_equal "null"
Number.negative_infinity.to_json . should_equal "null"

Test.group "JS_Object" <|
Test.specify "should be buildable from pairs" <|
JS_Object.from_pairs [["foo", "bar"]] . to_json . should_equal '{"foo":"bar"}'
JS_Object.from_pairs [["foo", "bar"], ["baz", Nothing]] . to_json . should_equal '{"foo":"bar","baz":null}'

Test.specify "should handle equality on a key level" <|
JS_Object.from_pairs [["a", 42]] . should_equal <| Json.parse '{"a": 42}'
JS_Object.from_pairs [["a", 42]] . should_not_equal <| JS_Object.from_pairs [["a", 43]]
JS_Object.from_pairs [["a", 42]] . should_not_equal <| JS_Object.from_pairs [["b", 42]]

JS_Object.from_pairs [["a", 42], ["b", Nothing]] . should_equal <| JS_Object.from_pairs [["b", Nothing], ["a", 42]]
JS_Object.from_pairs [["a", 42], ["b", Nothing]] . should_not_equal <| JS_Object.from_pairs [["c", Nothing], ["a", 42]]

obj1 = JS_Object.from_pairs [["a", 42], ["b", JS_Object.from_pairs [["c",1], ["d",2]]]]
obj1 . should_equal <| JS_Object.from_pairs [["a", 42], ["b", JS_Object.from_pairs [["c",1], ["d",2]]]]
obj1 . should_equal <| JS_Object.from_pairs [["b", JS_Object.from_pairs [["d",2], ["c",1]]], ["a", 42]]
obj1 . should_not_equal <| JS_Object.from_pairs [["a", 43], ["b", 123]]
obj1 . should_not_equal <| JS_Object.from_pairs [["a", 42], ["b", JS_Object.from_pairs [["c",1], ["d",3]]]]

Test.group "JSON" <|
Test.specify "should allow getting object fields" <|
object = Json.parse '{ "foo": "bar", "baz": ["foo", "x", false],"y": {"z": null, "w": null} }'
2 changes: 1 addition & 1 deletion test/Tests/src/Data/Vector_Spec.enso
@@ -109,7 +109,7 @@ spec = Test.group "Vectors" <|
[1,2,3].get -2 . should_equal 2
[1,2,3].get -3 . should_equal 1

Test.specify "should return a dataflow error when accessing elements out of bounds" <|
Test.specify "should correctly handle out of bounds access" <|
[1,2,3].at -4 . should_fail_with Index_Out_Of_Bounds.Error
[1,2,3].at 3 . should_fail_with Index_Out_Of_Bounds.Error
[1,2,3].get -4 . should_equal Nothing
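A short sketch, not part of the diff, of the behaviour the renamed test covers: at fails with a dataflow error outside the bounds, while get returns Nothing.

example_bounds =
    v = [1, 2, 3]
    at_error = v.at 3 . is_error    # True: an Index_Out_Of_Bounds dataflow error
    get_result = v.get 3            # Nothing
    [at_error, get_result]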
