diff --git a/CHANGELOG.md b/CHANGELOG.md
index 0dd97686f725..5086d4e022e3 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -661,6 +661,8 @@
- [Added `Vector.build_multiple`, and better for support for errors and warnings
inside `Vector.build` and `Vector.build_multiple`.][9766]
- [Added `Vector.duplicates`.][9917]
+- [Log operations performed on a Postgres database connection obtained through a
+ Data Link.][9873]
[debug-shortcuts]:
https://github.com/enso-org/enso/blob/develop/app/gui/docs/product/shortcuts.md#debug
@@ -970,6 +972,7 @@
[9750]: https://github.com/enso-org/enso/pull/9750
[9766]: https://github.com/enso-org/enso/pull/9766
[9917]: https://github.com/enso-org/enso/pull/9917
+[9873]: https://github.com/enso-org/enso/pull/9873
#### Enso Compiler
diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Enso_Cloud/Enso_User.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Enso_Cloud/Enso_User.enso
index ac2ddced6010..c46b588b8046 100644
--- a/distribution/lib/Standard/Base/0.0.0-dev/src/Enso_Cloud/Enso_User.enso
+++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Enso_Cloud/Enso_User.enso
@@ -4,12 +4,15 @@ import project.Data.Time.Duration.Duration
import project.Data.Vector.Vector
import project.Enso_Cloud.Enso_File.Enso_Asset_Type
import project.Enso_Cloud.Enso_File.Enso_File
+import project.Enso_Cloud.Errors.Not_Logged_In
+import project.Enso_Cloud.Internal.Authentication
import project.Enso_Cloud.Internal.Utils
import project.Error.Error
import project.Errors.Illegal_Argument.Illegal_Argument
import project.Network.HTTP.HTTP
import project.Network.HTTP.HTTP_Method.HTTP_Method
import project.Nothing.Nothing
+import project.Panic.Panic
from project.Data.Boolean import Boolean, False, True
from project.Enso_Cloud.Public_Utils import get_optional_field, get_required_field
@@ -29,12 +32,17 @@ type Enso_User
## ICON people
Fetch the current user.
- current : Enso_User
- current =
+ current -> Enso_User =
Utils.get_cached "users/me" cache_duration=(Duration.new minutes=120) <|
json = Utils.http_request_as_json HTTP_Method.Get (Utils.cloud_root_uri + "users/me")
Enso_User.from json
+ ## PRIVATE
+ Checks if the user is logged in.
+ is_logged_in -> Boolean =
+ Panic.catch Not_Logged_In handler=(_->False) <|
+ Authentication.get_access_token.is_error.not
+
## ICON people
Lists all known users.
list : Vector Enso_User
diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Enso_Cloud/Internal/Audit_Log.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Enso_Cloud/Internal/Audit_Log.enso
new file mode 100644
index 000000000000..e46e5aa24e9f
--- /dev/null
+++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Enso_Cloud/Internal/Audit_Log.enso
@@ -0,0 +1,37 @@
+import project.Data.Json.JS_Object
+import project.Data.Text.Text
+import project.Errors.Illegal_Argument.Illegal_Argument
+import project.Nothing.Nothing
+from project.Data.Boolean import Boolean, False, True
+
+polyglot java import org.enso.base.enso_cloud.audit.AuditLog
+
+## PRIVATE
+type Audit_Log
+ ## PRIVATE
+ Reports an event to the audit log.
+ The event is submitted asynchronously.
+
+ Arguments:
+ - event_type: The type of the event.
+ - message: The message associated with the event.
+ - metadata: Additional metadata to include with the event.
+ Note that it should be a JS object and it should _not_ contain fields
+ that are restricted. These fields are added to the metadata
+ automatically.
+ - async: Whether to submit the event asynchronously.
+ Defaults to True.
+
+ ? Restricted Fields
+
+ The following fields are added by the system and should not be included
+ in the provided metadata:
+ - `type`
+ - `operation`
+ - `localTimestamp`
+ - `projectName`
+ report_event event_type:Text message:Text (metadata:JS_Object = JS_Object.from_pairs []) (async : Boolean = True) -> Nothing =
+ Illegal_Argument.handle_java_exception <|
+ case async of
+ True -> AuditLog.logAsync event_type message metadata.object_node
+ False -> AuditLog.logSynchronously event_type message metadata.object_node
diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Enso_Cloud/Public_Utils.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Enso_Cloud/Public_Utils.enso
index 19cdf44e2311..f3a4bed1bd16 100644
--- a/distribution/lib/Standard/Base/0.0.0-dev/src/Enso_Cloud/Public_Utils.enso
+++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Enso_Cloud/Public_Utils.enso
@@ -2,6 +2,7 @@ import project.Any.Any
import project.Data.Json.JS_Object
import project.Data.Text.Text
import project.Enso_Cloud.Errors.Enso_Cloud_Error
+import project.Enso_Cloud.Internal.Utils
import project.Error.Error
import project.Meta
import project.Nothing.Nothing
@@ -57,3 +58,11 @@ get_optional_field (key : Text) js_object (~if_missing = Nothing) (show_value :
_ ->
representation = if show_value then js_object.to_display_text else Meta.type_of js_object . to_display_text
Error.throw (Enso_Cloud_Error.Invalid_Response_Payload "Expected a JSON object, but got "+representation+".")
+
+## PRIVATE
+ UNSTABLE
+ Re-exports parts of the functionality of `http_request_as_json` function that
+ is needed in tests.
+ It should not be used anywhere else and may be removed in the near future.
+cloud_http_request_for_test method url_suffix =
+ Utils.http_request_as_json method Utils.cloud_root_uri+url_suffix
diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/System/File_Format.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/System/File_Format.enso
index b804333eb799..854dbad763f9 100644
--- a/distribution/lib/Standard/Base/0.0.0-dev/src/System/File_Format.enso
+++ b/distribution/lib/Standard/Base/0.0.0-dev/src/System/File_Format.enso
@@ -96,6 +96,13 @@ type File_Format
_ = [file, on_problems]
Unimplemented.throw "This is an interface only."
+ ## PRIVATE
+ Implements decoding the format from a stream.
+ read_stream : Input_Stream -> File_Format_Metadata -> Any
+ read_stream self stream:Input_Stream (metadata : File_Format_Metadata) =
+ _ = [stream, metadata]
+ Unimplemented.throw "This is an interface only."
+
## PRIVATE
default_widget : Widget
default_widget =
diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/Data_Link/Postgres_Data_Link.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/Data_Link/Postgres_Data_Link.enso
index 645f79ed2597..1062e765bcb5 100644
--- a/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/Data_Link/Postgres_Data_Link.enso
+++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/Data_Link/Postgres_Data_Link.enso
@@ -45,7 +45,9 @@ type Postgres_Data_Link
read self (format = Auto_Detect) (on_problems : Problem_Behavior) =
_ = on_problems
if format != Auto_Detect then Error.throw (Illegal_Argument.Error "Only the default Auto_Detect format should be used with a Postgres Data Link, because it does not point to a file resource, but a database entity, so setting a file format for it is meaningless.") else
- default_options = Connection_Options.Value
+ # TODO add related asset id here: https://github.com/enso-org/enso/issues/9869
+ audit_mode = if Enso_User.is_logged_in then "cloud" else "local"
+ default_options = Connection_Options.Value [["enso.internal.audit", audit_mode]]
connection = self.details.connect default_options
case self of
Postgres_Data_Link.Connection _ -> connection
diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/SQLite_Format.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/SQLite_Format.enso
index 1f46608f504a..bec21fa76950 100644
--- a/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/SQLite_Format.enso
+++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/SQLite_Format.enso
@@ -2,6 +2,7 @@ from Standard.Base import all
import Standard.Base.Errors.Illegal_Argument.Illegal_Argument
import Standard.Base.System.File.Generic.Writable_File.Writable_File
import Standard.Base.System.File_Format_Metadata.File_Format_Metadata
+import Standard.Base.System.Input_Stream.Input_Stream
from Standard.Base.Metadata.Choice import Option
import project.Connection.Database
@@ -48,6 +49,12 @@ type SQLite_Format
_ = [on_problems]
Database.connect (SQLite.From_File file)
+ ## PRIVATE
+ read_stream : Input_Stream -> File_Format_Metadata -> Any
+ read_stream self stream metadata =
+ _ = [stream, metadata]
+ Error.throw (Illegal_Argument.Error "Cannot connect to a SQLite database backed by a stream. Save it to a local file first.")
+
## PRIVATE
Based on the File Format definition at: https://www.sqlite.org/fileformat.html
magic_header_string =
diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/DB_Table.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/DB_Table.enso
index d8d74165c468..206ee9e471ac 100644
--- a/distribution/lib/Standard/Database/0.0.0-dev/src/DB_Table.enso
+++ b/distribution/lib/Standard/Database/0.0.0-dev/src/DB_Table.enso
@@ -23,6 +23,7 @@ import Standard.Table.Internal.Add_Row_Number
import Standard.Table.Internal.Aggregate_Column_Helper
import Standard.Table.Internal.Column_Naming_Helper.Column_Naming_Helper
import Standard.Table.Internal.Constant_Column.Constant_Column
+import Standard.Table.Internal.Display_Helpers
import Standard.Table.Internal.Join_Kind_Cross.Join_Kind_Cross
import Standard.Table.Internal.Problem_Builder.Problem_Builder
import Standard.Table.Internal.Replace_Helpers
@@ -35,10 +36,8 @@ import Standard.Table.Internal.Widget_Helpers
import Standard.Table.Match_Columns as Match_Columns_Helpers
import Standard.Table.Row.Row
from Standard.Table import Aggregate_Column, Auto, Blank_Selector, Column_Ref, Data_Formatter, Join_Condition, Join_Kind, Match_Columns, Position, Previous_Value, Report_Unmatched, Set_Mode, Simple_Expression, Sort_Column, Table, Value_Type
-from Standard.Table.Column import get_item_string, normalize_string_for_display
from Standard.Table.Errors import all
from Standard.Table.Internal.Filter_Condition_Helpers import make_filter_column
-from Standard.Table.Table import print_table
import project.Connection.Connection.Connection
import project.DB_Column.DB_Column
@@ -97,9 +96,12 @@ type DB_Table
- format_terminal: whether ANSI-terminal formatting should be used
display : Integer -> Boolean -> Text
display self show_rows=10 format_terminal=False =
- df = self.read max_rows=show_rows warn_if_more_rows=False
- all_rows_count = self.row_count
- display_dataframe df indices_count=0 all_rows_count format_terminal
+ data_fragment_with_warning = self.read max_rows=show_rows warn_if_more_rows=True
+ has_more_rows = data_fragment_with_warning.has_warnings warning_type=Not_All_Rows_Downloaded
+ data_fragment_cleared = data_fragment_with_warning.remove_warnings Not_All_Rows_Downloaded
+ # `row_count` means another Database query is performed, so we only do it if we need to.
+ all_rows_count = if has_more_rows then self.row_count else data_fragment_cleared.row_count
+ Display_Helpers.display_table table=data_fragment_cleared add_row_index=False max_rows_to_show=show_rows all_rows_count=all_rows_count format_terminal=format_terminal
## PRIVATE
ADVANCED
@@ -2941,37 +2943,6 @@ make_table connection table_name columns ctx on_problems =
problem_builder.attach_problems_before on_problems <|
DB_Table.Value table_name connection cols ctx
-## PRIVATE
-
- Renders an ASCII-art representation for a Table from a dataframe that
- contains a fragment of the underlying data and count of all rows.
-
- Arguments:
- - df: The materialized dataframe that contains the data to be displayed, it
- should have no indices set.
- - indices_count: Indicates how many columns from the materialized dataframe
- should be treated as indices in the display (index columns will be bold if
- `format_terminal` is enabled).
- - all_rows_count: The count of all rows in the underlying Table; if
- `all_rows_count` is bigger than the amount of rows of `df`, an additional
- line will be included that will say how many hidden rows there are.
- - format_term: A boolean flag, specifying whether to use ANSI escape codes
- for rich formatting in the terminal.
-display_dataframe : Table -> Integer -> Integer -> Boolean -> Text
-display_dataframe df indices_count all_rows_count format_terminal =
- cols = Vector.from_polyglot_array df.java_table.getColumns
- col_names = cols.map .getName . map normalize_string_for_display
- col_vals = cols.map .getStorage
- display_rows = df.row_count
- rows = Vector.new display_rows row_num->
- col_vals.map col->
- if col.isNothing row_num then "Nothing" else get_item_string col row_num
- table = print_table col_names rows indices_count format_terminal
- if display_rows == all_rows_count then table else
- missing_rows_count = all_rows_count - display_rows
- missing = '\n\u2026 and ' + missing_rows_count.to_text + ' hidden rows.'
- table + missing
-
## PRIVATE
By default, join on the first column, unless it's a cross join, in which
case there are no join conditions.
diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Connection.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Connection.enso
index 00d0ab9e6de2..e294da9dde27 100644
--- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Connection.enso
+++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Connection.enso
@@ -52,7 +52,7 @@ type Postgres_Connection
Arguments:
- connection: the underlying connection.
- make_new: a function that returns a new connection.
- Value connection make_new
+ private Value connection make_new
## ICON data_input
Closes the connection releasing the underlying database resources
@@ -320,4 +320,3 @@ parse_postgres_encoding encoding_name =
fallback.catch Any _->
warning = Unsupported_Database_Encoding.Warning "The database is using an encoding ("+encoding_name.to_display_text+") that is currently not supported by Enso. Falling back to UTF-8. Column/table names may not be mapped correctly if they contain unsupported characters."
Warning.attach warning Encoding.utf_8
-
diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Column.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Column.enso
index ba4100324774..26a5eef2b8da 100644
--- a/distribution/lib/Standard/Table/0.0.0-dev/src/Column.enso
+++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Column.enso
@@ -30,7 +30,6 @@ import project.Value_Type.Value_Type
from project.Errors import Conversion_Failure, Floating_Point_Equality, Inexact_Type_Coercion, Invalid_Column_Names, Invalid_Value_Type, No_Index_Set_Error
from project.Internal.Column_Format import all
from project.Internal.Java_Exports import make_date_builder_adapter, make_string_builder
-from project.Table import print_table
polyglot java import org.enso.base.Time_Utils
polyglot java import org.enso.table.data.column.operation.cast.CastProblemAggregator
@@ -155,19 +154,7 @@ type Column
example_display = Examples.integer_column.display
display : Integer -> Boolean -> Text
display self show_rows=10 format_terminal=False =
- java_col = self.java_column
- col_name = normalize_string_for_display java_col.getName
- storage = java_col.getStorage
- num_rows = java_col.getSize
- display_rows = num_rows.min show_rows
- items = Vector.new display_rows num->
- row = if storage.isNothing num then "Nothing" else
- get_item_string storage num
- [num.to_text, row]
- table = print_table ["", col_name] items 1 format_terminal
- if num_rows - display_rows <= 0 then table else
- missing = '\n\u2026 and ' + (num_rows - display_rows).to_text + ' hidden rows.'
- table + missing
+ self.to_table.display show_rows format_terminal
## PRIVATE
ADVANCED
diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Display_Helpers.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Display_Helpers.enso
new file mode 100644
index 000000000000..aea0f592e5fb
--- /dev/null
+++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Display_Helpers.enso
@@ -0,0 +1,89 @@
+from Standard.Base import all
+
+import project.Table.Table
+from project.Column import get_item_string, normalize_string_for_display
+
+polyglot java import java.lang.System as Java_System
+
+## PRIVATE
+
+ Renders an ASCII-art representation for a Table from a dataframe that
+ contains a fragment of the underlying data and count of all rows.
+
+ Arguments:
+ - table: The materialized table that contains the data to be displayed.
+ - add_row_index: A boolean flag, specifying whether to display row indices.
+ - max_rows_to_show: The maximum amount of rows to display.
+ - all_rows_count: The count of all rows in the underlying Table; if
+ `all_rows_count` is bigger than the amount of rows displayed, an additional
+ line will be included that will say how many hidden rows there are.
+ Useful for remote tables where `df` contains only a fragment of the data.
+ - format_terminal: A boolean flag, specifying whether to use ANSI escape
+ codes for rich formatting in the terminal.
+display_table (table : Table) (add_row_index : Boolean) (max_rows_to_show : Integer) (all_rows_count : Integer) (format_terminal : Boolean) -> Text =
+ cols = Vector.from_polyglot_array table.java_table.getColumns
+ col_names = cols.map .getName . map normalize_string_for_display
+ col_vals = cols.map .getStorage
+ display_rows = table.row_count.min max_rows_to_show
+ rows = Vector.new display_rows row_num->
+ cols = col_vals.map col->
+ if col.isNothing row_num then "Nothing" else get_item_string col row_num
+ if add_row_index then [row_num.to_text] + cols else cols
+ table_text = case add_row_index of
+ True -> print_table [""]+col_names rows 1 format_terminal
+ False -> print_table col_names rows 0 format_terminal
+ if display_rows == all_rows_count then table_text else
+ missing_rows_count = all_rows_count - display_rows
+ missing = '\n\u2026 and ' + missing_rows_count.to_text + ' hidden rows.'
+ table_text + missing
+
+## PRIVATE
+
+ A helper function for creating an ASCII-art representation of tabular data.
+
+ Arguments:
+ - header: vector of names of columns in the table.
+ - rows: a vector of rows, where each row is a vector that contains a text
+ representation of each cell
+ - indices_count: the number specifying how many columns should be treated as
+ indices; this will make them in bold font if `format_term` is enabled.
+ - format_term: a boolean flag, specifying whether to use ANSI escape codes
+ for rich formatting in the terminal.
+print_table : Vector Text -> (Vector (Vector Text)) -> Integer -> Boolean -> Text
+print_table header rows indices_count format_term =
+ content_lengths = Vector.new header.length i->
+ max_row = 0.up_to rows.length . fold 0 a-> j-> a.max (rows.at j . at i . characters . length)
+ max_row.max (header.at i . characters . length)
+ header_line = header.zip content_lengths pad . map (ansi_bold format_term) . join ' | '
+ divider = content_lengths . map (l -> "-".repeat l+2) . join '+'
+ row_lines = rows.map r->
+ x = r.zip content_lengths pad
+ ixes = x.take (First indices_count) . map (ansi_bold format_term)
+ with_bold_ix = ixes + x.drop (First indices_count)
+ y = with_bold_ix . join ' | '
+ " " + y
+ ([" " + header_line, divider] + row_lines).join '\n'
+
+## PRIVATE
+
+ Ensures that the `txt` has at least `len` characters by appending spaces at
+ the end.
+
+ Arguments:
+ - txt: The text to pad.
+ - len: The minimum length of the text.
+pad : Text -> Integer -> Text
+pad txt len =
+ true_len = txt.characters.length
+ txt + (" ".repeat (len - true_len))
+
+## PRIVATE
+
+ Adds ANSI bold escape sequences to text if the feature is enabled.
+
+ Arguments:
+ - enabled: will insert ANSI sequences only if this flag is true and we are not on Windows.
+ - txt: The text to possibly bold.
+ansi_bold : Boolean -> Text -> Text
+ansi_bold enabled txt =
+ if enabled && (Java_System.console != Nothing) then '\e[1m' + txt + '\e[m' else txt
diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Table.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Table.enso
index 26a0605be0d1..a6517ab8fc80 100644
--- a/distribution/lib/Standard/Table/0.0.0-dev/src/Table.enso
+++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Table.enso
@@ -37,6 +37,7 @@ import project.Internal.Column_Naming_Helper.Column_Naming_Helper
import project.Internal.Constant_Column.Constant_Column
import project.Internal.Delimited_Reader
import project.Internal.Delimited_Writer
+import project.Internal.Display_Helpers
import project.Internal.Expand_Objects_Helpers
import project.Internal.Java_Problems
import project.Internal.Join_Helpers
@@ -181,19 +182,7 @@ type Table
example_display = Examples.inventory_table.display
display : Integer -> Boolean -> Text
display self show_rows=10 format_terminal=False =
- cols = Vector.from_polyglot_array self.java_table.getColumns
- col_names = ([""] + cols.map .getName) . map normalize_string_for_display
- col_vals = cols.map .getStorage
- num_rows = self.row_count
- display_rows = num_rows.min show_rows
- rows = Vector.new display_rows row_num->
- cols = col_vals.map col->
- if col.isNothing row_num then "Nothing" else get_item_string col row_num
- [row_num.to_text] + cols
- table = print_table col_names rows 1 format_terminal
- if num_rows - display_rows <= 0 then table else
- missing = '\n\u2026 and ' + (num_rows - display_rows).to_text + ' hidden rows.'
- table + missing
+ Display_Helpers.display_table self add_row_index=True max_rows_to_show=show_rows all_rows_count=self.row_count format_terminal=format_terminal
## PRIVATE
ADVANCED
@@ -3027,59 +3016,6 @@ type Table
problem_builder_for_unification.attach_problems_before on_problems <|
if good_columns.is_empty then problem_builder_for_unification.raise_no_output_columns_with_cause else
Table.new good_columns
-## PRIVATE
-
- Ensures that the `txt` has at least `len` characters by appending spaces at
- the end.
-
- Arguments:
- - txt: The text to pad.
- - len: The minimum length of the text.
-pad : Text -> Integer -> Text
-pad txt len =
- true_len = txt.characters.length
- txt + (" ".repeat (len - true_len))
-
-## PRIVATE
-
- Adds ANSI bold escape sequences to text if the feature is enabled.
-
- Arguments:
- - enabled: will insert ANSI sequences only if this flag is true and we are not on Windows.
- - txt: The text to possibly bold.
-ansi_bold : Boolean -> Text -> Text
-ansi_bold enabled txt =
- case Platform.os of
- ## Output formatting for Windows is not currently supported.
- Platform.OS.Windows -> txt
- _ -> if enabled then '\e[1m' + txt + '\e[m' else txt
-
-## PRIVATE
-
- A helper function for creating an ASCII-art representation of tabular data.
-
- Arguments:
- - header: vector of names of columns in the table.
- - rows: a vector of rows, where each row is a vector that contains a text
- representation of each cell
- - indices_count: the number specifying how many columns should be treated as
- indices; this will make them in bold font if `format_term` is enabled.
- - format_term: a boolean flag, specifying whether to use ANSI escape codes
- for rich formatting in the terminal.
-print_table : Vector Text -> (Vector (Vector Text)) -> Integer -> Boolean -> Text
-print_table header rows indices_count format_term =
- content_lengths = Vector.new header.length i->
- max_row = 0.up_to rows.length . fold 0 a-> j-> a.max (rows.at j . at i . characters . length)
- max_row.max (header.at i . characters . length)
- header_line = header.zip content_lengths pad . map (ansi_bold format_term) . join ' | '
- divider = content_lengths . map (l -> "-".repeat l+2) . join '+'
- row_lines = rows.map r->
- x = r.zip content_lengths pad
- ixes = x.take (First indices_count) . map (ansi_bold format_term)
- with_bold_ix = ixes + x.drop (First indices_count)
- y = with_bold_ix . join ' | '
- " " + y
- ([" " + header_line, divider] + row_lines).join '\n'
## PRIVATE
A helper to create a new table consisting of slices of the original table.
diff --git a/std-bits/base/src/main/java/org/enso/base/CurrentEnsoProject.java b/std-bits/base/src/main/java/org/enso/base/CurrentEnsoProject.java
new file mode 100644
index 000000000000..06b8442d7259
--- /dev/null
+++ b/std-bits/base/src/main/java/org/enso/base/CurrentEnsoProject.java
@@ -0,0 +1,30 @@
+package org.enso.base;
+
+/** A Java interface to the `Enso_Project` type. */
+public final class CurrentEnsoProject {
+ private final String name;
+ private final String namespace;
+
+ private CurrentEnsoProject(String name, String namespace) {
+ this.name = name;
+ this.namespace = namespace;
+ }
+
+ public static CurrentEnsoProject get() {
+ // TODO this currently does not work, because of bug
+ // https://github.com/enso-org/enso/issues/9845
+ return null;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public String getNamespace() {
+ return namespace;
+ }
+
+ public String fullName() {
+ return namespace + "." + name;
+ }
+}
diff --git a/std-bits/base/src/main/java/org/enso/base/enso_cloud/AuthenticationProvider.java b/std-bits/base/src/main/java/org/enso/base/enso_cloud/AuthenticationProvider.java
index 2da06ef1b45f..01f2e64ad2ab 100644
--- a/std-bits/base/src/main/java/org/enso/base/enso_cloud/AuthenticationProvider.java
+++ b/std-bits/base/src/main/java/org/enso/base/enso_cloud/AuthenticationProvider.java
@@ -1,6 +1,6 @@
package org.enso.base.enso_cloud;
-import org.graalvm.polyglot.Context;
+import org.enso.base.polyglot.EnsoMeta;
import org.graalvm.polyglot.Value;
public class AuthenticationProvider {
@@ -20,14 +20,8 @@ public static void reset() {
}
private static Value createAuthenticationService() {
- var context = Context.getCurrent().getBindings("enso");
- var module =
- context.invokeMember("get_module", "Standard.Base.Enso_Cloud.Internal.Authentication");
- var moduleType = module.invokeMember("get_associated_type");
- var factory =
- module.invokeMember("get_method", moduleType, "instantiate_authentication_service");
- // The static method takes the module as the synthetic 'self' argument.
- return factory.execute(moduleType);
+ return EnsoMeta.callStaticModuleMethod(
+ "Standard.Base.Enso_Cloud.Internal.Authentication", "instantiate_authentication_service");
}
private static void ensureServicesSetup() {
diff --git a/std-bits/base/src/main/java/org/enso/base/enso_cloud/CloudAPI.java b/std-bits/base/src/main/java/org/enso/base/enso_cloud/CloudAPI.java
index 918d7f94356c..4eced3a5e400 100644
--- a/std-bits/base/src/main/java/org/enso/base/enso_cloud/CloudAPI.java
+++ b/std-bits/base/src/main/java/org/enso/base/enso_cloud/CloudAPI.java
@@ -3,6 +3,11 @@
import org.enso.base.Environment_Utils;
public final class CloudAPI {
+ /**
+ * Returns the URI to the root of the Cloud API.
+ *
+   * <p>It always ends with a slash.
+ */
public static String getAPIRootURI() {
var envUri = Environment_Utils.get_environment_variable("ENSO_CLOUD_API_URI");
var effectiveUri =
@@ -11,6 +16,15 @@ public static String getAPIRootURI() {
return uriWithSlash;
}
+ /**
+ * Returns the ID of the currently opened cloud project.
+ *
+   * <p>When running locally, this returns {@code null}.
+ */
+ public static String getCloudProjectId() {
+ return Environment_Utils.get_environment_variable("ENSO_CLOUD_PROJECT_ID");
+ }
+
public static void flushCloudCaches() {
CloudRequestCache.clear();
AuthenticationProvider.reset();
diff --git a/std-bits/base/src/main/java/org/enso/base/enso_cloud/DataLinkSPI.java b/std-bits/base/src/main/java/org/enso/base/enso_cloud/DataLinkSPI.java
index 1bb21859d45f..84cbe3cf341b 100644
--- a/std-bits/base/src/main/java/org/enso/base/enso_cloud/DataLinkSPI.java
+++ b/std-bits/base/src/main/java/org/enso/base/enso_cloud/DataLinkSPI.java
@@ -2,7 +2,7 @@
import java.util.ServiceLoader;
import java.util.stream.Collectors;
-import org.graalvm.polyglot.Context;
+import org.enso.base.polyglot.EnsoMeta;
import org.graalvm.polyglot.Value;
/**
@@ -42,9 +42,7 @@ public static Value findDataLinkType(String name) {
}
public Value getTypeObject() {
- final var context = Context.getCurrent().getBindings("enso");
- final var module = context.invokeMember("get_module", getModuleName());
- return module.invokeMember("get_type", getTypeName());
+ return EnsoMeta.getType(getModuleName(), getTypeName());
}
protected abstract String getModuleName();
diff --git a/std-bits/base/src/main/java/org/enso/base/enso_cloud/EnsoSecretHelper.java b/std-bits/base/src/main/java/org/enso/base/enso_cloud/EnsoSecretHelper.java
index ed121520246c..f82bf378c8d2 100644
--- a/std-bits/base/src/main/java/org/enso/base/enso_cloud/EnsoSecretHelper.java
+++ b/std-bits/base/src/main/java/org/enso/base/enso_cloud/EnsoSecretHelper.java
@@ -19,8 +19,8 @@
public final class EnsoSecretHelper extends SecretValueResolver {
/** Gets a JDBC connection resolving EnsoKeyValuePair into the properties. */
-  public static Connection getJDBCConnection(String url, Pair<String, HideableValue>[] properties)
-      throws SQLException {
+  public static Connection getJDBCConnection(
+      String url, List<Pair<String, HideableValue>> properties) throws SQLException {
var javaProperties = new Properties();
for (var pair : properties) {
javaProperties.setProperty(pair.getLeft(), resolveValue(pair.getRight()));
diff --git a/std-bits/base/src/main/java/org/enso/base/enso_cloud/audit/AuditLog.java b/std-bits/base/src/main/java/org/enso/base/enso_cloud/audit/AuditLog.java
new file mode 100644
index 000000000000..27941dbfe669
--- /dev/null
+++ b/std-bits/base/src/main/java/org/enso/base/enso_cloud/audit/AuditLog.java
@@ -0,0 +1,15 @@
+package org.enso.base.enso_cloud.audit;
+
+import com.fasterxml.jackson.databind.node.ObjectNode;
+
+public final class AuditLog {
+ public static void logAsync(String type, String message, ObjectNode metadata) {
+ var event = new AuditLogMessage(type, message, metadata);
+ AuditLogAPI.INSTANCE.logAsync(event);
+ }
+
+ public static void logSynchronously(String type, String message, ObjectNode metadata) {
+ var event = new AuditLogMessage(type, message, metadata);
+ AuditLogAPI.INSTANCE.logSync(event);
+ }
+}
diff --git a/std-bits/base/src/main/java/org/enso/base/enso_cloud/audit/AuditLogAPI.java b/std-bits/base/src/main/java/org/enso/base/enso_cloud/audit/AuditLogAPI.java
new file mode 100644
index 000000000000..302027aa24fa
--- /dev/null
+++ b/std-bits/base/src/main/java/org/enso/base/enso_cloud/audit/AuditLogAPI.java
@@ -0,0 +1,111 @@
+package org.enso.base.enso_cloud.audit;
+
+import java.io.IOException;
+import java.net.URI;
+import java.net.http.HttpClient;
+import java.net.http.HttpRequest;
+import java.net.http.HttpResponse;
+import java.nio.charset.StandardCharsets;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Future;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.concurrent.ThreadPoolExecutor;
+import java.util.concurrent.TimeUnit;
+import java.util.logging.Logger;
+import org.enso.base.enso_cloud.AuthenticationProvider;
+import org.enso.base.enso_cloud.CloudAPI;
+
+/**
+ * Gives access to the low-level log event API in the Cloud and manages asynchronously submitting
+ * the logs.
+ */
+class AuditLogAPI {
+ private static final Logger logger = Logger.getLogger(AuditLogAPI.class.getName());
+ public static AuditLogAPI INSTANCE = new AuditLogAPI();
+ private final HttpClient httpClient =
+ HttpClient.newBuilder().followRedirects(HttpClient.Redirect.ALWAYS).build();
+ private final ExecutorService executorService;
+
+ private AuditLogAPI() {
+ // A thread pool that creates at most one thread, only when it is needed, and shuts it down
+ // after 60 seconds of inactivity.
+ // TODO we may want to set a maximum capacity to the queue and think how we deal with situations
+ // when logs are added faster than they can be processed
+ executorService =
+ new ThreadPoolExecutor(0, 1, 60L, TimeUnit.SECONDS, new LinkedBlockingQueue<>());
+ }
+
+ public void logSync(LogMessage message) {
+ var request = buildRequest(message);
+ sendLogRequest(request, 5);
+ }
+
+ public Future<Void> logAsync(LogMessage message) {
+ // We build the request on the main thread where the Enso Context is readily accessible - as we
+ // need to access the `Authentication_Service`.
+ var request = buildRequest(message);
+ return executorService.submit(
+ () -> {
+ try {
+ sendLogRequest(request, 5);
+ } catch (RequestFailureException e) {
+ throw e;
+ } catch (Exception e) {
+ logger.severe("Unexpected exception when sending a log message: " + e.getMessage());
+ throw e;
+ }
+ return null;
+ });
+ }
+
+ private HttpRequest buildRequest(LogMessage message) {
+ var apiUri = CloudAPI.getAPIRootURI() + "logs";
+ return HttpRequest.newBuilder()
+ .uri(URI.create(apiUri))
+ .header("Authorization", "Bearer " + AuthenticationProvider.getAccessToken())
+ .POST(HttpRequest.BodyPublishers.ofString(message.payload(), StandardCharsets.UTF_8))
+ .build();
+ }
+
+ private void sendLogRequest(HttpRequest request, int retryCount) throws RequestFailureException {
+ try {
+ try {
+ HttpResponse<String> response =
+ httpClient.send(request, HttpResponse.BodyHandlers.ofString());
+ if (response.statusCode() < 200 || response.statusCode() >= 300) {
+ throw new RequestFailureException(
+ "Unexpected status code: " + response.statusCode() + " " + response.body(), null);
+ }
+ } catch (IOException | InterruptedException e) {
+ // Promote a checked exception to a runtime exception to simplify the code.
+ var errorMessage = e.getMessage() != null ? e.getMessage() : e.toString();
+ throw new RequestFailureException("Failed to send log message: " + errorMessage, e);
+ }
+ } catch (RequestFailureException e) {
+ if (retryCount < 0) {
+ logger.severe("Failed to send log message after retrying: " + e.getMessage());
+ failedLogCount++;
+ throw e;
+ } else {
+ logger.warning("Exception when sending a log message: " + e.getMessage() + ". Retrying...");
+ sendLogRequest(request, retryCount - 1);
+ }
+ }
+ }
+
+ public interface LogMessage {
+ String payload();
+ }
+
+ public static class RequestFailureException extends RuntimeException {
+ public RequestFailureException(String message, Throwable cause) {
+ super(message, cause);
+ }
+ }
+
+ private int failedLogCount = 0;
+
+ public int getFailedLogCount() {
+ return failedLogCount;
+ }
+}
diff --git a/std-bits/base/src/main/java/org/enso/base/enso_cloud/audit/AuditLogMessage.java b/std-bits/base/src/main/java/org/enso/base/enso_cloud/audit/AuditLogMessage.java
new file mode 100644
index 000000000000..2115d4f47494
--- /dev/null
+++ b/std-bits/base/src/main/java/org/enso/base/enso_cloud/audit/AuditLogMessage.java
@@ -0,0 +1,75 @@
+package org.enso.base.enso_cloud.audit;
+
+import com.fasterxml.jackson.databind.node.JsonNodeFactory;
+import com.fasterxml.jackson.databind.node.NullNode;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+import com.fasterxml.jackson.databind.node.TextNode;
+import java.util.Objects;
+import org.enso.base.CurrentEnsoProject;
+import org.enso.base.enso_cloud.CloudAPI;
+
+public class AuditLogMessage implements AuditLogAPI.LogMessage {
+
+ /**
+ * A reserved field that is currently added by the cloud backend. Duplicating it will lead to
+ * internal server errors and log messages being discarded.
+ */
+ private static final String RESERVED_TYPE = "type";
+
+ private static final String OPERATION = "operation";
+ private static final String PROJECT_NAME = "projectName";
+ private static final String PROJECT_ID = "projectId";
+ private static final String LOCAL_TIMESTAMP = "localTimestamp";
+
+ private final String projectId;
+ private final String projectName;
+ private final String operation;
+ private final String message;
+ private final ObjectNode metadata;
+
+ public AuditLogMessage(String operation, String message, ObjectNode metadata) {
+ this.operation = Objects.requireNonNull(operation);
+ this.message = Objects.requireNonNull(message);
+ this.metadata = Objects.requireNonNull(metadata);
+ checkNoRestrictedField(metadata, RESERVED_TYPE);
+ checkNoRestrictedField(metadata, OPERATION);
+ checkNoRestrictedField(metadata, PROJECT_NAME);
+ checkNoRestrictedField(metadata, LOCAL_TIMESTAMP);
+
+ this.projectId = CloudAPI.getCloudProjectId();
+
+ var currentProject = CurrentEnsoProject.get();
+ this.projectName = currentProject == null ? null : currentProject.fullName();
+ }
+
+ private static void checkNoRestrictedField(ObjectNode metadata, String fieldName) {
+ if (metadata.has(fieldName)) {
+ throw new IllegalArgumentException(
+ "Metadata cannot contain a field named '" + fieldName + "'");
+ }
+ }
+
+ private ObjectNode computedMetadata() {
+ var copy = metadata.deepCopy();
+ copy.set(OPERATION, TextNode.valueOf(operation));
+
+ // TODO the null check should no longer be needed once
+ // https://github.com/enso-org/enso/issues/9845 is fixed
+ if (projectName != null) {
+ copy.set(PROJECT_NAME, TextNode.valueOf(projectName));
+ }
+
+ return copy;
+ }
+
+ @Override
+ public String payload() {
+ var payload = new ObjectNode(JsonNodeFactory.instance);
+ payload.set("message", TextNode.valueOf(message));
+ payload.set(
+ "projectId", projectId == null ? NullNode.getInstance() : TextNode.valueOf(projectId));
+ payload.set("metadata", computedMetadata());
+ payload.set("kind", TextNode.valueOf("Lib"));
+ return payload.toString();
+ }
+}
diff --git a/std-bits/base/src/main/java/org/enso/base/file_format/FileFormatSPI.java b/std-bits/base/src/main/java/org/enso/base/file_format/FileFormatSPI.java
index 14be055da15f..e3a3fb867775 100644
--- a/std-bits/base/src/main/java/org/enso/base/file_format/FileFormatSPI.java
+++ b/std-bits/base/src/main/java/org/enso/base/file_format/FileFormatSPI.java
@@ -3,7 +3,7 @@
import java.util.Objects;
import java.util.ServiceLoader;
import java.util.stream.Collectors;
-import org.graalvm.polyglot.Context;
+import org.enso.base.polyglot.EnsoMeta;
import org.graalvm.polyglot.Value;
public abstract class FileFormatSPI {
@@ -45,9 +45,7 @@ public static Value findFormatForDataLinkSubType(String subType) {
}
public Value getTypeObject() {
- final var context = Context.getCurrent().getBindings("enso");
- final var module = context.invokeMember("get_module", getModuleName());
- return module.invokeMember("get_type", getTypeName());
+ return EnsoMeta.getType(getModuleName(), getTypeName());
}
protected abstract String getModuleName();
diff --git a/std-bits/base/src/main/java/org/enso/base/file_system/FileSystemSPI.java b/std-bits/base/src/main/java/org/enso/base/file_system/FileSystemSPI.java
index 6fc2bfa15c93..78ba411c31d9 100644
--- a/std-bits/base/src/main/java/org/enso/base/file_system/FileSystemSPI.java
+++ b/std-bits/base/src/main/java/org/enso/base/file_system/FileSystemSPI.java
@@ -1,7 +1,7 @@
package org.enso.base.file_system;
import java.util.ServiceLoader;
-import org.graalvm.polyglot.Context;
+import org.enso.base.polyglot.EnsoMeta;
import org.graalvm.polyglot.Value;
public abstract class FileSystemSPI {
@@ -32,9 +32,7 @@ public static Value[] get_types(boolean refresh) {
}
public Value getTypeObject() {
- final var context = Context.getCurrent().getBindings("enso");
- final var module = context.invokeMember("get_module", getModuleName());
- return module.invokeMember("get_type", getTypeName());
+ return EnsoMeta.getType(getModuleName(), getTypeName());
}
protected abstract String getModuleName();
diff --git a/std-bits/base/src/main/java/org/enso/base/polyglot/EnsoMeta.java b/std-bits/base/src/main/java/org/enso/base/polyglot/EnsoMeta.java
new file mode 100644
index 000000000000..249dcd2ad555
--- /dev/null
+++ b/std-bits/base/src/main/java/org/enso/base/polyglot/EnsoMeta.java
@@ -0,0 +1,30 @@
+package org.enso.base.polyglot;
+
+import org.graalvm.polyglot.Context;
+import org.graalvm.polyglot.Value;
+
+/** A helper class that makes calling Enso methods from Java libraries easier. */
+public final class EnsoMeta {
+ private static Value getBindings() {
+ return Context.getCurrent().getBindings("enso");
+ }
+
+ /** Returns a type object from the Enso runtime. */
+ public static Value getType(String moduleName, String typeName) {
+ var module = getBindings().invokeMember("get_module", moduleName);
+ return module.invokeMember("get_type", typeName);
+ }
+
+ /** Calls a static method defined directly on a module (not inside of a type). */
+ public static Value callStaticModuleMethod(String moduleName, String methodName, Object... args) {
+ var module = getBindings().invokeMember("get_module", moduleName);
+ var moduleType = module.invokeMember("get_associated_type");
+ var factory = module.invokeMember("get_method", moduleType, methodName);
+ // The static method takes the module as the synthetic 'self' argument, so we need to prepend
+ // it:
+ Object[] argsWithSelf = new Object[args.length + 1];
+ argsWithSelf[0] = moduleType;
+ System.arraycopy(args, 0, argsWithSelf, 1, args.length);
+ return factory.execute(argsWithSelf);
+ }
+}
diff --git a/std-bits/database/src/main/java/org/enso/database/JDBCProxy.java b/std-bits/database/src/main/java/org/enso/database/JDBCProxy.java
index 00d3e339ad02..f76b0689ef44 100644
--- a/std-bits/database/src/main/java/org/enso/database/JDBCProxy.java
+++ b/std-bits/database/src/main/java/org/enso/database/JDBCProxy.java
@@ -3,9 +3,16 @@
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
import java.util.ServiceLoader;
+import org.enso.base.enso_cloud.EnsoSecretAccessDenied;
import org.enso.base.enso_cloud.EnsoSecretHelper;
import org.enso.base.enso_cloud.HideableValue;
+import org.enso.database.audit.CloudAuditedConnection;
+import org.enso.database.audit.LocalAuditedConnection;
import org.graalvm.collections.Pair;
/**
@@ -15,7 +22,7 @@
* which drivers are available and so if it is called directly from Enso it does not see the correct
* classloaders, thus not detecting the proper drivers.
*/
-public class JDBCProxy {
+public final class JDBCProxy {
/**
* A helper method that lists registered JDBC drivers.
*
@@ -37,7 +44,7 @@ public static Object[] getDrivers() {
* @param properties configuration for the connection
* @return a connection
*/
- public static Connection getConnection(String url, Pair<String, HideableValue>[] properties)
+ public static Connection getConnection(String url, List<Pair<String, HideableValue>> properties)
throws SQLException {
// We need to manually register all the drivers because the DriverManager is not able
// to correctly use our class loader, it only delegates to the platform class loader when
@@ -47,6 +54,51 @@ public static Connection getConnection(String url, Pair[]
DriverManager.registerDriver(driver);
}
- return EnsoSecretHelper.getJDBCConnection(url, properties);
+ PartitionedProperties partitionedProperties = PartitionedProperties.parse(properties);
+ var rawConnection =
+ EnsoSecretHelper.getJDBCConnection(url, partitionedProperties.jdbcProperties);
+ return switch (partitionedProperties.audited()) {
+ case "local" -> new LocalAuditedConnection(rawConnection);
+ case "cloud" -> new CloudAuditedConnection(
+ rawConnection, partitionedProperties.getRelatedAssetId());
+ case null -> rawConnection;
+ default -> throw new IllegalArgumentException(
+ "Unknown audit mode: " + partitionedProperties.audited());
+ };
+ }
+
+ private static final String ENSO_PROPERTY_PREFIX = "enso.internal.";
+ public static final String AUDITED_KEY = ENSO_PROPERTY_PREFIX + "audit";
+ public static final String RELATED_ASSET_ID_KEY = ENSO_PROPERTY_PREFIX + "relatedAssetId";
+
+ private record PartitionedProperties(
+ Map<String, String> ensoProperties, List<Pair<String, HideableValue>> jdbcProperties) {
+ public static PartitionedProperties parse(List<Pair<String, HideableValue>> properties) {
+ List<Pair<String, HideableValue>> jdbcProperties = new ArrayList<>();
+ HashMap<String, String> ensoProperties = new HashMap<>();
+
+ for (var pair : properties) {
+ if (pair.getLeft().startsWith(ENSO_PROPERTY_PREFIX)) {
+ try {
+ ensoProperties.put(pair.getLeft(), pair.getRight().safeResolve());
+ } catch (EnsoSecretAccessDenied e) {
+ throw new IllegalStateException(
+ "Internal Enso property " + pair.getLeft() + " should not contain secrets.");
+ }
+ } else {
+ jdbcProperties.add(pair);
+ }
+ }
+
+ return new PartitionedProperties(ensoProperties, jdbcProperties);
+ }
+
+ public String audited() {
+ return ensoProperties.get(AUDITED_KEY);
+ }
+
+ public String getRelatedAssetId() {
+ return ensoProperties.get(RELATED_ASSET_ID_KEY);
+ }
}
}
diff --git a/std-bits/database/src/main/java/org/enso/database/audit/AuditedConnection.java b/std-bits/database/src/main/java/org/enso/database/audit/AuditedConnection.java
new file mode 100644
index 000000000000..0a76dbfee868
--- /dev/null
+++ b/std-bits/database/src/main/java/org/enso/database/audit/AuditedConnection.java
@@ -0,0 +1,369 @@
+package org.enso.database.audit;
+
+import java.sql.Array;
+import java.sql.Blob;
+import java.sql.CallableStatement;
+import java.sql.Clob;
+import java.sql.Connection;
+import java.sql.DatabaseMetaData;
+import java.sql.NClob;
+import java.sql.PreparedStatement;
+import java.sql.SQLClientInfoException;
+import java.sql.SQLException;
+import java.sql.SQLWarning;
+import java.sql.SQLXML;
+import java.sql.Savepoint;
+import java.sql.Statement;
+import java.sql.Struct;
+import java.util.Map;
+import java.util.Properties;
+import java.util.concurrent.Executor;
+
+abstract class AuditedConnection implements Connection {
+ protected final Connection underlying;
+
+ AuditedConnection(Connection underlying) {
+ this.underlying = underlying;
+ }
+
+ abstract void auditQuery(String operationType, String sql);
+
+ abstract void auditTransaction(String operation);
+
+ private RuntimeException unimplemented(String name) {
+ throw new UnsupportedOperationException(
+ name + " is not implemented. This is a bug in the Database library.");
+ }
+
+ @Override
+ public Statement createStatement() throws SQLException {
+ return new AuditedStatementImpl(underlying.createStatement());
+ }
+
+ @Override
+ public PreparedStatement prepareStatement(String sql) throws SQLException {
+ return new AuditedPreparedStatementImpl(underlying.prepareStatement(sql), sql);
+ }
+
+ @Override
+ public CallableStatement prepareCall(String sql) throws SQLException {
+ throw unimplemented("prepareCall");
+ }
+
+ @Override
+ public String nativeSQL(String sql) throws SQLException {
+ return underlying.nativeSQL(sql);
+ }
+
+ @Override
+ public void setAutoCommit(boolean autoCommit) throws SQLException {
+ if (autoCommit != underlying.getAutoCommit()) {
+ auditTransaction("setAutoCommit " + autoCommit);
+ }
+ underlying.setAutoCommit(autoCommit);
+ }
+
+ @Override
+ public boolean getAutoCommit() throws SQLException {
+ return underlying.getAutoCommit();
+ }
+
+ @Override
+ public void commit() throws SQLException {
+ auditTransaction("commit");
+ underlying.commit();
+ }
+
+ @Override
+ public void rollback() throws SQLException {
+ auditTransaction("rollback");
+ underlying.rollback();
+ }
+
+ @Override
+ public void close() throws SQLException {
+ underlying.close();
+ }
+
+ @Override
+ public boolean isClosed() throws SQLException {
+ return underlying.isClosed();
+ }
+
+ @Override
+ public DatabaseMetaData getMetaData() throws SQLException {
+ return underlying.getMetaData();
+ }
+
+ @Override
+ public void setReadOnly(boolean readOnly) throws SQLException {
+ underlying.setReadOnly(readOnly);
+ }
+
+ @Override
+ public boolean isReadOnly() throws SQLException {
+ return underlying.isReadOnly();
+ }
+
+ @Override
+ public void setCatalog(String catalog) throws SQLException {
+ underlying.setCatalog(catalog);
+ }
+
+ @Override
+ public String getCatalog() throws SQLException {
+ return underlying.getCatalog();
+ }
+
+ @Override
+ public void setTransactionIsolation(int level) throws SQLException {
+ underlying.setTransactionIsolation(level);
+ }
+
+ @Override
+ public int getTransactionIsolation() throws SQLException {
+ return underlying.getTransactionIsolation();
+ }
+
+ @Override
+ public SQLWarning getWarnings() throws SQLException {
+ return underlying.getWarnings();
+ }
+
+ @Override
+ public void clearWarnings() throws SQLException {
+ underlying.clearWarnings();
+ }
+
+ @Override
+ public Statement createStatement(int resultSetType, int resultSetConcurrency)
+ throws SQLException {
+ return new AuditedStatementImpl(
+ underlying.createStatement(resultSetType, resultSetConcurrency));
+ }
+
+ @Override
+ public PreparedStatement prepareStatement(String sql, int resultSetType, int resultSetConcurrency)
+ throws SQLException {
+ return new AuditedPreparedStatementImpl(
+ underlying.prepareStatement(sql, resultSetType, resultSetConcurrency), sql);
+ }
+
+ @Override
+ public CallableStatement prepareCall(String sql, int resultSetType, int resultSetConcurrency)
+ throws SQLException {
+ throw unimplemented("prepareCall");
+ }
+
+ @Override
+ public Map<String, Class<?>> getTypeMap() throws SQLException {
+ return underlying.getTypeMap();
+ }
+
+ @Override
+ public void setTypeMap(Map<String, Class<?>> map) throws SQLException {
+ underlying.setTypeMap(map);
+ }
+
+ @Override
+ public void setHoldability(int holdability) throws SQLException {
+ underlying.setHoldability(holdability);
+ }
+
+ @Override
+ public int getHoldability() throws SQLException {
+ return underlying.getHoldability();
+ }
+
+ private String savePointToString(Savepoint savepoint) {
+ try {
+ return savepoint.getSavepointName();
+ } catch (SQLException e) {
+ // Do nothing
+ }
+
+ try {
+ return "Savepoint " + savepoint.getSavepointId();
+ } catch (SQLException e) {
+ return savepoint.toString();
+ }
+ }
+
+ @Override
+ public Savepoint setSavepoint() throws SQLException {
+ var savepoint = underlying.setSavepoint();
+ auditTransaction("setSavepoint " + savePointToString(savepoint));
+ return savepoint;
+ }
+
+ @Override
+ public Savepoint setSavepoint(String name) throws SQLException {
+ var savepoint = underlying.setSavepoint(name);
+ auditTransaction("setSavepoint " + savePointToString(savepoint));
+ return savepoint;
+ }
+
+ @Override
+ public void rollback(Savepoint savepoint) throws SQLException {
+ auditTransaction("rollback " + savePointToString(savepoint));
+ underlying.rollback(savepoint);
+ }
+
+ @Override
+ public void releaseSavepoint(Savepoint savepoint) throws SQLException {
+ auditTransaction("releaseSavepoint " + savePointToString(savepoint));
+ underlying.releaseSavepoint(savepoint);
+ }
+
+ @Override
+ public Statement createStatement(
+ int resultSetType, int resultSetConcurrency, int resultSetHoldability) throws SQLException {
+ return new AuditedStatementImpl(
+ underlying.createStatement(resultSetType, resultSetConcurrency, resultSetHoldability));
+ }
+
+ @Override
+ public PreparedStatement prepareStatement(
+ String sql, int resultSetType, int resultSetConcurrency, int resultSetHoldability)
+ throws SQLException {
+ return new AuditedPreparedStatementImpl(
+ underlying.prepareStatement(sql, resultSetType, resultSetConcurrency, resultSetHoldability),
+ sql);
+ }
+
+ @Override
+ public CallableStatement prepareCall(
+ String sql, int resultSetType, int resultSetConcurrency, int resultSetHoldability)
+ throws SQLException {
+ throw unimplemented("prepareCall");
+ }
+
+ @Override
+ public PreparedStatement prepareStatement(String sql, int autoGeneratedKeys) throws SQLException {
+ return new AuditedPreparedStatementImpl(
+ underlying.prepareStatement(sql, autoGeneratedKeys), sql);
+ }
+
+ @Override
+ public PreparedStatement prepareStatement(String sql, int[] columnIndexes) throws SQLException {
+ return new AuditedPreparedStatementImpl(underlying.prepareStatement(sql, columnIndexes), sql);
+ }
+
+ @Override
+ public PreparedStatement prepareStatement(String sql, String[] columnNames) throws SQLException {
+ return new AuditedPreparedStatementImpl(underlying.prepareStatement(sql, columnNames), sql);
+ }
+
+ @Override
+ public Clob createClob() throws SQLException {
+ return underlying.createClob();
+ }
+
+ @Override
+ public Blob createBlob() throws SQLException {
+ return underlying.createBlob();
+ }
+
+ @Override
+ public NClob createNClob() throws SQLException {
+ return underlying.createNClob();
+ }
+
+ @Override
+ public SQLXML createSQLXML() throws SQLException {
+ return underlying.createSQLXML();
+ }
+
+ @Override
+ public boolean isValid(int timeout) throws SQLException {
+ return underlying.isValid(timeout);
+ }
+
+ @Override
+ public void setClientInfo(String name, String value) throws SQLClientInfoException {
+ underlying.setClientInfo(name, value);
+ }
+
+ @Override
+ public void setClientInfo(Properties properties) throws SQLClientInfoException {
+ underlying.setClientInfo(properties);
+ }
+
+ @Override
+ public String getClientInfo(String name) throws SQLException {
+ return underlying.getClientInfo(name);
+ }
+
+ @Override
+ public Properties getClientInfo() throws SQLException {
+ return underlying.getClientInfo();
+ }
+
+ @Override
+ public Array createArrayOf(String typeName, Object[] elements) throws SQLException {
+ return underlying.createArrayOf(typeName, elements);
+ }
+
+ @Override
+ public Struct createStruct(String typeName, Object[] attributes) throws SQLException {
+ return underlying.createStruct(typeName, attributes);
+ }
+
+ @Override
+ public void setSchema(String schema) throws SQLException {
+ underlying.setSchema(schema);
+ }
+
+ @Override
+ public String getSchema() throws SQLException {
+ return underlying.getSchema();
+ }
+
+ @Override
+ public void abort(Executor executor) throws SQLException {
+ auditTransaction("abort");
+ underlying.abort(executor);
+ }
+
+ @Override
+ public void setNetworkTimeout(Executor executor, int milliseconds) throws SQLException {
+ underlying.setNetworkTimeout(executor, milliseconds);
+ }
+
+ @Override
+ public int getNetworkTimeout() throws SQLException {
+ return underlying.getNetworkTimeout();
+ }
+
+ @Override
+ public <T> T unwrap(Class<T> iface) throws SQLException {
+ return underlying.unwrap(iface);
+ }
+
+ @Override
+ public boolean isWrapperFor(Class<?> iface) throws SQLException {
+ return underlying.isWrapperFor(iface);
+ }
+
+ private class AuditedPreparedStatementImpl extends AuditedPreparedStatement {
+ public AuditedPreparedStatementImpl(PreparedStatement underlying, String sql) {
+ super(underlying, sql);
+ }
+
+ @Override
+ protected void auditQuery(String operationType, String sql) {
+ AuditedConnection.this.auditQuery(operationType, sql);
+ }
+ }
+
+ private class AuditedStatementImpl extends AuditedStatement {
+ public AuditedStatementImpl(Statement underlying) {
+ super(underlying);
+ }
+
+ @Override
+ protected void auditQuery(String operationType, String sql) {
+ AuditedConnection.this.auditQuery(operationType, sql);
+ }
+ }
+}
diff --git a/std-bits/database/src/main/java/org/enso/database/audit/AuditedPreparedStatement.java b/std-bits/database/src/main/java/org/enso/database/audit/AuditedPreparedStatement.java
new file mode 100644
index 000000000000..9406774d05bf
--- /dev/null
+++ b/std-bits/database/src/main/java/org/enso/database/audit/AuditedPreparedStatement.java
@@ -0,0 +1,546 @@
+package org.enso.database.audit;
+
+import java.io.InputStream;
+import java.io.Reader;
+import java.math.BigDecimal;
+import java.net.URL;
+import java.sql.Array;
+import java.sql.Blob;
+import java.sql.Clob;
+import java.sql.Connection;
+import java.sql.Date;
+import java.sql.NClob;
+import java.sql.ParameterMetaData;
+import java.sql.PreparedStatement;
+import java.sql.Ref;
+import java.sql.ResultSet;
+import java.sql.ResultSetMetaData;
+import java.sql.RowId;
+import java.sql.SQLException;
+import java.sql.SQLWarning;
+import java.sql.SQLXML;
+import java.sql.Time;
+import java.sql.Timestamp;
+import java.util.Calendar;
+
+abstract class AuditedPreparedStatement implements PreparedStatement {
+ private final PreparedStatement underlying;
+ private final String sql;
+
+ protected abstract void auditQuery(String operationType, String sql);
+
+ public AuditedPreparedStatement(PreparedStatement underlying, String sql) {
+ this.underlying = underlying;
+ this.sql = sql;
+ }
+
+ @Override
+ public ResultSet executeQuery() throws SQLException {
+ auditQuery("query", sql);
+ return underlying.executeQuery();
+ }
+
+ @Override
+ public int executeUpdate() throws SQLException {
+ auditQuery("update", sql);
+ return underlying.executeUpdate();
+ }
+
+ @Override
+ public void setNull(int parameterIndex, int sqlType) throws SQLException {
+ underlying.setNull(parameterIndex, sqlType);
+ }
+
+ @Override
+ public void setBoolean(int parameterIndex, boolean x) throws SQLException {
+ underlying.setBoolean(parameterIndex, x);
+ }
+
+ @Override
+ public void setByte(int parameterIndex, byte x) throws SQLException {
+ underlying.setByte(parameterIndex, x);
+ }
+
+ @Override
+ public void setShort(int parameterIndex, short x) throws SQLException {
+ underlying.setShort(parameterIndex, x);
+ }
+
+ @Override
+ public void setInt(int parameterIndex, int x) throws SQLException {
+ underlying.setInt(parameterIndex, x);
+ }
+
+ @Override
+ public void setLong(int parameterIndex, long x) throws SQLException {
+ underlying.setLong(parameterIndex, x);
+ }
+
+ @Override
+ public void setFloat(int parameterIndex, float x) throws SQLException {
+ underlying.setFloat(parameterIndex, x);
+ }
+
+ @Override
+ public void setDouble(int parameterIndex, double x) throws SQLException {
+ underlying.setDouble(parameterIndex, x);
+ }
+
+ @Override
+ public void setBigDecimal(int parameterIndex, BigDecimal x) throws SQLException {
+ underlying.setBigDecimal(parameterIndex, x);
+ }
+
+ @Override
+ public void setString(int parameterIndex, String x) throws SQLException {
+ underlying.setString(parameterIndex, x);
+ }
+
+ @Override
+ public void setBytes(int parameterIndex, byte[] x) throws SQLException {
+ underlying.setBytes(parameterIndex, x);
+ }
+
+ @Override
+ public void setDate(int parameterIndex, Date x) throws SQLException {
+ underlying.setDate(parameterIndex, x);
+ }
+
+ @Override
+ public void setTime(int parameterIndex, Time x) throws SQLException {
+ underlying.setTime(parameterIndex, x);
+ }
+
+ @Override
+ public void setTimestamp(int parameterIndex, Timestamp x) throws SQLException {
+ underlying.setTimestamp(parameterIndex, x);
+ }
+
+ @Override
+ public void setAsciiStream(int parameterIndex, InputStream x, int length) throws SQLException {
+ underlying.setAsciiStream(parameterIndex, x, length);
+ }
+
+ @SuppressWarnings("deprecation")
+ @Override
+ public void setUnicodeStream(int parameterIndex, InputStream x, int length) throws SQLException {
+ underlying.setUnicodeStream(parameterIndex, x, length);
+ }
+
+ @Override
+ public void setBinaryStream(int parameterIndex, InputStream x, int length) throws SQLException {
+ underlying.setBinaryStream(parameterIndex, x, length);
+ }
+
+ @Override
+ public void clearParameters() throws SQLException {
+ underlying.clearParameters();
+ }
+
+ @Override
+ public void setObject(int parameterIndex, Object x, int targetSqlType) throws SQLException {
+ underlying.setObject(parameterIndex, x, targetSqlType);
+ }
+
+ @Override
+ public void setObject(int parameterIndex, Object x) throws SQLException {
+ underlying.setObject(parameterIndex, x);
+ }
+
+ @Override
+ public boolean execute() throws SQLException {
+ auditQuery("execute", sql);
+ return underlying.execute();
+ }
+
+ @Override
+ public void addBatch() throws SQLException {
+ underlying.addBatch();
+ }
+
+ @Override
+ public void setCharacterStream(int parameterIndex, Reader reader, int length)
+ throws SQLException {
+ underlying.setCharacterStream(parameterIndex, reader, length);
+ }
+
+ @Override
+ public void setRef(int parameterIndex, Ref x) throws SQLException {
+ underlying.setRef(parameterIndex, x);
+ }
+
+ @Override
+ public void setBlob(int parameterIndex, Blob x) throws SQLException {
+ underlying.setBlob(parameterIndex, x);
+ }
+
+ @Override
+ public void setClob(int parameterIndex, Clob x) throws SQLException {
+ underlying.setClob(parameterIndex, x);
+ }
+
+ @Override
+ public void setArray(int parameterIndex, Array x) throws SQLException {
+ underlying.setArray(parameterIndex, x);
+ }
+
+ @Override
+ public ResultSetMetaData getMetaData() throws SQLException {
+ return underlying.getMetaData();
+ }
+
+ @Override
+ public void setDate(int parameterIndex, Date x, Calendar cal) throws SQLException {
+ underlying.setDate(parameterIndex, x, cal);
+ }
+
+ @Override
+ public void setTime(int parameterIndex, Time x, Calendar cal) throws SQLException {
+ underlying.setTime(parameterIndex, x, cal);
+ }
+
+ @Override
+ public void setTimestamp(int parameterIndex, Timestamp x, Calendar cal) throws SQLException {
+ underlying.setTimestamp(parameterIndex, x, cal);
+ }
+
+ @Override
+ public void setNull(int parameterIndex, int sqlType, String typeName) throws SQLException {
+ underlying.setNull(parameterIndex, sqlType, typeName);
+ }
+
+ @Override
+ public void setURL(int parameterIndex, URL x) throws SQLException {
+ underlying.setURL(parameterIndex, x);
+ }
+
+ @Override
+ public ParameterMetaData getParameterMetaData() throws SQLException {
+ return underlying.getParameterMetaData();
+ }
+
+ @Override
+ public void setRowId(int parameterIndex, RowId x) throws SQLException {
+ underlying.setRowId(parameterIndex, x);
+ }
+
+ @Override
+ public void setNString(int parameterIndex, String value) throws SQLException {
+ underlying.setNString(parameterIndex, value);
+ }
+
+ @Override
+ public void setNCharacterStream(int parameterIndex, Reader value, long length)
+ throws SQLException {
+ underlying.setNCharacterStream(parameterIndex, value, length);
+ }
+
+ @Override
+ public void setNClob(int parameterIndex, NClob value) throws SQLException {
+ underlying.setNClob(parameterIndex, value);
+ }
+
+ @Override
+ public void setClob(int parameterIndex, Reader reader, long length) throws SQLException {
+ underlying.setClob(parameterIndex, reader, length);
+ }
+
+ @Override
+ public void setBlob(int parameterIndex, InputStream inputStream, long length)
+ throws SQLException {
+ underlying.setBlob(parameterIndex, inputStream, length);
+ }
+
+ @Override
+ public void setNClob(int parameterIndex, Reader reader, long length) throws SQLException {
+ underlying.setNClob(parameterIndex, reader, length);
+ }
+
+ @Override
+ public void setSQLXML(int parameterIndex, SQLXML xmlObject) throws SQLException {
+ underlying.setSQLXML(parameterIndex, xmlObject);
+ }
+
+ @Override
+ public void setObject(int parameterIndex, Object x, int targetSqlType, int scaleOrLength)
+ throws SQLException {
+ underlying.setObject(parameterIndex, x, targetSqlType, scaleOrLength);
+ }
+
+ @Override
+ public void setAsciiStream(int parameterIndex, InputStream x, long length) throws SQLException {
+ underlying.setAsciiStream(parameterIndex, x, length);
+ }
+
+ @Override
+ public void setBinaryStream(int parameterIndex, InputStream x, long length) throws SQLException {
+ underlying.setBinaryStream(parameterIndex, x, length);
+ }
+
+ @Override
+ public void setCharacterStream(int parameterIndex, Reader reader, long length)
+ throws SQLException {
+ underlying.setCharacterStream(parameterIndex, reader, length);
+ }
+
+ @Override
+ public void setAsciiStream(int parameterIndex, InputStream x) throws SQLException {
+ underlying.setAsciiStream(parameterIndex, x);
+ }
+
+ @Override
+ public void setBinaryStream(int parameterIndex, InputStream x) throws SQLException {
+ underlying.setBinaryStream(parameterIndex, x);
+ }
+
+ @Override
+ public void setCharacterStream(int parameterIndex, Reader reader) throws SQLException {
+ underlying.setCharacterStream(parameterIndex, reader);
+ }
+
+ @Override
+ public void setNCharacterStream(int parameterIndex, Reader value) throws SQLException {
+ underlying.setNCharacterStream(parameterIndex, value);
+ }
+
+ @Override
+ public void setClob(int parameterIndex, Reader reader) throws SQLException {
+ underlying.setClob(parameterIndex, reader);
+ }
+
+ @Override
+ public void setBlob(int parameterIndex, InputStream inputStream) throws SQLException {
+ underlying.setBlob(parameterIndex, inputStream);
+ }
+
+ @Override
+ public void setNClob(int parameterIndex, Reader reader) throws SQLException {
+ underlying.setNClob(parameterIndex, reader);
+ }
+
+ @Override
+ public ResultSet executeQuery(String sql) throws SQLException {
+ throw new IllegalStateException("Cannot be called on PreparedStatement");
+ }
+
+ @Override
+ public int executeUpdate(String sql) throws SQLException {
+ throw new IllegalStateException("Cannot be called on PreparedStatement");
+ }
+
+ @Override
+ public void close() throws SQLException {
+ underlying.close();
+ }
+
+ @Override
+ public int getMaxFieldSize() throws SQLException {
+ return underlying.getMaxFieldSize();
+ }
+
+ @Override
+ public void setMaxFieldSize(int max) throws SQLException {
+ underlying.setMaxFieldSize(max);
+ }
+
+ @Override
+ public int getMaxRows() throws SQLException {
+ return underlying.getMaxRows();
+ }
+
+ @Override
+ public void setMaxRows(int max) throws SQLException {
+ underlying.setMaxRows(max);
+ }
+
+ @Override
+ public void setEscapeProcessing(boolean enable) throws SQLException {
+ underlying.setEscapeProcessing(enable);
+ }
+
+ @Override
+ public int getQueryTimeout() throws SQLException {
+ return underlying.getQueryTimeout();
+ }
+
+ @Override
+ public void setQueryTimeout(int seconds) throws SQLException {
+ underlying.setQueryTimeout(seconds);
+ }
+
+ @Override
+ public void cancel() throws SQLException {
+ underlying.cancel();
+ }
+
+ @Override
+ public SQLWarning getWarnings() throws SQLException {
+ return underlying.getWarnings();
+ }
+
+ @Override
+ public void clearWarnings() throws SQLException {
+ underlying.clearWarnings();
+ }
+
+ @Override
+ public void setCursorName(String name) throws SQLException {
+ underlying.setCursorName(name);
+ }
+
+ @Override
+ public boolean execute(String sql) throws SQLException {
+ throw new IllegalStateException("Cannot be called on PreparedStatement");
+ }
+
+ @Override
+ public ResultSet getResultSet() throws SQLException {
+ return underlying.getResultSet();
+ }
+
+ @Override
+ public int getUpdateCount() throws SQLException {
+ return underlying.getUpdateCount();
+ }
+
+ @Override
+ public boolean getMoreResults() throws SQLException {
+ return underlying.getMoreResults();
+ }
+
+ @Override
+ public int getFetchDirection() throws SQLException {
+ return underlying.getFetchDirection();
+ }
+
+ @Override
+ public void setFetchDirection(int direction) throws SQLException {
+ underlying.setFetchDirection(direction);
+ }
+
+ @Override
+ public int getFetchSize() throws SQLException {
+ return underlying.getFetchSize();
+ }
+
+ @Override
+ public void setFetchSize(int rows) throws SQLException {
+ underlying.setFetchSize(rows);
+ }
+
+ @Override
+ public int getResultSetConcurrency() throws SQLException {
+ return underlying.getResultSetConcurrency();
+ }
+
+ @Override
+ public int getResultSetType() throws SQLException {
+ return underlying.getResultSetType();
+ }
+
+ @Override
+ public void addBatch(String sql) throws SQLException {
+ throw new IllegalStateException("Cannot be called on PreparedStatement");
+ }
+
+ @Override
+ public void clearBatch() throws SQLException {
+ underlying.clearBatch();
+ }
+
+ private boolean firstInBatch = true;
+
+ @Override
+ public int[] executeBatch() throws SQLException {
+ if (firstInBatch) {
+ auditQuery("executeBatch", sql);
+ firstInBatch = false;
+ }
+ return underlying.executeBatch();
+ }
+
+ @Override
+ public Connection getConnection() throws SQLException {
+ throw new IllegalStateException("`getConnection` cannot be called on AuditedPreparedStatement");
+ }
+
+ @Override
+ public boolean getMoreResults(int current) throws SQLException {
+ return underlying.getMoreResults(current);
+ }
+
+ @Override
+ public ResultSet getGeneratedKeys() throws SQLException {
+ return underlying.getGeneratedKeys();
+ }
+
+ @Override
+ public int executeUpdate(String sql, int autoGeneratedKeys) throws SQLException {
+ throw new IllegalStateException("Cannot be called on PreparedStatement");
+ }
+
+ @Override
+ public int executeUpdate(String sql, int[] columnIndexes) throws SQLException {
+ throw new IllegalStateException("Cannot be called on PreparedStatement");
+ }
+
+ @Override
+ public int executeUpdate(String sql, String[] columnNames) throws SQLException {
+ throw new IllegalStateException("Cannot be called on PreparedStatement");
+ }
+
+ @Override
+ public boolean execute(String sql, int autoGeneratedKeys) throws SQLException {
+ throw new IllegalStateException("Cannot be called on PreparedStatement");
+ }
+
+ @Override
+ public boolean execute(String sql, int[] columnIndexes) throws SQLException {
+ throw new IllegalStateException("Cannot be called on PreparedStatement");
+ }
+
+ @Override
+ public boolean execute(String sql, String[] columnNames) throws SQLException {
+ throw new IllegalStateException("Cannot be called on PreparedStatement");
+ }
+
+ @Override
+ public int getResultSetHoldability() throws SQLException {
+ return underlying.getResultSetHoldability();
+ }
+
+ @Override
+ public boolean isClosed() throws SQLException {
+ return underlying.isClosed();
+ }
+
+ @Override
+ public boolean isPoolable() throws SQLException {
+ return underlying.isPoolable();
+ }
+
+ @Override
+ public void setPoolable(boolean poolable) throws SQLException {
+ underlying.setPoolable(poolable);
+ }
+
+ @Override
+ public void closeOnCompletion() throws SQLException {
+ underlying.closeOnCompletion();
+ }
+
+ @Override
+ public boolean isCloseOnCompletion() throws SQLException {
+ return underlying.isCloseOnCompletion();
+ }
+
+  @Override
+  public <T> T unwrap(Class<T> iface) throws SQLException {
+    return underlying.unwrap(iface);
+  }
+
+  @Override
+  public boolean isWrapperFor(Class<?> iface) throws SQLException {
+    return underlying.isWrapperFor(iface);
+  }
+}
diff --git a/std-bits/database/src/main/java/org/enso/database/audit/AuditedStatement.java b/std-bits/database/src/main/java/org/enso/database/audit/AuditedStatement.java
new file mode 100644
index 000000000000..a9c953a189c9
--- /dev/null
+++ b/std-bits/database/src/main/java/org/enso/database/audit/AuditedStatement.java
@@ -0,0 +1,255 @@
+package org.enso.database.audit;
+
+import java.sql.Connection;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.SQLWarning;
+import java.sql.Statement;
+
+abstract class AuditedStatement implements Statement {
+ private final Statement underlying;
+
+ AuditedStatement(Statement underlying) {
+ this.underlying = underlying;
+ }
+
+ protected abstract void auditQuery(String operationType, String sql);
+
+ @Override
+ public ResultSet executeQuery(String sql) throws SQLException {
+ auditQuery("query", sql);
+ return underlying.executeQuery(sql);
+ }
+
+ @Override
+ public int executeUpdate(String sql) throws SQLException {
+ auditQuery("update", sql);
+ return underlying.executeUpdate(sql);
+ }
+
+ @Override
+ public void close() throws SQLException {
+ underlying.close();
+ }
+
+ @Override
+ public int getMaxFieldSize() throws SQLException {
+ return underlying.getMaxFieldSize();
+ }
+
+ @Override
+ public void setMaxFieldSize(int max) throws SQLException {
+ underlying.setMaxFieldSize(max);
+ }
+
+ @Override
+ public int getMaxRows() throws SQLException {
+ return underlying.getMaxRows();
+ }
+
+ @Override
+ public void setMaxRows(int max) throws SQLException {
+ underlying.setMaxRows(max);
+ }
+
+ @Override
+ public void setEscapeProcessing(boolean enable) throws SQLException {
+ underlying.setEscapeProcessing(enable);
+ }
+
+ @Override
+ public int getQueryTimeout() throws SQLException {
+ return underlying.getQueryTimeout();
+ }
+
+ @Override
+ public void setQueryTimeout(int seconds) throws SQLException {
+ underlying.setQueryTimeout(seconds);
+ }
+
+ @Override
+ public void cancel() throws SQLException {
+ underlying.cancel();
+ }
+
+ @Override
+ public SQLWarning getWarnings() throws SQLException {
+ return underlying.getWarnings();
+ }
+
+ @Override
+ public void clearWarnings() throws SQLException {
+ underlying.clearWarnings();
+ }
+
+ @Override
+ public void setCursorName(String name) throws SQLException {
+ underlying.setCursorName(name);
+ }
+
+ @Override
+ public boolean execute(String sql) throws SQLException {
+ auditQuery("execute", sql);
+ return underlying.execute(sql);
+ }
+
+ @Override
+ public ResultSet getResultSet() throws SQLException {
+ return underlying.getResultSet();
+ }
+
+ @Override
+ public int getUpdateCount() throws SQLException {
+ return underlying.getUpdateCount();
+ }
+
+ @Override
+ public boolean getMoreResults() throws SQLException {
+ return underlying.getMoreResults();
+ }
+
+ @Override
+ public void setFetchDirection(int direction) throws SQLException {
+ underlying.setFetchDirection(direction);
+ }
+
+ @Override
+ public int getFetchDirection() throws SQLException {
+ return underlying.getFetchDirection();
+ }
+
+  @Override
+  public void setFetchSize(int rows) throws SQLException {
+    underlying.setFetchSize(rows); // fixed: was setFetchDirection(rows) — copy-paste bug
+  }
+
+ @Override
+ public int getFetchSize() throws SQLException {
+ return underlying.getFetchSize();
+ }
+
+ @Override
+ public int getResultSetConcurrency() throws SQLException {
+ return underlying.getResultSetConcurrency();
+ }
+
+ @Override
+ public int getResultSetType() throws SQLException {
+ return underlying.getResultSetType();
+ }
+
+ StringBuilder currentBatch = new StringBuilder();
+
+ @Override
+ public void addBatch(String sql) throws SQLException {
+ if (!currentBatch.isEmpty()) {
+ currentBatch.append("; ");
+ }
+ currentBatch.append(sql);
+ underlying.addBatch(sql);
+ }
+
+ @Override
+ public void clearBatch() throws SQLException {
+ currentBatch = new StringBuilder();
+ underlying.clearBatch();
+ }
+
+ @Override
+ public int[] executeBatch() throws SQLException {
+ auditQuery("executeBatch", currentBatch.toString());
+ currentBatch = new StringBuilder();
+ return underlying.executeBatch();
+ }
+
+ @Override
+ public Connection getConnection() throws SQLException {
+ throw new IllegalStateException("`getConnection` cannot be called on AuditedStatement");
+ }
+
+ @Override
+ public boolean getMoreResults(int current) throws SQLException {
+ return underlying.getMoreResults(current);
+ }
+
+ @Override
+ public ResultSet getGeneratedKeys() throws SQLException {
+ return underlying.getGeneratedKeys();
+ }
+
+ @Override
+ public int executeUpdate(String sql, int autoGeneratedKeys) throws SQLException {
+ auditQuery("update", sql);
+ return underlying.executeUpdate(sql, autoGeneratedKeys);
+ }
+
+ @Override
+ public int executeUpdate(String sql, int[] columnIndexes) throws SQLException {
+ auditQuery("update", sql);
+ return underlying.executeUpdate(sql, columnIndexes);
+ }
+
+ @Override
+ public int executeUpdate(String sql, String[] columnNames) throws SQLException {
+ auditQuery("update", sql);
+ return underlying.executeUpdate(sql, columnNames);
+ }
+
+ @Override
+ public boolean execute(String sql, int autoGeneratedKeys) throws SQLException {
+ auditQuery("execute", sql);
+ return underlying.execute(sql, autoGeneratedKeys);
+ }
+
+ @Override
+ public boolean execute(String sql, int[] columnIndexes) throws SQLException {
+ auditQuery("execute", sql);
+ return underlying.execute(sql, columnIndexes);
+ }
+
+ @Override
+ public boolean execute(String sql, String[] columnNames) throws SQLException {
+ auditQuery("execute", sql);
+ return underlying.execute(sql, columnNames);
+ }
+
+ @Override
+ public int getResultSetHoldability() throws SQLException {
+ return underlying.getResultSetHoldability();
+ }
+
+ @Override
+ public boolean isClosed() throws SQLException {
+ return underlying.isClosed();
+ }
+
+ @Override
+ public void setPoolable(boolean poolable) throws SQLException {
+ underlying.setPoolable(poolable);
+ }
+
+ @Override
+ public boolean isPoolable() throws SQLException {
+ return underlying.isPoolable();
+ }
+
+ @Override
+ public void closeOnCompletion() throws SQLException {
+ underlying.closeOnCompletion();
+ }
+
+ @Override
+ public boolean isCloseOnCompletion() throws SQLException {
+ return underlying.isCloseOnCompletion();
+ }
+
+  @Override
+  public <T> T unwrap(Class<T> iface) throws SQLException {
+    return underlying.unwrap(iface);
+  }
+
+  @Override
+  public boolean isWrapperFor(Class<?> iface) throws SQLException {
+    return underlying.isWrapperFor(iface);
+  }
+}
diff --git a/std-bits/database/src/main/java/org/enso/database/audit/CloudAuditedConnection.java b/std-bits/database/src/main/java/org/enso/database/audit/CloudAuditedConnection.java
new file mode 100644
index 000000000000..1fa43fd45d5d
--- /dev/null
+++ b/std-bits/database/src/main/java/org/enso/database/audit/CloudAuditedConnection.java
@@ -0,0 +1,50 @@
+package org.enso.database.audit;
+
+import com.fasterxml.jackson.databind.node.JsonNodeFactory;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+import java.sql.Connection;
+import java.sql.SQLException;
+import java.util.logging.Logger;
+import org.enso.base.enso_cloud.audit.AuditLog;
+
+public final class CloudAuditedConnection extends AuditedConnection {
+ private static final Logger logger = Logger.getLogger(CloudAuditedConnection.class.getName());
+ private final ObjectNode metadata;
+
+ /**
+ * Because logs are sent asynchronously and their timestamps are only assigned on the receiving
+ * end, we need to assign a sequence number to be able to know the ordering between various
+ * events.
+ */
+ private long sequenceNumber = 1;
+
+ public CloudAuditedConnection(Connection underlying, String relatedAssetId) {
+ super(underlying);
+ metadata = new ObjectNode(JsonNodeFactory.instance);
+ if (relatedAssetId != null) {
+ metadata.put("asset_id", relatedAssetId);
+ }
+ try {
+ metadata.put("connection_uri", underlying.getMetaData().getURL());
+ } catch (SQLException e) {
+ // We ignore the exception, only logging it
+ logger.warning("Failed to get connection URI for " + underlying + ": " + e.getMessage());
+ }
+ }
+
+ private void audit(String operationType, String message) {
+ var metadataCopy = metadata.deepCopy();
+ metadataCopy.put("sequence_number", sequenceNumber++);
+ AuditLog.logAsync(operationType, message, metadataCopy);
+ }
+
+ @Override
+ protected void auditQuery(String operationType, String sql) {
+ audit(operationType, sql);
+ }
+
+ @Override
+ protected void auditTransaction(String operation) {
+ audit("transaction", operation);
+ }
+}
diff --git a/std-bits/database/src/main/java/org/enso/database/audit/LocalAuditedConnection.java b/std-bits/database/src/main/java/org/enso/database/audit/LocalAuditedConnection.java
new file mode 100644
index 000000000000..691e8de9cf9b
--- /dev/null
+++ b/std-bits/database/src/main/java/org/enso/database/audit/LocalAuditedConnection.java
@@ -0,0 +1,33 @@
+package org.enso.database.audit;
+
+import java.sql.Connection;
+import java.sql.SQLException;
+import java.util.logging.Logger;
+
+public class LocalAuditedConnection extends AuditedConnection {
+ private static final Logger logger = Logger.getLogger("Standard.Database.Connection");
+ private final String connectionIdentifier;
+
+ public LocalAuditedConnection(Connection underlying) {
+ super(underlying);
+
+ String connectionUri = null;
+ try {
+ connectionUri = underlying.getMetaData().getURL();
+ } catch (SQLException e) {
+ // We ignore the exception
+ }
+
+ this.connectionIdentifier = connectionUri == null ? underlying.toString() : connectionUri;
+ }
+
+ @Override
+ protected void auditQuery(String operationType, String sql) {
+ logger.info(connectionIdentifier + " - " + operationType + ": " + sql);
+ }
+
+ @Override
+ protected void auditTransaction(String operation) {
+ logger.info(connectionIdentifier + " - transaction - " + operation);
+ }
+}
diff --git a/test/Base_Tests/src/Network/Enso_Cloud/Audit_Log_Spec.enso b/test/Base_Tests/src/Network/Enso_Cloud/Audit_Log_Spec.enso
new file mode 100644
index 000000000000..71277bb422d3
--- /dev/null
+++ b/test/Base_Tests/src/Network/Enso_Cloud/Audit_Log_Spec.enso
@@ -0,0 +1,83 @@
+from Standard.Base import all
+import Standard.Base.Enso_Cloud.Internal.Audit_Log.Audit_Log
+import Standard.Base.Enso_Cloud.Errors.Enso_Cloud_Error
+import Standard.Base.Errors.Illegal_Argument.Illegal_Argument
+import Standard.Base.Errors.Time_Error.Time_Error
+from Standard.Base.Enso_Cloud.Public_Utils import get_optional_field, get_required_field, cloud_http_request_for_test
+
+from Standard.Test import all
+import Standard.Test.Test_Environment
+
+import project.Network.Enso_Cloud.Cloud_Tests_Setup.Cloud_Tests_Setup
+
+
+add_specs suite_builder =
+ ## By default, these tests are run only on the Cloud mock, not on the real deployment.
+ This is mostly because we don't yet have log filtering so the results on the real deployment could be massive.
+ The local environment is more predictable for running these tests.
+ The following flag can be changed to `False` to run it on the real cloud (if it is set up in the test context).
+ This can be used to verify that the mock logic is consistent with the real thing.
+ always_run_on_mock = True
+ setup = if always_run_on_mock then Cloud_Tests_Setup.prepare_mock_setup else Cloud_Tests_Setup.prepare
+ suite_builder.group "Enso Cloud Audit Log" pending=setup.pending group_builder->
+ [False, True].each async->
+ group_builder.specify "should be able to send a log message "+(if async then "(async)" else "(sync)") <| setup.with_prepared_environment <|
+ random_payload = "payload-" + Random.uuid
+ Audit_Log.report_event "TestEvent" "Message" (JS_Object.from_pairs [["my_field", random_payload]]) async=async . should_succeed
+ my_event = Test.with_retries <|
+ event = get_audit_log_events . find ev-> (ev.metadata.get "my_field") == random_payload
+ event.should_succeed
+ event
+
+ my_event.metadata.get "operation" . should_equal "TestEvent"
+
+ # TODO this test should be re-enabled after https://github.com/enso-org/enso/issues/9845 is fixed
+ #my_event.metadata.get "projectName" . should_equal "enso_dev.Base_Tests"
+
+ my_event.message . should_equal "Message"
+ my_event.user_email . should_equal Enso_User.current.email
+
+ group_builder.specify "will include the project id in the log message, if provided by the Cloud" <| setup.with_prepared_environment <|
+ example_project_id = "project-27xJM00p8jWoL2qByTo6tQfciWC"
+ Test_Environment.unsafe_with_environment_override "ENSO_CLOUD_PROJECT_ID" example_project_id <|
+ random_payload = "payload-" + Random.uuid
+ Audit_Log.report_event "TestEventInCloud" "Messageâ›…" (JS_Object.from_pairs [["my_field", random_payload]]) async=False . should_succeed
+ my_event = Test.with_retries <|
+ event = get_audit_log_events . find ev-> (ev.metadata.get "my_field") == random_payload
+ event.should_succeed
+ event
+
+ my_event.project_id . should_equal example_project_id
+
+ group_builder.specify "does not allow restricted fields in metadata" <| setup.with_prepared_environment <|
+ Audit_Log.report_event "TestEventType" "Message" (JS_Object.from_pairs [["type", "my type override?"]]) . should_fail_with Illegal_Argument
+
+main filter=Nothing =
+ suite = Test.build suite_builder->
+ add_specs suite_builder
+ suite.run_with_filter filter
+
+## PRIVATE
+ Returns log events available in the current account.
+ This method is only used for testing and should not be used in production.
+get_audit_log_events -> Vector Audit_Log_Event =
+ json = cloud_http_request_for_test HTTP_Method.Get "log_events"
+ events_json = get_required_field "events" json
+ events_json.map Audit_Log_Event.from_json
+
+## PRIVATE
+type Audit_Log_Event
+ ## PRIVATE
+ Value organization_id:Text user_email:Text timestamp:Date_Time metadata:JS_Object message:Text project_id:Text
+
+ ## PRIVATE
+ from_json json =
+ organization_id = get_required_field "organizationId" json expected_type=Text
+ user_email = get_required_field "userEmail" json expected_type=Text
+ timestamp_text = get_optional_field "timestamp" json expected_type=Text
+ timestamp = timestamp_text.if_not_nothing <| Date_Time.parse timestamp_text . catch Time_Error error->
+ Error.throw (Enso_Cloud_Error.Invalid_Response_Payload "Invalid timestamp format in audit log event: "+error.to_display_text)
+ metadata = get_required_field "metadata" json
+ message = get_required_field "message" json expected_type=Text
+ project_id = get_optional_field "projectId" json expected_type=Text
+ Audit_Log_Event.Value organization_id user_email timestamp metadata message project_id
diff --git a/test/Base_Tests/src/Network/Enso_Cloud/Cloud_Data_Link_Spec.enso b/test/Base_Tests/src/Network/Enso_Cloud/Cloud_Data_Link_Spec.enso
index 1394bc1d7b0a..dc72ae7bc7ab 100644
--- a/test/Base_Tests/src/Network/Enso_Cloud/Cloud_Data_Link_Spec.enso
+++ b/test/Base_Tests/src/Network/Enso_Cloud/Cloud_Data_Link_Spec.enso
@@ -16,7 +16,7 @@ import project.Network.Enso_Cloud.Cloud_Tests_Setup.Temporary_Directory
from project.Network.Enso_Cloud.Cloud_Tests_Setup import create_local_datalink_to
-add_specs suite_builder setup:Cloud_Tests_Setup = setup.with_prepared_environment <|
+add_specs suite_builder setup:Cloud_Tests_Setup =
suite_builder.group "DataLinks in Enso Cloud" pending=setup.real_cloud_pending group_builder->
group_builder.specify "should be able to access an example HTTP data-link" <|
## We assume that the cloud has a sample data-link called `TestDataLink` present
diff --git a/test/Base_Tests/src/Network/Enso_Cloud/Cloud_Tests_Setup.enso b/test/Base_Tests/src/Network/Enso_Cloud/Cloud_Tests_Setup.enso
index 3ece2e016988..e191d9d2d028 100644
--- a/test/Base_Tests/src/Network/Enso_Cloud/Cloud_Tests_Setup.enso
+++ b/test/Base_Tests/src/Network/Enso_Cloud/Cloud_Tests_Setup.enso
@@ -126,6 +126,14 @@ type Cloud_Tests_Setup
HTTP.fetch uri . decode_as_json
_ -> Panic.throw (Illegal_State.Error "This method is only valid for Mock setup.")
+ ## Runs the action with overridden credentials, clearing caches before and
+ after the action to ensure that the credential change is picked up.
+ run_with_overridden_credentials path:File ~action =
+ Panic.with_finalizer Cloud_Tests_Setup.reset <|
+ Test_Environment.unsafe_with_environment_override "ENSO_CLOUD_CREDENTIALS_FILE" path.absolute.normalize.path <|
+ Cloud_Tests_Setup.reset
+ action
+
type Mock_Credentials
Value access_token:Text expire_at:Date_Time refresh_token:Text refresh_url:Text client_id:Text
diff --git a/test/Base_Tests/src/Network/Enso_Cloud/Enso_Cloud_Spec.enso b/test/Base_Tests/src/Network/Enso_Cloud/Enso_Cloud_Spec.enso
index 8894d048abd9..e65fc5e36db6 100644
--- a/test/Base_Tests/src/Network/Enso_Cloud/Enso_Cloud_Spec.enso
+++ b/test/Base_Tests/src/Network/Enso_Cloud/Enso_Cloud_Spec.enso
@@ -7,7 +7,6 @@ import Standard.Base.Errors.Illegal_Argument.Illegal_Argument
import Standard.Base.Errors.Illegal_State.Illegal_State
from Standard.Test import all
-import Standard.Test.Test_Environment
from Standard.Test.Execution_Context_Helpers import run_with_and_without_output
@@ -72,21 +71,16 @@ add_specs suite_builder setup:Cloud_Tests_Setup =
group_builder.specify "will fail if the user is not logged in" <| setup.with_prepared_environment <|
non_existent_file = (enso_project.data / "nonexistent-file") . absolute . normalize
non_existent_file.exists.should_be_false
- Panic.with_finalizer Cloud_Tests_Setup.reset <|
- Test_Environment.unsafe_with_environment_override "ENSO_CLOUD_CREDENTIALS_FILE" non_existent_file.path <|
- Cloud_Tests_Setup.reset
- payload = Test.expect_panic Not_Logged_In <|
- Enso_User.current
- payload.to_display_text . should_contain "Please log in and restart"
+ Cloud_Tests_Setup.run_with_overridden_credentials non_existent_file <|
+ payload = Test.expect_panic Not_Logged_In <|
+ Enso_User.current
+ payload.to_display_text . should_contain "Please log in and restart"
group_builder.specify "will fail if the token is malformed" pending=setup.pending <| setup.with_prepared_environment <|
invalid_token_file = File.create_temporary_file "enso-test-credentials" "-invalid.txt"
run_with_token_payload payload ~action =
payload.write invalid_token_file . should_succeed
- Cloud_Tests_Setup.reset
- Panic.with_finalizer Cloud_Tests_Setup.reset <|
- Test_Environment.unsafe_with_environment_override "ENSO_CLOUD_CREDENTIALS_FILE" invalid_token_file.absolute.normalize.path <|
- action
+ Cloud_Tests_Setup.run_with_overridden_credentials invalid_token_file action
r1 = run_with_token_payload "invalid-token" <|
Test.expect_panic Illegal_State <| Enso_User.current
diff --git a/test/Base_Tests/src/Network/Enso_Cloud/Main.enso b/test/Base_Tests/src/Network/Enso_Cloud/Main.enso
index 88eed588d6f7..42a82da17f39 100644
--- a/test/Base_Tests/src/Network/Enso_Cloud/Main.enso
+++ b/test/Base_Tests/src/Network/Enso_Cloud/Main.enso
@@ -2,6 +2,7 @@ from Standard.Base import all
from Standard.Test import all
+import project.Network.Enso_Cloud.Audit_Log_Spec
import project.Network.Enso_Cloud.Cloud_Data_Link_Spec
import project.Network.Enso_Cloud.Cloud_Tests_Setup.Cloud_Tests_Setup
import project.Network.Enso_Cloud.Enso_Cloud_Spec
@@ -13,6 +14,7 @@ add_specs suite_builder setup =
Enso_File_Spec.add_specs suite_builder setup
Secrets_Spec.add_specs suite_builder setup
Cloud_Data_Link_Spec.add_specs suite_builder setup
+ Audit_Log_Spec.add_specs suite_builder
main filter=Nothing =
setup = Cloud_Tests_Setup.prepare
diff --git a/test/Table_Tests/src/Database/Common/Audit_Spec.enso b/test/Table_Tests/src/Database/Common/Audit_Spec.enso
new file mode 100644
index 000000000000..eb2adb005e5d
--- /dev/null
+++ b/test/Table_Tests/src/Database/Common/Audit_Spec.enso
@@ -0,0 +1,91 @@
+from Standard.Base import all
+
+from Standard.Table import Table
+
+from Standard.Database import all
+
+from Standard.Test import all
+
+import enso_dev.Base_Tests.Network.Enso_Cloud.Cloud_Tests_Setup.Cloud_Tests_Setup
+from enso_dev.Base_Tests.Network.Enso_Cloud.Audit_Log_Spec import Audit_Log_Event, get_audit_log_events
+
+from project.Util import all
+
+polyglot java import java.lang.Thread
+
+## Tests the audit capabilities of the connection.
+ It takes a data link to the connection. This data link is expected to be a local data link (from the filesystem), not a Cloud one.
+add_specs suite_builder prefix ~datalink_to_connection database_pending =
+ ## By default, these tests are run only on the Cloud mock, not on the real deployment.
+ This is mostly because we don't yet have log filtering so the results on the real deployment could be massive.
+ The local environment is more predictable for running these tests.
+ The following flag can be changed to `False` to run it on the real cloud (if it is set up in the test context).
+ This can be used to verify that the mock logic is consistent with the real thing.
+ always_run_on_mock = True
+ cloud_setup = if always_run_on_mock then Cloud_Tests_Setup.prepare_mock_setup else Cloud_Tests_Setup.prepare
+ suite_builder.group prefix+"Audit Logs" pending=(cloud_setup.pending.if_nothing database_pending) group_builder->
+ group_builder.specify "should see Database operations performed on a table through the standard workflow" <| cloud_setup.with_prepared_environment <|
+ audited_connection = datalink_to_connection.read
+ table_name = "audited-table-"+Random.uuid
+ mem_table = Table.new [["X", [1, 2]], ["Y", ["my_payload", "foo"]]]
+ audited_table = mem_table.select_into_database_table audited_connection table_name temporary=True . should_succeed
+ audited_table.read . should_equal mem_table
+ audited_connection.drop_table table_name . should_succeed
+
+ # Retrying is needed as there may be some delay before the background thread finishes processing the logs.
+ Test.with_retries <|
+ Thread.sleep 500
+ all_events = get_audit_log_events
+ relevant_events = all_events.filter e-> e.message.contains table_name
+
+ create = relevant_events.find (e-> e.message.contains "CREATE")
+ create.should_succeed
+ create.user_email . should_equal Enso_User.current.email
+ create.metadata.get "connection_uri" . should_contain "jdbc:"
+
+ insert = relevant_events.find (e-> e.message.contains "INSERT INTO")
+ insert.should_succeed
+ # The insert query should not contain column cell data - only column names / metadata.
+ insert.message.should_not_contain "my_payload"
+
+ create_sequence_number = create.metadata.get "sequence_number"
+ insert_sequence_number = insert.metadata.get "sequence_number"
+ create_sequence_number.should_be_a Integer
+ insert_sequence_number.should_be_a Integer
+ (create_sequence_number < insert_sequence_number) . should_be_true
+
+ relevant_events.find (e-> e.message.contains "SELECT") . should_succeed
+ relevant_events.find (e-> e.message.contains "DROP") . should_succeed
+
+ group_builder.specify "should see Database operations performed manually" <| cloud_setup.with_prepared_environment <|
+ audited_connection = datalink_to_connection.read
+ query = "SELECT 1 AS A, 2 AS B, "+(Random.integer 0 1000000).to_text+" AS C"
+ t = audited_connection.read query
+ # Force the connector to perform the query:
+ t.at 0 . to_vector . should_equal [1]
+
+ # Retrying is needed as there may be some delay before the background thread finishes processing the logs.
+ Test.with_retries <|
+ all_events = get_audit_log_events
+ ## This is a bit white-box test - we assume the input query is found inside of the query that is run unchanged.
+ Currently that is OK, but if that ever stops being enough the test may need to be amended to be 'smarter'.
+ relevant_event = all_events.find e-> e.message.contains query
+ relevant_event.should_succeed
+
+ group_builder.specify "should still be able to open a local data link if not logged in" <|
+ # The logs should be sent to the local logger, but we cannot easily check that in tests.
+ non_existent_file = (enso_project.data / "nonexistent-credentials-file")
+ non_existent_file.exists.should_be_false
+ Cloud_Tests_Setup.run_with_overridden_credentials non_existent_file <|
+ locally_audited_connection = datalink_to_connection.read
+
+ # We just check that we can read queries through this connection:
+ locally_audited_connection.read "SELECT 1" . at 0 . to_vector . should_equal [1]
+
+ group_builder.specify "should know the asset id of the data link used for the connection" pending="TODO: https://github.com/enso-org/enso/issues/9869" <|
+ ## TODO:
+ 1. write data link to Enso File
+ 2. set up real cloud and establish the connection
+ 3. switch to mock cloud (if wanted) and run some queries
+ 4. inspect logs and search for the asset id
+ Error.throw "TODO"
diff --git a/test/Table_Tests/src/Database/Postgres_Spec.enso b/test/Table_Tests/src/Database/Postgres_Spec.enso
index e7a31551ea0d..289b259c41ee 100644
--- a/test/Table_Tests/src/Database/Postgres_Spec.enso
+++ b/test/Table_Tests/src/Database/Postgres_Spec.enso
@@ -20,6 +20,7 @@ from Standard.Database.Internal.Postgres.Postgres_Connection import parse_postgr
from Standard.Test import all
import Standard.Test.Test_Environment
+import project.Database.Common.Audit_Spec
import project.Database.Common.Common_Spec
import project.Database.Transaction_Spec
import project.Database.Upload_Spec
@@ -862,27 +863,30 @@ add_connection_setup_specs suite_builder = suite_builder.group "[PostgreSQL] Con
[c2, c3, c4].each c->
c.jdbc_properties . should_equal <| add_ssl [Pair.new "user" "other user", Pair.new "password" "other password"]
+transform_file base_file connection_details =
+ content = Data_Link.read_raw_config base_file
+ new_content = content
+ . replace "HOSTNAME" connection_details.host
+ . replace "12345" connection_details.port.to_text
+ . replace "DBNAME" connection_details.database
+ . replace "USERNAME" connection_details.credentials.username
+ . replace "PASSWORD" connection_details.credentials.password
+ temp_file = File.create_temporary_file "postgres-test-db" ".datalink"
+ Data_Link.write_raw_config temp_file new_content replace_existing=True . if_not_error temp_file
+
+type Temporary_Data_Link_File
+ Value ~get
+
+ make connection_details = Temporary_Data_Link_File.Value <|
+ transform_file (enso_project.data / "datalinks" / "postgres-db.datalink") connection_details
+
add_data_link_specs suite_builder =
connection_details = get_configured_connection_details
pending = if connection_details.is_nothing then "PostgreSQL test database is not configured. See README.md for instructions."
-
- # This transforms the datalink file, replacing prepared constants with actual values.
- transform_file base_file =
- content = Data_Link.read_raw_config base_file
- new_content = content
- . replace "HOSTNAME" connection_details.host
- . replace "12345" connection_details.port.to_text
- . replace "DBNAME" connection_details.database
- . replace "USERNAME" connection_details.credentials.username
- . replace "PASSWORD" connection_details.credentials.password
- temp_file = File.create_temporary_file base_file.name base_file.extension
- Data_Link.write_raw_config temp_file new_content replace_existing=True . if_not_error temp_file
-
+ data_link_file = Temporary_Data_Link_File.make connection_details
suite_builder.group "[PostgreSQL] DataLink" pending=pending group_builder->
group_builder.specify "should be able to open a datalink setting up a connection to the database" <|
- data_link_file = transform_file (enso_project.data / "datalinks" / "postgres-db.datalink")
-
- data_link_connection = Data.read data_link_file
+ data_link_connection = Data.read data_link_file.get
Panic.with_finalizer data_link_connection.close <|
data_link_connection.tables.column_names . should_contain "Name"
@@ -892,7 +896,7 @@ add_data_link_specs suite_builder =
q.at "A" . to_vector . should_equal [1]
group_builder.specify "should be able to open a datalink to a particular database table" <|
- data_link_file = transform_file (enso_project.data / "datalinks" / "postgres-table.datalink")
+ table_data_link_file = transform_file (enso_project.data / "datalinks" / "postgres-table.datalink") connection_details
connection = Database.connect connection_details
Panic.with_finalizer connection.close <|
# We create the table that will then be accessed through the datalink, and ensure it's cleaned up afterwards.
@@ -901,30 +905,27 @@ add_data_link_specs suite_builder =
Panic.with_finalizer (connection.drop_table example_table.name) <|
## Now we access this table but this time through a datalink.
Btw. this will keep a connection open until the table is garbage collected, but that is probably fine...
- data_link_table = Data.read data_link_file
+ data_link_table = Data.read table_data_link_file
data_link_table.should_be_a DB_Table
data_link_table.column_names . should_equal ["X", "Y"]
data_link_table.at "X" . to_vector . should_equal [22]
data_link_table.at "Y" . to_vector . should_equal ["o"]
group_builder.specify "will reject any format overrides or stream operations on the data link" <|
- data_link_file = transform_file (enso_project.data / "datalinks" / "postgres-db.datalink")
-
- r1 = Data.read data_link_file Plain_Text
+ r1 = Data.read data_link_file.get Plain_Text
r1.should_fail_with Illegal_Argument
r1.catch.to_display_text . should_contain "Only the default Auto_Detect format should be used"
- r2 = data_link_file.with_input_stream [File_Access.Read] .read_all_bytes
+ r2 = data_link_file.get.with_input_stream [File_Access.Read] .read_all_bytes
r2.should_fail_with Illegal_Argument
r2.catch.to_display_text . should_contain "The Postgres Data Link cannot be opened as a stream"
# But we can read the raw data link if we ask for it:
- r3 = data_link_file.with_input_stream [File_Access.Read, Data_Link_Access.No_Follow] .read_all_bytes
+ r3 = data_link_file.get.with_input_stream [File_Access.Read, Data_Link_Access.No_Follow] .read_all_bytes
r3.should_be_a Vector
group_builder.specify "does not allow to write 'byte' data to a database data link" <|
- data_link_file = transform_file (enso_project.data / "datalinks" / "postgres-db.datalink")
- r = "foobar".write data_link_file
+ r = "foobar".write data_link_file.get
r.should_fail_with Illegal_Argument
r.catch.to_display_text . should_contain "The Postgres Data Link does not support writing"
@@ -936,6 +937,8 @@ add_data_link_specs suite_builder =
r.should_fail_with Illegal_Argument
r.catch.to_display_text . should_contain "The Postgres Data Link cannot be saved to a file."
+ Audit_Spec.add_specs suite_builder "[PostgreSQL] " data_link_file.get database_pending=pending
+
add_specs suite_builder =
add_table_specs suite_builder
diff --git a/test/Table_Tests/src/In_Memory/Table_Spec.enso b/test/Table_Tests/src/In_Memory/Table_Spec.enso
index 03efdb09aca9..3c267053a650 100644
--- a/test/Table_Tests/src/In_Memory/Table_Spec.enso
+++ b/test/Table_Tests/src/In_Memory/Table_Spec.enso
@@ -61,7 +61,7 @@ type Data
Data.Value make_varied_type_table
add_specs suite_builder =
- suite_builder.group "Construction" group_builder->
+ suite_builder.group "Table Construction" group_builder->
data = Data.setup
group_builder.specify "should allow creating a table from rows" <|
@@ -178,6 +178,20 @@ add_specs suite_builder =
col_3 = Column.from_vector 'z' [False, True, False]
col_3.to_vector.map .not . should_equal [True, False, True]
+ suite_builder.group "Table Display" group_builder->
+ group_builder.specify "should be able to display a table" <|
+ t = Table.new [["foo", [10, 20, 30]], ["bar", [False, True, False]]]
+ raw_expected_text = """
+ | foo | bar
+ ---+-----+-------
+ 0 | 10 | False
+ 1 | 20 | True
+ 2 | 30 | False
+ expected_text = raw_expected_text.lines.map .trim . join '\n'
+ raw_got_text = t.display
+ got_text = raw_got_text.lines.map .trim . join '\n'
+ got_text . should_equal expected_text
+
suite_builder.group "Mapping Operations" group_builder->
group_builder.specify "should allow mapping a function over a column" <|
c_str = Column.from_vector 'x' ['a', 'b', Nothing, 'b']
diff --git a/tools/http-test-helper/src/main/java/org/enso/shttp/SimpleHttpHandler.java b/tools/http-test-helper/src/main/java/org/enso/shttp/SimpleHttpHandler.java
index decf491f2387..afe3a7025d1a 100644
--- a/tools/http-test-helper/src/main/java/org/enso/shttp/SimpleHttpHandler.java
+++ b/tools/http-test-helper/src/main/java/org/enso/shttp/SimpleHttpHandler.java
@@ -48,6 +48,11 @@ protected final void sendResponse(
exchange.close();
}
+ protected final void sendEmptyResponse(int code, HttpExchange exchange) throws IOException {
+ exchange.sendResponseHeaders(code, -1);
+ exchange.close();
+ }
+
protected String decodeBodyAsText(HttpExchange exchange) throws IOException {
return new String(exchange.getRequestBody().readAllBytes(), StandardCharsets.UTF_8);
}
diff --git a/tools/http-test-helper/src/main/java/org/enso/shttp/cloud_mock/CloudHandler.java b/tools/http-test-helper/src/main/java/org/enso/shttp/cloud_mock/CloudHandler.java
index c7c87a48795d..cf067616d1f5 100644
--- a/tools/http-test-helper/src/main/java/org/enso/shttp/cloud_mock/CloudHandler.java
+++ b/tools/http-test-helper/src/main/java/org/enso/shttp/cloud_mock/CloudHandler.java
@@ -16,6 +16,8 @@ interface CloudExchange {
void sendResponse(int code, String response) throws IOException;
+ void sendEmptyResponse(int code) throws IOException;
+
String decodeBodyAsText() throws IOException;
HttpMethod getMethod();
diff --git a/tools/http-test-helper/src/main/java/org/enso/shttp/cloud_mock/CloudRoot.java b/tools/http-test-helper/src/main/java/org/enso/shttp/cloud_mock/CloudRoot.java
index af5a8783982b..de227d37ff30 100644
--- a/tools/http-test-helper/src/main/java/org/enso/shttp/cloud_mock/CloudRoot.java
+++ b/tools/http-test-helper/src/main/java/org/enso/shttp/cloud_mock/CloudRoot.java
@@ -10,19 +10,24 @@ public class CloudRoot extends HandlerWithTokenAuth {
public final String prefix = "/enso-cloud-mock/";
private final ExpiredTokensCounter expiredTokensCounter;
- private final AssetStore assetStore = new AssetStore();
- private final CloudHandler[] handlers =
- new CloudHandler[] {
- new UsersHandler(),
- new SecretsHandler(assetStore),
- new HiddenSecretsHandler(assetStore),
- new AssetsHandler(assetStore),
- new PathResolver(assetStore),
- new DirectoriesHandler(assetStore)
- };
+ private final CloudHandler[] handlers;
public CloudRoot(ExpiredTokensCounter expiredTokensCounter) {
this.expiredTokensCounter = expiredTokensCounter;
+ AssetStore assetStore = new AssetStore();
+ UsersService usersService = new UsersService();
+ EventsService eventsService = new EventsService();
+ this.handlers =
+ new CloudHandler[] {
+ new UsersHandler(usersService),
+ new SecretsHandler(assetStore),
+ new HiddenSecretsHandler(assetStore),
+ new AssetsHandler(assetStore),
+ new PathResolver(assetStore),
+ new DirectoriesHandler(assetStore),
+ new GetLogsHandler(eventsService),
+ new PostLogHandler(usersService, eventsService)
+ };
}
@Override
@@ -59,6 +64,7 @@ protected void handleAuthorized(HttpExchange exchange) throws IOException {
}
}
+ System.err.println("No handler found for request: " + subPath);
sendResponse(404, "No handler found for: " + subPath, exchange);
}
@@ -79,6 +85,11 @@ public void sendResponse(int code, String response) throws IOException {
CloudRoot.this.sendResponse(code, response, exchange);
}
+ @Override
+ public void sendEmptyResponse(int code) throws IOException {
+ CloudRoot.this.sendEmptyResponse(code, exchange);
+ }
+
@Override
public String decodeBodyAsText() throws IOException {
return CloudRoot.this.decodeBodyAsText(exchange);
diff --git a/tools/http-test-helper/src/main/java/org/enso/shttp/cloud_mock/EventsService.java b/tools/http-test-helper/src/main/java/org/enso/shttp/cloud_mock/EventsService.java
new file mode 100644
index 000000000000..084777edfa27
--- /dev/null
+++ b/tools/http-test-helper/src/main/java/org/enso/shttp/cloud_mock/EventsService.java
@@ -0,0 +1,29 @@
+package org.enso.shttp.cloud_mock;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import java.util.ArrayList;
+import java.util.List;
+
+/** An in-memory store of audit-log events recorded by the cloud mock. */
+public class EventsService {
+  /** A single audit-log event, mirroring the fields reported by the cloud log API. */
+  public record LogEvent(
+      String organizationId,
+      String userEmail,
+      String timestamp,
+      JsonNode metadata,
+      String message,
+      String projectId) {}
+
+  /** Appends an event to the store. */
+  public void recordEvent(LogEvent event) {
+    events.add(event);
+  }
+
+  /** Returns a defensive copy of all recorded events, in insertion order. */
+  public List<LogEvent> getEvents() {
+    return new ArrayList<>(events);
+  }
+
+  private final ArrayList<LogEvent> events = new ArrayList<>();
+}
diff --git a/tools/http-test-helper/src/main/java/org/enso/shttp/cloud_mock/GetLogsHandler.java b/tools/http-test-helper/src/main/java/org/enso/shttp/cloud_mock/GetLogsHandler.java
new file mode 100644
index 000000000000..261b3616569b
--- /dev/null
+++ b/tools/http-test-helper/src/main/java/org/enso/shttp/cloud_mock/GetLogsHandler.java
@@ -0,0 +1,29 @@
+package org.enso.shttp.cloud_mock;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import java.io.IOException;
+import java.util.List;
+
+/** Serves all audit-log events recorded so far as a single JSON document. */
+public class GetLogsHandler implements CloudHandler {
+  private final EventsService events;
+  private final ObjectMapper jsonMapper = new ObjectMapper();
+
+  public GetLogsHandler(EventsService events) {
+    this.events = events;
+  }
+
+  @Override
+  public boolean canHandle(String subPath) {
+    return subPath.equals("log_events");
+  }
+
+  @Override
+  public void handleCloudAPI(CloudExchange exchange) throws IOException {
+    var response = new GetLogEventsResponse(events.getEvents());
+    exchange.sendResponse(200, jsonMapper.writeValueAsString(response));
+  }
+
+  // Wrapper record so the payload serializes as { "events": [...] }.
+  private record GetLogEventsResponse(List<EventsService.LogEvent> events) {}
+}
diff --git a/tools/http-test-helper/src/main/java/org/enso/shttp/cloud_mock/PostLogHandler.java b/tools/http-test-helper/src/main/java/org/enso/shttp/cloud_mock/PostLogHandler.java
new file mode 100644
index 000000000000..963c01487752
--- /dev/null
+++ b/tools/http-test-helper/src/main/java/org/enso/shttp/cloud_mock/PostLogHandler.java
@@ -0,0 +1,55 @@
+package org.enso.shttp.cloud_mock;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import java.io.IOException;
+import java.time.ZoneId;
+import java.time.ZonedDateTime;
+import org.enso.shttp.HttpMethod;
+
+/** Accepts POSTed audit-log events and records them in the {@link EventsService}. */
+public class PostLogHandler implements CloudHandler {
+  private final UsersService users;
+  private final EventsService events;
+  private final ObjectMapper jsonMapper = new ObjectMapper();
+
+  public PostLogHandler(UsersService users, EventsService events) {
+    this.users = users;
+    this.events = events;
+  }
+
+  @Override
+  public boolean canHandle(String subPath) {
+    return subPath.equals("logs");
+  }
+
+  @Override
+  public void handleCloudAPI(CloudExchange exchange) throws IOException {
+    if (exchange.getMethod() != HttpMethod.POST) {
+      exchange.sendResponse(405, "Method Not Allowed");
+      return;
+    }
+
+    try {
+      // Delay recording the event to simulate network conditions
+      Thread.sleep(100);
+    } catch (InterruptedException e) {
+      // Restore the interrupt status so callers can still observe the interruption.
+      Thread.currentThread().interrupt();
+    }
+
+    JsonNode root = jsonMapper.readTree(exchange.decodeBodyAsText());
+    String message = root.get("message").asText();
+    String organizationId = users.currentUserOrganizationId();
+    String userEmail = users.currentUserEmail();
+    // Timestamps are normalized to UTC before being stored.
+    String timestamp = ZonedDateTime.now().withZoneSameInstant(ZoneId.of("UTC")).toString();
+    JsonNode metadata = root.get("metadata");
+    String projectId = root.get("projectId").asText();
+    EventsService.LogEvent event =
+        new EventsService.LogEvent(
+            organizationId, userEmail, timestamp, metadata, message, projectId);
+    events.recordEvent(event);
+    exchange.sendEmptyResponse(204);
+  }
+}
diff --git a/tools/http-test-helper/src/main/java/org/enso/shttp/cloud_mock/UsersHandler.java b/tools/http-test-helper/src/main/java/org/enso/shttp/cloud_mock/UsersHandler.java
index 2f49ebac4b3b..5cef7f0ff6ce 100644
--- a/tools/http-test-helper/src/main/java/org/enso/shttp/cloud_mock/UsersHandler.java
+++ b/tools/http-test-helper/src/main/java/org/enso/shttp/cloud_mock/UsersHandler.java
@@ -5,6 +5,7 @@
public class UsersHandler implements CloudHandler {
private static final String USERS = "users";
+ private final String currentUser;
@Override
public boolean canHandle(String subPath) {
@@ -41,29 +42,33 @@ private void sendUserList(CloudExchange exchange) throws IOException {
exchange.sendResponse(200, response);
}
- private final String currentUser =
- """
- {
- "userId": "user-2Xcxm00p8jWoL2qByTo6tQfciWC",
- "organizationId": "organization-27xJM00p8jWoL2qByTo6tQfciWC",
- "name": "My test User 1",
- "organizationName": "Test.ORG",
- "email": "enso-test-user-1@example.com",
- "isEnabled": true,
- "rootDirectoryId": "directory-27xJM00p8jWoL2qByTo6tQfciWC"
- }
- """;
+ private final String otherUser;
- private final String otherUser =
- """
- {
- "userId": "user-44AAA00A8AAAA2AAAAA6AAAAAAA",
- "organizationId": "organization-27xJM00p8jWoL2qByTo6tQfciWC",
- "name": "My test User 2",
- "organizationName": "Test.ORG",
- "email": "enso-test-user-2@example.com",
- "isEnabled": false,
- "rootDirectoryId": "directory-27xJM00p8jWoL2qByTo6tQfciWC"
- }
- """;
+ public UsersHandler(UsersService usersService) {
+ currentUser =
+ """
+ {
+ "userId": "user-2Xcxm00p8jWoL2qByTo6tQfciWC",
+ "organizationId": "%s",
+ "name": "My test User 1",
+ "organizationName": "Test.ORG",
+ "email": "%s",
+ "isEnabled": true,
+ "rootDirectoryId": "directory-27xJM00p8jWoL2qByTo6tQfciWC"
+ }
+ """
+ .formatted(usersService.currentUserOrganizationId(), usersService.currentUserEmail());
+ otherUser =
+ """
+ {
+ "userId": "user-44AAA00A8AAAA2AAAAA6AAAAAAA",
+ "organizationId": "organization-27xJM00p8jWoL2qByTo6tQfciWC",
+ "name": "My test User 2",
+ "organizationName": "Test.ORG",
+ "email": "enso-test-user-2@example.com",
+ "isEnabled": false,
+ "rootDirectoryId": "directory-27xJM00p8jWoL2qByTo6tQfciWC"
+ }
+ """;
+ }
}
diff --git a/tools/http-test-helper/src/main/java/org/enso/shttp/cloud_mock/UsersService.java b/tools/http-test-helper/src/main/java/org/enso/shttp/cloud_mock/UsersService.java
new file mode 100644
index 000000000000..1400baebe93c
--- /dev/null
+++ b/tools/http-test-helper/src/main/java/org/enso/shttp/cloud_mock/UsersService.java
@@ -0,0 +1,15 @@
+package org.enso.shttp.cloud_mock;
+
+/** Provides the identity of the mock cloud's fixed test user. */
+public class UsersService {
+  private static final String ORGANIZATION_ID = "organization-27xJM00p8jWoL2qByTo6tQfciWC";
+  private static final String EMAIL = "enso-test-user-1@example.com";
+
+  public String currentUserOrganizationId() {
+    return ORGANIZATION_ID;
+  }
+
+  public String currentUserEmail() {
+    return EMAIL;
+  }
+}