
Commit

Merge pull request #5128 from debrief/develop
Update, for release
IanMayo committed Aug 11, 2021
2 parents fd2fe45 + 012fda9 commit ae1fd68
Showing 29 changed files with 1,242 additions and 913 deletions.
2 changes: 1 addition & 1 deletion org.mwc.cmap.combined.feature/feature.xml
@@ -17,7 +17,7 @@
<feature
id="org.mwc.cmap.combined.feature"
label="%featureName"
version="1.1.22"
version="1.1.23"
provider-name="MWC"
plugin="org.mwc.cmap.core">

2 changes: 1 addition & 1 deletion org.mwc.cmap.combined.feature/pom.xml
@@ -24,6 +24,6 @@
</parent>
<groupId>org.mwc.debrief</groupId>
<artifactId>org.mwc.cmap.combined.feature</artifactId>
-<version>1.1.22</version>
+<version>1.1.23</version>
<packaging>eclipse-feature</packaging>
</project>
2 changes: 1 addition & 1 deletion org.mwc.cmap.grideditor/META-INF/MANIFEST.MF
@@ -2,7 +2,7 @@ Manifest-Version: 1.0
Bundle-ManifestVersion: 2
Bundle-Name: Gridded data editor
Bundle-SymbolicName: org.mwc.cmap.grideditor;singleton:=true
-Bundle-Version: 1.0.29
+Bundle-Version: 1.0.30
Bundle-Vendor: MWC
Bundle-RequiredExecutionEnvironment: JavaSE-11
Bundle-ActivationPolicy: lazy
2 changes: 1 addition & 1 deletion org.mwc.cmap.grideditor/pom.xml
@@ -24,6 +24,6 @@
</parent>
<groupId>org.mwc.debrief</groupId>
<artifactId>org.mwc.cmap.grideditor</artifactId>
-<version>1.0.29</version>
+<version>1.0.30</version>
<packaging>eclipse-plugin</packaging>
</project>
9 changes: 1 addition & 8 deletions org.mwc.debrief.combined.feature/feature.xml
@@ -16,7 +16,7 @@
<feature
id="org.mwc.debrief.combined.feature"
label="%featureName"
version="1.1.30"
version="1.1.31"
provider-name="MWC"
plugin="org.mwc.debrief.core">

@@ -491,13 +491,6 @@ Library.
version="0.0.0"
unpack="false"/>

-<plugin
-id="org.eclipse.ease.lang.javascript.rhino.debugger"
-download-size="0"
-install-size="0"
-version="0.0.0"
-unpack="false"/>

<plugin
id="org.eclipse.ease.lang.javascript.ui"
download-size="0"
2 changes: 1 addition & 1 deletion org.mwc.debrief.combined.feature/pom.xml
@@ -24,6 +24,6 @@
</parent>
<groupId>org.mwc.debrief</groupId>
<artifactId>org.mwc.debrief.combined.feature</artifactId>
-<version>1.1.30</version>
+<version>1.1.31</version>
<packaging>eclipse-feature</packaging>
</project>
2 changes: 1 addition & 1 deletion org.mwc.debrief.core/META-INF/MANIFEST.MF
@@ -2,7 +2,7 @@ Manifest-Version: 1.0
Bundle-ManifestVersion: 2
Bundle-Name: Debrief Core
Bundle-SymbolicName: org.mwc.debrief.core; singleton:=true
-Bundle-Version: 1.0.402
+Bundle-Version: 1.0.403
Bundle-ClassPath: debrief_core.jar,
libs/jdom-1.0.jar,
libs/eclipselink.jar
6 changes: 3 additions & 3 deletions org.mwc.debrief.core/about.mappings
@@ -2,6 +2,6 @@
# contains fill-ins for about.properties
# java.io.Properties file (ISO 8859-1 with "\" escapes)
# This file does not need to be translated.
-0=3.1.31
-1=20210520
-2=2021-05-20
+0=3.1.32
+1=20210811
+2=2021-08-11
2 changes: 1 addition & 1 deletion org.mwc.debrief.core/pom.xml
@@ -23,7 +23,7 @@
<version>0.0.1-SNAPSHOT</version>
</parent>
<artifactId>org.mwc.debrief.core</artifactId>
-<version>1.0.402</version>
+<version>1.0.403</version>
<packaging>eclipse-plugin</packaging>


2 changes: 1 addition & 1 deletion org.mwc.debrief.legacy/META-INF/MANIFEST.MF
@@ -2,7 +2,7 @@ Manifest-Version: 1.0
Bundle-ManifestVersion: 2
Bundle-Name: Legacy Plug-in
Bundle-SymbolicName: org.mwc.debrief.legacy;singleton:=true
-Bundle-Version: 1.0.445
+Bundle-Version: 1.0.446
Bundle-ClassPath: debrief_legacy.jar,
libs/pdfbox-2.0.3.jar,
libs/commons-logging-1.2.jar,
2 changes: 1 addition & 1 deletion org.mwc.debrief.legacy/pom.xml
@@ -24,7 +24,7 @@
</parent>
<groupId>org.mwc.debrief</groupId>
<artifactId>org.mwc.debrief.legacy</artifactId>
-<version>1.0.445</version>
+<version>1.0.446</version>
<packaging>eclipse-plugin</packaging>
<build>
<plugins>
2 changes: 1 addition & 1 deletion org.mwc.debrief.pepys/META-INF/MANIFEST.MF
@@ -2,7 +2,7 @@ Manifest-Version: 1.0
Bundle-ManifestVersion: 2
Bundle-Name: Pepys
Bundle-SymbolicName: org.mwc.debrief.pepys;singleton:=true
-Bundle-Version: 1.0.12
+Bundle-Version: 1.0.13
Automatic-Module-Name: org.mwc.debref.pepys
Bundle-RequiredExecutionEnvironment: JavaSE-11
Bundle-ClassPath: libs/c3p0-0.9.5.5.jar,
84 changes: 55 additions & 29 deletions org.mwc.debrief.pepys/comments.sql
@@ -1,45 +1,71 @@
with
ui_filter_input as
(select
-? as start_time, --Input should be same as for Phase 1
-? as end_time, --Input should be same as for Phase 1
-? as comment_search_string, --Input should be same as for Phase 1
-? as sensor_id, --Input from Phase 2 of import, can be set as null: null as sensor_id
-? as source_id, --Input from Phase 2 of import, can be set as null: null as source_id
-? as platform_id, --Input from Phase 2 of import, can be set as null: null as platform_id
+? start_time, --Input should be same as for Phase 1
+? end_time, --Input should be same as for Phase 1
+? comment_search_string, --Input should be same as for Phase 1
+?::text[] source_id, --Input from Phase 2 of import, can be set as null: null as source_id
+?::text[] platform_id, --Input from Phase 2 of import, can be set as null: null as platform_id
--null as platform_id, --Example on how to provide null
-1 as page_no, --Pagination input. Page No For ex. if there are 1000 records paginated into pages of 100 records each, 1 here will return the first page or first 100 records
-1000000 as page_size --Pagination input - No. of records per page
+1::integer page_no, --Pagination input. Page No For ex. if there are 1000 records paginated into pages of 100 records each, 1 here will return the first page or first 100 records
+1000000::integer page_size --Pagination input - No. of records per page
),
processed_ui_filter_values as
(select
case when (trim(ui_input.start_time)='' OR ui_input.start_time is null) then '1000-01-01 00:00:00.000000'::timestamp else to_timestamp(ui_input.start_time, 'YYYY-MM-DD HH24:MI:SS.US') end as start_time,
case when (trim(ui_input.end_time)='' OR ui_input.end_time is null) then '9999-12-12 23:59:59.000000'::timestamp else to_timestamp(ui_input.end_time, 'YYYY-MM-DD HH24:MI:SS.US') end as end_time,
case when (trim(ui_input.comment_search_string)='' OR ui_input.comment_search_string is null) then null::varchar else '%'||upper(ui_input.comment_search_string)||'%' end as comment_search_string,
-case when (trim(ui_input.sensor_id)='' OR ui_input.sensor_id is null) then null else string_to_array(ui_input.sensor_id,',') end as sensor_id,
-case when (trim(ui_input.source_id)='' OR ui_input.source_id is null) then null else string_to_array(ui_input.source_id,',') end as source_id,
-case when (trim(ui_input.platform_id)='' OR ui_input.platform_id is null) then null else string_to_array(ui_input.platform_id,',') end as platform_id,
-case when (ui_input.page_no is null OR ui_input.page_no <=0) then 1 else ui_input.page_no end as page_no,
-case when (ui_input.page_size is null OR ui_input.page_size <=0) then 100 else ui_input.page_size end as page_size
+case when (coalesce(array_length(ui_input.source_id,1),0)::int = 0) then null else ui_input.source_id end as source_id,
+case when (coalesce(array_length(ui_input.platform_id,1),0)::int = 0) then null else ui_input.platform_id end as platform_id
from
ui_filter_input as ui_input
-)
-select filtered_comments.comment_id, filtered_comments.time, Platforms.name as platform_name,
-PlatformTypes.name as platform_type_name, Nationalities.name as nationalities_name,
-filtered_comments.content, CommentTypes.name as comment_type_name from
-pepys."Comments" as filtered_comments inner join
-pepys."Platforms" as Platforms on filtered_comments.platform_id=Platforms.platform_id inner join
-pepys."PlatformTypes" as PlatformTypes on Platforms.platform_type_id = PlatformTypes.platform_type_id inner join
-pepys."Nationalities" as Nationalities on Platforms.nationality_id = Nationalities.nationality_id inner join
-pepys."CommentTypes" as CommentTypes on filtered_comments.comment_type_id = CommentTypes.comment_type_id
-WHERE
+),
+filtered_comments as
+(
+select
+com.comment_id,
+com.time,
+com.content,
+com.source_id,
+com.platform_id,
+com.comment_type_id
+from
+pepys."Comments" com
+where
--Start and End Time criteria from the UI
-tsrange((select start_time::timestamp from processed_ui_filter_values), (select end_time::timestamp from processed_ui_filter_values), '[]') @> filtered_comments.time AND
+tsrange((select start_time::timestamp from processed_ui_filter_values), (select end_time::timestamp from processed_ui_filter_values), '[]') @> com.time AND
--Comment search criteria from the UI
-((select comment_search_string from processed_ui_filter_values) is null OR upper(filtered_comments.content) like (select comment_search_string from processed_ui_filter_values)) AND
+((select comment_search_string from processed_ui_filter_values) is null OR upper(com.content) like (select comment_search_string from processed_ui_filter_values)) AND
--Source criteria from the UI
-((select source_id from processed_ui_filter_values) is null OR filtered_comments.source_id in (select unnest(source_id::uuid[]) from processed_ui_filter_values)) AND
+((select source_id from processed_ui_filter_values) is null OR com.source_id in (select unnest(source_id::uuid[]) from processed_ui_filter_values)) AND
--Platform criteria from the UI
-((select platform_id from processed_ui_filter_values) is null OR filtered_comments.platform_id in (select unnest(platform_id::uuid[]) from processed_ui_filter_values))
---Sort clause for pagination
-order by filtered_comments.comment_id asc limit (select page_size from processed_ui_filter_values) offset (select page_size*(page_no -1) from processed_ui_filter_values);
+((select platform_id from processed_ui_filter_values) is null OR com.platform_id in (select unnest(platform_id::uuid[]) from processed_ui_filter_values))
+),
+filtered_limits as
+(select
+case when (ui_input.page_no = -1 OR ui_input.page_size = -1) then 1 else ui_input.page_no end as page_no,
+case when (ui_input.page_no = -1 OR ui_input.page_size = -1) then (select count(1) from filtered_comments) else ui_input.page_size end as page_size
+from
+ui_filter_input as ui_input
+)
+select
+filtered_comments.comment_id,
+filtered_comments.time,
+Platforms.name as platform_name,
+PlatformTypes.name as platform_type_name,
+Nationalities.name as nationalities_name,
+filtered_comments.content,
+CommentTypes.name as comment_type_name,
+Datafiles.reference
+from
+filtered_comments inner join
+pepys."Datafiles" as Datafiles on Datafiles.datafile_id=filtered_comments.source_id inner join
+pepys."Platforms" as Platforms on filtered_comments.platform_id=Platforms.platform_id inner join
+pepys."PlatformTypes" as PlatformTypes on Platforms.platform_type_id = PlatformTypes.platform_type_id inner join
+pepys."Nationalities" as Nationalities on Platforms.nationality_id = Nationalities.nationality_id inner join
+pepys."CommentTypes" as CommentTypes on filtered_comments.comment_type_id = CommentTypes.comment_type_id
+--Sort clause for pagination
+order by
+filtered_comments.comment_id asc
+limit (select page_size from filtered_limits)
+offset (select page_size*(page_no -1) from filtered_limits);
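Note on calling the reworked query: the five placeholders in the new comments.sql are, in order, start_time, end_time and comment_search_string (all strings), then source_id and platform_id as Postgres text[] arrays, with page_no and page_size now fixed inside the query. A minimal JDBC sketch of how a caller might bind them follows; the class, method and variable names are illustrative only and assume the stock PostgreSQL JDBC driver, not the actual Debrief/Pepys calling code (which is not part of this diff).

// Hypothetical caller for the reworked comments.sql (not part of this commit):
// the first three placeholders stay strings, while source_id and platform_id
// are now bound as Postgres text[] arrays, or SQL NULL to skip that filter.
import java.sql.Array;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Types;

public class CommentsQuerySketch {

  public static ResultSet runCommentsQuery(final Connection conn, final String sql,
      final String startTime, final String endTime, final String searchString,
      final String[] sourceIds, final String[] platformIds) throws SQLException {
    final PreparedStatement ps = conn.prepareStatement(sql);
    ps.setString(1, startTime);              // "" or null leaves the start open-ended
    ps.setString(2, endTime);                // "" or null leaves the end open-ended
    ps.setString(3, searchString);           // case-insensitive substring match in the SQL
    bindTextArray(ps, 4, conn, sourceIds);   // ?::text[] source_id
    bindTextArray(ps, 5, conn, platformIds); // ?::text[] platform_id
    return ps.executeQuery();
  }

  // Bind a text[] parameter, or SQL NULL so the query treats the filter as unset.
  static void bindTextArray(final PreparedStatement ps, final int index,
      final Connection conn, final String[] values) throws SQLException {
    if (values == null || values.length == 0) {
      ps.setNull(index, Types.ARRAY);
    } else {
      final Array array = conn.createArrayOf("text", values);
      ps.setArray(index, array);
    }
  }
}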
1 change: 0 additions & 1 deletion org.mwc.debrief.pepys/commentsProc.sql
@@ -2,7 +2,6 @@ with parameters as
(select ? as start_time,
? as end_time,
? as comment_search_string,
-? as sensor_id,
? as source_id,
? as platform_id)

125 changes: 94 additions & 31 deletions org.mwc.debrief.pepys/contacts.sql
@@ -1,48 +1,111 @@
with
-ui_filter_input as
+ui_filter_input as
(select
-? as start_time, --Input should be same as for Phase 1
-? as end_time, --Input should be same as for Phase 1
-? as location, --Input should be same as for Phase 1
-? as sensor_id, --Input from Phase 2 of import, can be set as null: null as sensor_id
-? as source_id, --Input from Phase 2 of import, can be set as null: null as source_id
-? as platform_id, --Input from Phase 2 of import, can be set as null: null as platform_id
+? start_time, --Input should be same as for Phase 1
+? end_time, --Input should be same as for Phase 1
+? "location", --Input should be same as for Phase 1
+?::text[] sensor_id, --Input from Phase 2 of import, can be set as null: null as sensor_id
+?::text[] source_id, --Input from Phase 2 of import, can be set as null: null as source_id
+?::text[] platform_id, --Input from Phase 2 of import, can be set as null: null as platform_id
--null as platform_id, --Example on how to provide null
-1 as page_no, --Pagination input. Page No For ex. if there are 1000 records paginated into pages of 100 records each, 1 here will return the first page or first 100 records
-1000000 as page_size --Pagination input - No. of records per page
+1::integer page_no, --Pagination input. Page No For ex. if there are 1000 records paginated into pages of 100 records each, 1 here will return the first page or first 100 records
+1000000::integer page_size --Pagination input - No. of records per page
),
processed_ui_filter_values as
(select
case when (trim(ui_input.start_time)='' OR ui_input.start_time is null) then '1000-01-01 00:00:00.000000'::timestamp else to_timestamp(ui_input.start_time, 'YYYY-MM-DD HH24:MI:SS.US') end as start_time,
case when (trim(ui_input.end_time)='' OR ui_input.end_time is null) then '9999-12-12 23:59:59.000000'::timestamp else to_timestamp(ui_input.end_time, 'YYYY-MM-DD HH24:MI:SS.US') end as end_time,
case when (trim(ui_input.location)='' OR ui_input.location is null) then null else ST_GeomFromText(ui_input.location) end as location,
-case when (trim(ui_input.sensor_id)='' OR ui_input.sensor_id is null) then null else string_to_array(ui_input.sensor_id,',') end as sensor_id,
-case when (trim(ui_input.source_id)='' OR ui_input.source_id is null) then null else string_to_array(ui_input.source_id,',') end as source_id,
-case when (trim(ui_input.platform_id)='' OR ui_input.platform_id is null) then null else string_to_array(ui_input.platform_id,',') end as platform_id,
+case when (coalesce(array_length(ui_input.sensor_id,1),0)::int = 0) then null else ui_input.sensor_id end as sensor_id,
+case when (coalesce(array_length(ui_input.source_id,1),0)::int = 0) then null else ui_input.source_id end as source_id,
+case when (coalesce(array_length(ui_input.platform_id,1),0)::int = 0) then null else ui_input.platform_id end as platform_id,
case when (ui_input.page_no is null OR ui_input.page_no <=0) then 1 else ui_input.page_no end as page_no,
case when (ui_input.page_size is null OR ui_input.page_size <=0) then 100 else ui_input.page_size end as page_size
from
ui_filter_input as ui_input
-)
-select filtered_contacts.contact_id, filtered_contacts.time, Sensors.name as sensor_name, Platforms.name as platform_name,
-PlatformTypes.name as platform_type_name, Nationalities.name as nationality_name,
-filtered_contacts.bearing, filtered_contacts.range, filtered_contacts.location, Datafiles.reference from
-pepys."Contacts" as filtered_contacts inner join
-pepys."Sensors" as Sensors on filtered_contacts.sensor_id = Sensors.sensor_id inner join
-pepys."Platforms" as Platforms on Sensors.host=Platforms.platform_id inner join
-pepys."PlatformTypes" as PlatformTypes on Platforms.platform_type_id = PlatformTypes.platform_type_id inner join
-pepys."Nationalities" as Nationalities on Platforms.nationality_id = Nationalities.nationality_id inner join
-pepys."Datafiles" as Datafiles on filtered_contacts.source_id = Datafiles.datafile_id
-WHERE
+),
+selected_sensors as
+(select
+sensor_id,
+name,
+host
+from
+pepys."Sensors" sen
+where
+--Platform criteria from the UI
+((select platform_id from processed_ui_filter_values) is null OR sen.host in (select unnest(platform_id::uuid[]) from processed_ui_filter_values)) AND
+--Sensor criteria from the UI
+((select sensor_id from processed_ui_filter_values) is null OR sen.sensor_id in (select unnest(sensor_id::uuid[]) from processed_ui_filter_values))
+),
+filtered_sensors as
+(select
+sen.sensor_id,
+sen.name sensor_name,
+plat.name platform_name,
+platty.name platformtype_name,
+nat.name nationality_name
+from
+selected_sensors as sen inner join
+pepys."Platforms" as plat on sen.host=plat.platform_id inner join
+pepys."PlatformTypes" as platty on plat.platform_type_id = platty.platform_type_id inner join
+pepys."Nationalities" as nat on plat.nationality_id = nat.nationality_id
+),
+filtered_datafiles as
+(select
+datafile_id,
+reference
+from
+pepys."Datafiles" dat
+where
+--Source criteria from the UI
+((select source_id from processed_ui_filter_values) is null OR dat.datafile_id in (select unnest(source_id::uuid[]) from processed_ui_filter_values))
+),
+filtered_contacts as
+(
+select
+con.contact_id,
+con.time,
+con.bearing,
+con.range,
+con.location,
+con.source_id,
+con.sensor_id
+from
+pepys."Contacts" con
+where
--Start and End Time criteria from the UI
-tsrange((select start_time::timestamp from processed_ui_filter_values), (select end_time::timestamp from processed_ui_filter_values), '[]') @> filtered_contacts.time AND
+tsrange((select start_time::timestamp from processed_ui_filter_values), (select end_time::timestamp from processed_ui_filter_values), '[]') @> con.time AND
--Spatial criteria from the UI
-((select location from processed_ui_filter_values) is null OR ST_Contains((select location from processed_ui_filter_values),filtered_contacts.location)) AND
+((select location from processed_ui_filter_values) is null OR ST_Contains((select location from processed_ui_filter_values),con.location)) AND
--Sensor criteria from the UI
-((select sensor_id from processed_ui_filter_values) is null OR filtered_contacts.sensor_id in (select unnest(sensor_id::uuid[]) from processed_ui_filter_values)) AND
+((select sensor_id from processed_ui_filter_values) is null OR con.sensor_id in (select unnest(sensor_id::uuid[]) from processed_ui_filter_values)) AND
--Source criteria from the UI
-((select source_id from processed_ui_filter_values) is null OR filtered_contacts.source_id in (select unnest(source_id::uuid[]) from processed_ui_filter_values)) AND
---Platform criteria from the UI
-((select platform_id from processed_ui_filter_values) is null OR Platforms.platform_id in (select unnest(platform_id::uuid[]) from processed_ui_filter_values))
---Sort clause for pagination
-order by filtered_contacts.contact_id asc limit (select page_size from processed_ui_filter_values) offset (select page_size*(page_no -1) from processed_ui_filter_values);
+((select source_id from processed_ui_filter_values) is null OR con.source_id in (select unnest(source_id::uuid[]) from processed_ui_filter_values))
+),
+filtered_limits as
+(select
+case when (ui_input.page_no = -1 OR ui_input.page_size = -1) then 1 else ui_input.page_no end as page_no,
+case when (ui_input.page_no = -1 OR ui_input.page_size = -1) then (select count(1) from filtered_contacts) else ui_input.page_size end as page_size
+from
+ui_filter_input as ui_input
+)
+select
+filtered_contacts.contact_id,
+filtered_contacts.time,
+filtered_sensors.sensor_name,
+filtered_sensors.platform_name,
+filtered_sensors.platformtype_name,
+filtered_sensors.nationality_name,
+filtered_contacts.bearing,
+filtered_contacts.range,
+filtered_contacts.location,
+filtered_datafiles.reference
+from
+filtered_contacts inner join
+filtered_datafiles on filtered_contacts.source_id=filtered_datafiles.datafile_id inner join
+filtered_sensors on filtered_contacts.sensor_id = filtered_sensors.sensor_id
+--Sort clause for pagination
+order by
+filtered_contacts.contact_id asc
+limit (select page_size from filtered_limits)
+offset (select page_size*(page_no -1) from filtered_limits);
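contacts.sql follows the same pattern with six placeholders: start_time, end_time and location (WKT text, converted server-side by ST_GeomFromText), then sensor_id, source_id and platform_id as text[] arrays. A hypothetical fragment, again a sketch rather than the project's real calling code, with example literal values:

// Hypothetical fragment for the reworked contacts.sql, reusing the bindTextArray
// helper above; conn, contactsSql, sensorIds, sourceIds and platformIds are
// assumed to be in scope, as in the previous sketch.
final PreparedStatement ps = conn.prepareStatement(contactsSql);
ps.setString(1, "2021-08-11 00:00:00.000000");                  // start_time
ps.setString(2, "2021-08-11 23:59:59.000000");                  // end_time
ps.setString(3, "POLYGON((-1 50, -1 51, 1 51, 1 50, -1 50))");  // location as WKT; "" disables the spatial filter
bindTextArray(ps, 4, conn, sensorIds);                          // ?::text[] sensor_id
bindTextArray(ps, 5, conn, sourceIds);                          // ?::text[] source_id, matched against Datafiles
bindTextArray(ps, 6, conn, platformIds);                        // ?::text[] platform_id, matched against Sensors.host
final ResultSet rs = ps.executeQuery();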
2 changes: 1 addition & 1 deletion org.mwc.debrief.pepys/contactsProc.sql
@@ -1 +1 @@
-select contact_id, contact_time as time, sensor_name, platform_name, platformtype_name as platform_type_name, nationality_name, bearing, range, contact_location as location, reference, null as name from pepys.contacts_for (?, ?, ?, ?, ?, ?)
+select contact_id, contact_time as time, sensor_name, platform_name, platformtype_name, nationality_name, bearing, range, contact_location as location, reference from pepys.contacts_for (?, ?, ?, ?, ?, ?)

0 comments on commit ae1fd68
