Merge pull request #23 from xspanger3770/develop
Dev merge from upstream
DecryptingElectrons committed Apr 24, 2024
2 parents df8d5e8 + b980a36 commit e89c51e
Showing 220 changed files with 6,333 additions and 5,132 deletions.
8 changes: 8 additions & 0 deletions .construction_tools/revised_EMSC_region_data/access_note.txt
@@ -15,3 +15,11 @@ Several European country borders have been sourced from UNHCR.
HUMDATA is the Humanitarian Data Exchange, which is provided by the OCHA Regional Office for Asia and the Pacific.

Japan prefectures have been added from HUMDATA

Turkey regions were accessed from the Overpass API, with this query:
https://overpass-turbo.eu/s/1I7F

[out:json][timeout:25];
{{geocodeArea:Turkey}}->.searchArea;
nwr["admin_level"="3"](area.searchArea);
out geom;
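
As a sketch of how the same extract could be reproduced non-interactively: the endpoint, output filename, and the area filter below are assumptions ({{geocodeArea:Turkey}} is an overpass-turbo macro, so a plain Overpass QL area filter stands in for it), and the response is raw Overpass JSON rather than GeoJSON.

# Assumed public Overpass endpoint and output name; the query mirrors the one above.
curl -s "https://overpass-api.de/api/interpreter" \
  --data-urlencode 'data=[out:json][timeout:25];
area["ISO3166-1"="TR"][admin_level=2]->.searchArea;
nwr["admin_level"="3"](area.searchArea);
out geom;' \
  -o turkey_admin_level_3.json
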
3 changes: 2 additions & 1 deletion .construction_tools/revised_EMSC_region_data/attr_IDs.json
Expand Up @@ -2,5 +2,6 @@
"https://datafinder.stats.govt.nz/layer/111193-territorial-authority-2023-clipped-generalised/":"NZNZ",
"https://www.istat.it/it/archivio/222527": "ITIT",
"https://im.unhcr.org/geoservices/export/index.html":"UNHC",
"https://data.humdata.org/dataset/cod-ab-jpn?": "HUJP"
"https://data.humdata.org/dataset/cod-ab-jpn?": "HUJP",
"https://overpass-turbo.eu/s/1I7F": "OOTU"
}
198 changes: 198 additions & 0 deletions .construction_tools/revised_EMSC_region_data/dataset_viewer.html
@@ -0,0 +1,198 @@
<!DOCTYPE html>
<html>
<head>
<script src="https://unpkg.com/leaflet@1.3.1/dist/leaflet.js"></script>
<link rel="stylesheet" href="https://unpkg.com/leaflet@1.3.1/dist/leaflet.css" />
</head>

<style>
body {
margin: 0;
padding: 0;
}

#map {
position: absolute;
top: 0;
bottom: 0;
right: 0;
left: 0;
z-index: 10;
}

#loadButton {
position: absolute;
top: 10px;
right: 10px;

padding: 8px;

font-size: large;
height: fit-content;
width: fit-content;

border-radius: 8px;

background-color: blue;

z-index: 1001;
}

.dragOver {
background-color: black;
opacity: 0.5;
}


</style>

<body>
<div id="map" style="width: 100%; height: 100vh;"></div>
<input type="file" id="fileInput" onchange="loadFileFromUserInput(event)" accept=".geojson" style="display: none;">
<button id="loadButton" onclick="loadButtonClicked()">Load GeoJSON<br>or drag and drop</button>

</body>

<script>
function preventDefaults (e) {
e.preventDefault()
e.stopPropagation()
}
// Prevent default drag behaviors
;['dragenter', 'dragover', 'dragleave', 'drop'].forEach(eventName => {
document.body.addEventListener(eventName, preventDefaults, false)
})

function loadButtonClicked() {
loadingDialog(true);
document.getElementById('fileInput').click();
}

function loadingDialog(show){
let button = document.getElementById('loadButton');
if(show){
button.innerText = 'Loading...';
button.style.color = 'gray';
}
else{
button.innerHTML = 'Load GeoJSON<br>or drag and drop';
button.style.color = 'white';
}

button.disabled = show;
}

function dragOverEventHandler(e) {
let dz = document.getElementById('loadButton');
dz.classList.add('dragOver');
}

function dragLeaveEventHandler(e) {
let dz = document.getElementById('loadButton');
dz.classList.remove('dragOver');
}

// Handle dropped files
function handleDrop(e) {
loadingDialog(true);
dragLeaveEventHandler(e);
let files = e.dataTransfer.files;
if(! files.length) {
return
}

for (let i = 0; i < files.length; i++) {
loadFileAndShow(files[i]);
}
}

document.getElementById('loadButton').addEventListener('dragenter', dragOverEventHandler, false)

document.getElementById('loadButton').addEventListener('dragleave', dragLeaveEventHandler, false)

document.getElementById('loadButton').addEventListener('drop', handleDrop, false)

document.getElementById("fileInput").addEventListener("cancel", function() {
loadingDialog(false);
});

function getRandomColor() {
let letters = '0123456789ABCDEF';
let color = '#';
for (var i = 0; i < 6; i++) {
color += letters[Math.floor(Math.random() * 16)];
}
return color;
}

function onEachFeature(feature, layer) {
// Create popup content from feature properties
let popupContent = "";
for (let property in feature.properties) {
popupContent += property + ": " + feature.properties[property] + "<br>";
}
layer.bindPopup(popupContent);
}

function styleFeature(feature) {
return {
fillColor: getRandomColor(),
fillOpacity: 0.8,
weight: 2
};
}

function centerMapToFeatures() {
let layers = map._layers;
let bounds = null;

for (let layer in layers) {
if (layers[layer]._latlngs) {
if (!bounds) {
bounds = layers[layer].getBounds();
} else {
bounds.extend(layers[layer].getBounds());
}
}
}

if (bounds) {
map.fitBounds(bounds);
}
}

function loadFileFromUserInput(event) {
loadingDialog(true);
let file = event.target.files[0];

if (!file) {
loadingDialog(false);
return;
}

loadFileAndShow(file);
}

function loadFileAndShow(file){
let reader = new FileReader();
reader.onload = function(e) {
let data = JSON.parse(e.target.result);
L.geoJSON(data, {
style: styleFeature, // Apply random color style
onEachFeature: onEachFeature // Handle each feature
}).addTo(map);
centerMapToFeatures();
loadingDialog(false);
};
reader.readAsText(file);
}

// Create Leaflet map
var map = L.map('map').setView([51.505, -0.09], 5);

// Add base tiles (e.g., OpenStreetMap)
L.tileLayer('https://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png', {
attribution: '&copy; <a href="https://www.openstreetmap.org/copyright">OpenStreetMap</a> contributors'
}).addTo(map);
</script>
</html>
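
The viewer is self-contained: opening dataset_viewer.html in a browser and either picking a .geojson file or dragging one onto the Load button draws each feature with a random fill color, binds a popup listing its properties, and zooms the map to the loaded bounds.
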
26 changes: 23 additions & 3 deletions GQHypocenterSearch/CMakeLists.txt
@@ -1,6 +1,25 @@
cmake_minimum_required(VERSION 3.10)
project(GQHypocenterSearch)
set(CUDA_NVCC_FLAGS "${CUDA_NVCC_FLAGS} $ENV{CUDA_NVCC_FLAGS} -lineinfo -g")

# Define default values for BLOCK_HYPOCS and TILE
if(NOT DEFINED BLOCK_HYPOCS)
set(BLOCK_HYPOCS_DEFAULT 512)
set(BLOCK_HYPOCS ${BLOCK_HYPOCS_DEFAULT})
endif()

if(NOT DEFINED TILE)
set(TILE_DEFAULT 5)
set(TILE ${TILE_DEFAULT})
endif()

if(NOT DEFINED TESTS)
set(TESTS_DEFAULT 16)
set(TESTS ${TESTS_DEFAULT})
endif()

# Add definitions for CUDA_NVCC_FLAGS based on BLOCK_HYPOCS and TILE
set(CUDA_NVCC_FLAGS "${CUDA_NVCC_FLAGS} $ENV{CUDA_NVCC_FLAGS} --use_fast_math -DTESTS=${TESTS} -DBLOCK_HYPOCS=${BLOCK_HYPOCS} -DTILE=${TILE}")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -DBLOCK_HYPOCS=${BLOCK_HYPOCS} -DTESTS=${TESTS} -DTILE=${TILE}")

# Find CUDA package
find_package(CUDA REQUIRED)
@@ -25,8 +25,9 @@ foreach(arch ${CUDA_ARCH_LIST})
set(CUDA_NVCC_FLAGS_${arch} "-gencode arch=compute_${arch},code=sm_${arch}")
endforeach()

include_directories(/usr/lib/jvm/java-17-openjdk-amd64/include/)
include_directories(/usr/lib/jvm/java-17-openjdk-amd64/include/linux)
# Find JNI (Java Native Interface)
find_package(JNI REQUIRED)
include_directories(${JNI_INCLUDE_DIRS})

# Compile CUDA sources into a shared library
cuda_add_library(gq_hypocs SHARED ${SRC_FILES})
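
As a configure-time usage sketch (directory layout assumed; the values shown are examples, not recommendations), the new BLOCK_HYPOCS, TILE and TESTS options can be overridden the same way autotune.sh does below:

# Configure with non-default kernel tuning parameters, then build.
mkdir -p build && cd build
cmake -DBLOCK_HYPOCS=768 -DTILE=8 -DTESTS=32 ..
make
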
53 changes: 53 additions & 0 deletions GQHypocenterSearch/autotune.sh
@@ -0,0 +1,53 @@
#!/bin/bash

# Specify the values of BLOCK_HYPOCS and TILE
BLOCK_HYPOCS_VALUES=(256 384 512 768)
TILE_VALUES=(4 5 6 7 8 9 10 11 12 13 14)

#BLOCK_HYPOCS_VALUES=(32)
#TILE_VALUES=(5)

DEFAULT_TESTS=12
TESTS=${1:-$DEFAULT_TESTS}

filename="autotune_results.csv"

# Remove the existing file if it exists
rm -f "$filename"
touch "$filename"
echo "BLOCK_HYPOCS,TILE,Best_PPS">"$filename"

# Create the build directory if it doesn't exist
mkdir -p build

# CD into the build directory
cd build

# Loop over BLOCK_HYPOCS values
for BLOCK_HYPOCS in "${BLOCK_HYPOCS_VALUES[@]}"; do
# Loop over TILE values
for TILE in "${TILE_VALUES[@]}"; do
# Run cmake with specified BLOCK_HYPOCS and TILE values
cmake -DBLOCK_HYPOCS=$BLOCK_HYPOCS -DTILE=$TILE -DTESTS=$TESTS ..

# Compile the project using make
make

# Run the test binary for this BLOCK_HYPOCS/TILE configuration
./bin/gq_test

# Optionally, clean the build directory after each iteration
# make clean
done
done

cd ..

highest_value=$(awk -F',' 'NR > 1 {print $3}' "$filename" | sort -n -r | head -n 1)

# Use grep to find the entire row with the highest value in the third column
row=$(grep "$highest_value" "$filename")

echo "Best solution:"
echo "$row"
echo "$row">best_solution.txt
2 changes: 2 additions & 0 deletions GQHypocenterSearch/src/globalquake.hpp
@@ -22,4 +22,6 @@ bool run_hypocenter_search(float *stations,

bool init_depth_profiles(float *resols, int count);

size_t get_total_allocation_size(size_t points, size_t station_count, float depth_resolution);

#endif // _GLOBALQUAKE_H
