Commit 25698b7: cover depth revision
SteveYangFASTNDE committed Apr 24, 2024 (1 parent: cc24986)
Showing 3 changed files with 119 additions and 43 deletions.
@@ -121,7 +121,6 @@ def execute_rebar_mapping(self):
rhf_spm = result_variables.get('rhf_spm')
rhf_sps = result_variables.get('rhf_sps')
rh_nsamp = result_variables.get('rh_nsamp')
-
IQR_df_1 = gpr_lr.Interquartile_Range(df_1)
IQR_df_1 = IQR_df_1.astype('float64')
data_length_feet = (IQR_df_1.shape[1] / rhf_spm) * 3.28
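For intuition on the data_length_feet line above: rhf_spm is the number of GPR scans recorded per meter, so the column count of IQR_df_1 divided by rhf_spm gives the survey length in meters, and the factor 3.28 converts meters to feet. A quick sanity check with hypothetical values:

rhf_spm = 200.0     # scans per meter (hypothetical)
num_columns = 4000  # stand-in for IQR_df_1.shape[1] (hypothetical)
data_length_feet = (num_columns / rhf_spm) * 3.28
print(data_length_feet)  # (4000 / 200) * 3.28 = 65.6 feet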
@@ -190,7 +189,7 @@ def read_csv(self, directory, index=0):
filepath_config = f"{directory}{'config'}{index}.csv"

df_1 = pd.read_csv(filepath_data, header=None)
-        df_2 = pd.read_csv(filepath_config)
+        df_2 = pd.read_csv(filepath_config, index_col=0)

return df_1, df_2

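The read_csv change above makes pandas treat the config file's first column as the row index rather than as data. A minimal sketch of the effect, assuming a config CSV whose first column holds parameter names (the layout and names here are assumptions for illustration):

import pandas as pd

# Hypothetical config0.csv contents:
#   ,value
#   rhf_spm,200.0
#   rh_nsamp,512
df_2 = pd.read_csv('config0.csv', index_col=0)
rhf_spm = float(df_2.loc['rhf_spm', 'value'])  # look rows up by parameter name

Without index_col=0 the parameter names would be read as an ordinary data column and the frame would fall back to a default integer index.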
@@ -0,0 +1,58 @@
import os
from tqdm import tqdm
import urllib.request
import zipfile

def download_data_from_links(file_path, download_dir):
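    """
    Downloads each zip file linked in a text file (the first line is treated as a header).

    Parameters:
    - file_path (str): Text file with one download URL per line.
    - download_dir (str): Directory where the downloaded zip files are saved.
    """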
try:
# Check if the download directory exists, if not create it
if not os.path.exists(download_dir):
os.makedirs(download_dir)

with open(file_path, 'r') as file:
lines = file.readlines()[1:] # Read all lines except the first one
for line in lines:
# Strip leading and trailing whitespace
line = line.strip()
# Skip empty lines
if not line:
continue
# Extracting the download link from each line
download_link = line
# Extracting the zone number from the URL
zone_number = download_link.split('=')[-1].split('%20')[-1].split('.')[0]
# Constructing the filename with "GPR Zone"
file_name = f"GPR Zone {zone_number}.zip"
# Downloading the file to the specified directory
                with tqdm(unit='B', unit_scale=True, desc=f"Downloading {file_name}") as pbar:
                    def report(blocknum, blocksize, totalsize, pbar=pbar):
                        if totalsize > 0:
                            pbar.total = totalsize  # let tqdm show a percentage once the size is known
                        pbar.update(blocksize)
                    urllib.request.urlretrieve(download_link, os.path.join(download_dir, file_name), reporthook=report)
except FileNotFoundError:
        print(f"File not found: {file_path}")
except Exception as e:
print(f"An error occurred: {e}")

def unzip_all(directory):
"""
Extracts all contents of zip files in the specified directory.
Parameters:
- directory (str): The directory containing the zip files.
"""
# Iterate through all files in the directory
for filename in os.listdir(directory):
if filename.endswith(".zip"):
# Get the full path of the zip file
filepath = os.path.join(directory, filename)

# Open the zip file
with zipfile.ZipFile(filepath, "r") as zip_ref:
# Use tqdm to show progress
# Set total size of the archive for tqdm to accurately show progress
total_size = sum((file.file_size for file in zip_ref.infolist()))
# Extract all contents to the current directory
with tqdm(total=total_size, unit='B', unit_scale=True, desc=f'Extracting {filename}') as pbar:
for file in zip_ref.infolist():
zip_ref.extract(file, directory)
pbar.update(file.file_size)

print(f"Extracted contents from {filename}")
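Taken together, the two helpers above are meant to run in sequence: download every zip named in the links file (whose first line is skipped as a header), then extract all of them. A sketch of the call pattern, mirroring the notebook below; the paths are placeholders:

import download_unzip as du

home_dir = 'C:/download/path/data/'        # placeholder data directory
txt_file = home_dir + 'download_link.txt'  # header line first, then one URL per line

du.download_data_from_links(txt_file, home_dir)  # saves each 'GPR Zone N.zip'
du.unzip_all(home_dir)                           # extracts every zip in place

For a link shaped like 'https://host/path?file=GPR%20Zone%2001.zip' (a hypothetical URL), the zone extraction in download_data_from_links evaluates split('=')[-1] to 'GPR%20Zone%2001.zip', split('%20')[-1] to '01.zip', and split('.')[0] to '01', so the file is saved as 'GPR Zone 01.zip'.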
@@ -3,40 +3,75 @@
{
"cell_type": "code",
"execution_count": null,
"id": "e3ae13be",
"id": "d735d14e",
"metadata": {},
"outputs": [],
"source": [
"import sys\n",
"sys.path.append('C:/directory/path/downloaded_py_files/')\n",
"sys.path.append('C:/download/path/code')\n",
"from tqdm import tqdm\n",
"import warnings\n",
"from RebarCoverDepth import RebarCoverDepth\n",
"from DztToCsvConverter import DztToCsvConverter\n",
"from ContourProcessor import ContourProcessor\n",
"from XMLReader import XMLReader\n",
"import download_unzip as du\n",
"import GPR_plot\n",
"warnings.filterwarnings('ignore')\n",
"warnings.filterwarnings('ignore')"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "ec377074",
"metadata": {},
"outputs": [],
"source": [
"#Data path\n",
"home_dir = 'C:/download/path/data/'\n",
"\n",
"#Read XML file and create coordinate dataframe\n",
"xml_file = 'C:/directory/path/downloaded_py_files/'\n",
"xml_reader = XMLReader(xml_file)\n",
"df_coord = xml_reader.df_coord\n",
"#Download the GPR data\n",
"txt_file = home_dir + 'download_link.txt'\n",
"du.download_data_from_links(txt_file, home_dir)\n",
"\n",
"#Unzip the GPR data\n",
"du.unzip_all(home_dir)\n",
"\n",
"#Read XML file and create coordinate dataframe\n",
"xml_file = home_dir + '28-000002_2019-05-21_GPR.xml'\n",
"xml_reader = XMLReader(xml_file)\n",
"df_coord = xml_reader.df_coord"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "ed645ba8",
"metadata": {},
"outputs": [],
"source": [
"# Iterate over zones\n",
"for zone in tqdm(range(1, 5)):\n",
" file_path = f'C:/directory/path/downloaded_py_files/GPR Zone {zone:02d}'\n",
" converter = DztToCsvConverter(df_coord, zone, file_path)\n",
" converter.process_dzt_files()\n",
"\n",
"for zone in tqdm(range(1, 5), desc=\"Converting DZT to CSV\"):\n",
" file_path = home_dir + f'GPR Zone {zone:02d}'\n",
" converter = DztToCsvConverter(df_coord, zone, file_path)\n",
" converter.process_dzt_files()\n",
"\n",
"print(\"Conversion from DZT to CSV completed.\")"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "e3ae13be",
"metadata": {},
"outputs": [],
"source": [
"#Processing Zone 01\n",
"cover_depth_01 = RebarCoverDepth(df_coord,\n",
" 1, #zone_number\n",
" 670, #chunk_size\n",
" 5, #gpr_lanes\n",
" 'H:/GPR_work/CHARISMA-GPR/Mississipi_bridge/', #home_dir\n",
" home_dir, #home_dir\n",
" 10, #window\n",
" 0, #alpha\n",
" 0, #t0\n",
@@ -45,11 +80,11 @@
" 0.15, #vmin\n",
" 0.70, #vmax\n",
" 100, #num_clusters\n",
" 0.52, #amplitude_threshold\n",
" 0.53, #amplitude_threshold\n",
" 8, #depth_threshold\n",
" 20, #minimal_y_index\n",
" 1.5, #redundancy_filter\n",
" 0.25) #scaling_factor\n",
" 0.27) #scaling_factor\n",
"\n",
"x_points_list_01, y_points_list_01, z_points_list_01, df_chunk_01, time0ed_01, gained_01, dewowed_01, bgrmed_01, migrated_01, contrasted_01, located_01 = cover_depth_01.execute_rebar_mapping()\n",
"cover_depth_01.plot_combined_zone_contour(x_points_list_01, y_points_list_01, z_points_list_01, cover_depth_01.zone_number)\n",
@@ -74,7 +109,7 @@
" 2, #zone_number\n",
" 670, #chunk_size\n",
" 4, #gpr_lanes\n",
" 'H:/GPR_work/CHARISMA-GPR/Mississipi_bridge/', #home_dir\n",
" home_dir, #home_dir\n",
" 10, #window\n",
" 0, #alpha\n",
" 0, #t0\n",
@@ -83,11 +118,11 @@
" 0.15, #vmin\n",
" 0.70, #vmax\n",
" 100, #num_clusters\n",
" 0.52, #amplitude_threshold\n",
" 0.53, #amplitude_threshold\n",
" 8, #depth_threshold\n",
" 20, #minimal_y_index\n",
" 1.5, #redundancy_filter\n",
" 0.25) #scaling_factor\n",
" 0.27) #scaling_factor\n",
"\n",
"x_points_list_02, y_points_list_02, z_points_list_02, df_chunk_02, time0ed_02, gained_02, dewowed_02, bgrmed_02, migrated_02, contrasted_02, located_02 = cover_depth_02.execute_rebar_mapping()\n",
"cover_depth_02.plot_combined_zone_contour(x_points_list_02, y_points_list_02, z_points_list_02, cover_depth_02.zone_number)\n",
@@ -97,7 +132,7 @@
" 3, #zone_number\n",
" 670, #chunk_size\n",
" 3, #gpr_lanes\n",
" 'H:/GPR_work/CHARISMA-GPR/Mississipi_bridge/', #home_dir\n",
" home_dir, #home_dir\n",
" 10, #window\n",
" 0, #alpha\n",
" 0, #t0\n",
@@ -106,11 +141,11 @@
" 0.15, #vmin\n",
" 0.70, #vmax\n",
" 100, #num_clusters\n",
" 0.52, #amplitude_threshold\n",
" 0.53, #amplitude_threshold\n",
" 8, #depth_threshold\n",
" 20, #minimal_y_index\n",
" 1.5, #redundancy_filter\n",
" 0.25) #scaling_factor\n",
" 0.27) #scaling_factor\n",
"\n",
"x_points_list_03, y_points_list_03, z_points_list_03, df_chunk_03, time0ed_03, gained_03, dewowed_03, bgrmed_03, migrated_03, contrasted_03, located_03 = cover_depth_03.execute_rebar_mapping()\n",
"cover_depth_03.plot_combined_zone_contour(x_points_list_03, y_points_list_03, z_points_list_03, cover_depth_03.zone_number)\n",
@@ -120,7 +155,7 @@
" 4, #zone_number\n",
" 670, #chunk_size\n",
" 3, #gpr_lanes\n",
" 'H:/GPR_work/CHARISMA-GPR/Mississipi_bridge/', #home_dir\n",
" home_dir, #home_dir\n",
" 10, #window\n",
" 0, #alpha\n",
" 0, #t0\n",
@@ -129,11 +164,11 @@
" 0.15, #vmin\n",
" 0.70, #vmax\n",
" 100, #num_clusters\n",
" 0.52, #amplitude_threshold\n",
" 0.53, #amplitude_threshold\n",
" 8, #depth_threshold\n",
" 20, #minimal_y_index\n",
" 1.5, #redundancy_filter\n",
" 0.25) #scaling_factor\n",
" 0.27) #scaling_factor\n",
"\n",
"x_points_list_04, y_points_list_04, z_points_list_04, df_chunk_04, time0ed_04, gained_04, dewowed_04, bgrmed_04, migrated_04, contrasted_04, located_04 = cover_depth_04.execute_rebar_mapping()\n",
"cover_depth_04.plot_combined_zone_contour(x_points_list_04, y_points_list_04, z_points_list_04, cover_depth_04.zone_number)\n",
@@ -150,22 +185,6 @@
"processor.contour_scan_area_only(lists)\n",
"processor.contour_interpolate_entire(lists)"
]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "f1e39b1e",
-   "metadata": {},
-   "outputs": [],
-   "source": []
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "6f38061e",
-   "metadata": {},
-   "outputs": [],
-   "source": []
}
],
"metadata": {
@@ -184,7 +203,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.3"
"version": "3.9.18"
}
},
"nbformat": 4,
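One observation on the notebook: the four zone cells differ only in zone_number and gpr_lanes; the chunk size, processing thresholds, and the shared home_dir are identical across zones. A loop over per-zone settings would remove the duplication. This is a sketch only: the diff elides some positional parameters between t0 and vmin, so they appear as a placeholder comment here.

# (zone_number, gpr_lanes) pairs taken from the four cells above
zone_settings = [(1, 5), (2, 4), (3, 3), (4, 3)]

results = {}
for zone_number, gpr_lanes in zone_settings:
    cover_depth = RebarCoverDepth(df_coord, zone_number,
                                  670,        # chunk_size
                                  gpr_lanes,  # gpr_lanes
                                  home_dir,   # home_dir
                                  10, 0, 0,   # window, alpha, t0
                                  # ...positional parameters elided in this diff...
                                  0.15, 0.70, # vmin, vmax
                                  100,        # num_clusters
                                  0.53,       # amplitude_threshold
                                  8,          # depth_threshold
                                  20,         # minimal_y_index
                                  1.5,        # redundancy_filter
                                  0.27)       # scaling_factor
    results[zone_number] = cover_depth.execute_rebar_mapping()

The per-zone contour plot could then move inside the loop as well: plot_combined_zone_contour takes the x, y, and z point lists plus cover_depth.zone_number, exactly as in the cells above.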
