diff --git a/.DS_Store b/.DS_Store index fb3ddd8..6bed6f5 100644 Binary files a/.DS_Store and b/.DS_Store differ diff --git a/.idea/workspace.xml b/.idea/workspace.xml new file mode 100644 index 0000000..cc9a6b7 --- /dev/null +++ b/.idea/workspace.xml @@ -0,0 +1,28 @@ + + + + + + + + + + + + + + { + "keyToString": { + "node.js.selected.package.tslint": "(autodetect)" + } +} + + + + \ No newline at end of file diff --git a/NAISRicon.svg b/NAISRicon.svg new file mode 100644 index 0000000..081677d --- /dev/null +++ b/NAISRicon.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/README.md b/README.md index b2746c4..32eccf3 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,215 @@ -# NAISR -NAISR: A 3D Neural Additive Model for Interpretable Shape Representation +# NAISR: A 3D Neural Additive Model for Interpretable Shape Representation +Pytorch implementation for our `NAISR` paper
+[NAISR: A 3D Neural Additive Model for Interpretable Shape Representation](https://arxiv.org/abs/2303.09234), ICLR 2024 **Spotlight**.
+Yining Jiao, Carlton Zdanski, Julia Kimbell, Andrew Prince, Cameron Worden, Samuel Kirse, Christopher Rutter, Benjamin Shields, William Dunn, Jisan Mahmud, Marc Niethammer.
+UNC-Chapel Hill -Codes on the way... +#### [Paper](https://arxiv.org/abs/2303.09234) | [Project Page](https://uncbiag.github.io/NAISR/) | [Colab Demos](https://colab.research.google.com/drive/1OudGynEydIXpAgfA9lvi5d0L86kK-uwg?usp=sharing) + + +
+ +Please cite as: +``` +@inproceedings{ +jiao2024naisr, +title={\texttt{NAISR}: A 3D Neural Additive Model for Interpretable Shape Representation}, +author={Yining Jiao and Carlton Jude ZDANSKI and Julia S Kimbell and Andrew Prince and Cameron P Worden and Samuel Kirse and Christopher Rutter and Benjamin Shields and William Alexander Dunn and Jisan Mahmud and Marc Niethammer}, +booktitle={The Twelfth International Conference on Learning Representations}, +year={2024}, +url={https://openreview.net/forum?id=wg8NPfeMF9} +} +``` + +>[!NOTE] +>For the three datasets used in `NAISR` paper, we provided this [Colab demo](https://colab.research.google.com/drive/1OudGynEydIXpAgfA9lvi5d0L86kK-uwg) to investigate the shapes learned with `NAISR`.
+We also provide instructions on this page to apply `NAISR` to your customized shape analysis questions. + + +## Installation +The code is tested with ``python=3.9``, ``torch=2.1.0``, ``torchvision=0.15.2``. +``` +git clone https://github.com/uncbiag/NAISR +cd NAISR +``` +Now, create a new conda environment and install required packages accordingly. +``` +conda create -n naisr python=3.9 +conda activate naisr +pip install -r requirements.txt +``` + + +## Data and Model Weights +We train and test our method on three datasets: Starman, ADNI Hippocampus, and Pediatric Airways. + +| Dataset | Description | Dataset Link | Model Link | |-----------|----------------------------------------------|:------------------------------------:|:------------------------------------:| |Starman | simulated 2D Starman shapes | [simulation code][StarmanData] | [weights][StarmanModel] | |ADNI Hippocampus | 1632 hippocampi from ADNI | [official site][ADNIData] | [weights][ADNIModel] | |Pediatric Airway | 357 pediatric airway shapes | NA | [weights][AirwayModel] | + + + +[ADNIData]: https://ida.loni.usc.edu/login.jsp?project=ADNI +[StarmanData]: + +[ADNIModel]: https://github.com/uncbiag/NAISR/releases/download/naisr_weights_v0/naisr_weights_adni.pth +[StarmanModel]: https://github.com/uncbiag/NAISR/releases/download/naisr_weights_v0/naisr_weights_starman.pth +[AirwayModel]: https://github.com/uncbiag/NAISR/releases/download/naisr_weights_v0/naisr_weights_pediatric_airway.pth + + +## Visualizations of Shape Space Extrapolation + +### Getting Started +One just needs to use this [Colab demo](https://colab.research.google.com/drive/1OudGynEydIXpAgfA9lvi5d0L86kK-uwg) to explore the template shape space of {starmen, hippocampi and airways} used in the `NAISR` paper. 
+ +### More functions +Our code repo provides more functions to visualize the shape space, e.g., as a matrix for a specific case or for the template shape (as Figure 3 in the main paper), + +To get the matrix of the template shape extrapolation, please use +``` +python evolution_shapematrix.py -e examples/hippocampus/naigsr_0920_base.json +``` + +To get the matrix of the shape extrapolation for a specific patient, please use +``` +python evolution_shapematrix_specific.py -e examples/hippocampus/naigsr_0920_base.json +``` + + + +## Customize +One may also want to use `NAISR` on their own shape analysis problems. For this purpose, we provide our best suggestions/instructions here, illustrated with the simulated starman dataset. + +### Data Preprocessing + +#### Alignment with Rigid Transformation +The shapes to explore need to be registered with a rigid transformation (translation + rotation). +If paired point clouds are available, we recommend using a paired-point (landmark-based) rigid registration; otherwise, we recommend using ICP to register the point clouds. +In our case, we use airway landmarks to learn the rigid transformation; and ICP algorithms to register the point clouds of the hippocampi. + +#### SDF Extraction + +### +```json + { + "Description" : [ "This experiment learns a shape representation for starman dataset." 
], + "Device":0, + "DataSource": {"train": "/home/jyn/NAISR/examples/starman/2dshape_train_with_temp.csv", + "test": "/home/jyn/NAISR/examples/starman/2dshape_test_with_temp.csv"}, + "Split": null, + "Network": "DeepNAIGSR", + "NumEpochs": 300, + "LoggingRoot": "/playpen-raid/jyn/NAISR/log", + "ExperimentName": "DeepNAIGSR_STARMAN3D_0222_256_base", + + "EpochsTilCkpt": 10, + "StepsTilSummary": 1000, + "UseLBFGS": false, + "DoublePrecision": false, + "CheckpointPath": "", + "CodeLength": 256, + + "AdditionalSnapshots" : [ 50, 100, 200, 300, 400, 500 ], + "LearningRateSchedule" : [ + { + "Type": "Step", + "Initial": 0.00005, + "Interval": 1000, + "Factor": 0.5 + }, + { + "Type": "Step", + "Initial": 0.001, + "Interval": 1000, + "Factor": 0.5 + }], + "SamplesPerScene" : 750, + "BatchSize": 64, + "DataLoaderThreads": 4, + "ClampingDistance": 1, + + "Articulation": true, + "NumAtcParts": 1, + "TrainWithParts": false, + "Class": "starman", + "Attributes": ["cov_1", "cov_2"], + "TemplateAttributes": {"cov_1": 0, "cov_2": 0}, + "Backbone": "siren", + "PosEnc": false, + "InFeatures": 2, + "HiddenFeatures": 256, + "HidenLayers": 6, + "OutFeatures": 1, + "Loss": { + "whether_sdf": true, + "whether_normal_constraint": true, + "whether_inter_constraint": true, + "whether_eikonal": true, + "whether_code_regularization": true} +``` +### Training +For training, one needs to use `train_atlas_3dnaigsr.py` with different networking settings indicated with the `json` files, e.g., for the starman dataset, +``` +python train_atlas_3dnaigsr.py -e examples/starman/naigsr_0222_base.json +``` + +### Shape Reconstruction +For testing/reconstruction without covariates, one needs to `reconstruct_atlas.py` with different network settings indicated with the `json` files, e.g., for the starman dataset. 
+``` +python reconstruct_atlas.py -e examples/starman/naigsr_0222_base.json +``` + +For testing/reconstruction with covariates, one needs to use `reconstruct_atlas_with_cov.py` with different network settings indicated with the `json` files, e.g., for the starman dataset. +``` +python reconstruct_atlas_with_cov.py -e examples/starman/naigsr_0222_base.json +``` + + +### Shape Transport +For shape transport without covariates, one needs to use `transport_general.py` with different network settings indicated with the `json` files, e.g., for the starman dataset. +``` +python transport_general.py -e examples/starman/naigsr_0222_base.json +``` + +For shape transport with covariates, one needs to use `transport.py` with different network settings indicated with the `json` files, e.g., for the starman dataset. +``` +python transport.py -e examples/starman/naigsr_0222_base.json +``` + + +### Shape Evolution and Disentanglement + +One just needs to adjust this [Colab demo](https://colab.research.google.com/drive/1OudGynEydIXpAgfA9lvi5d0L86kK-uwg) with their own `NAISR` weights to explore how the learned model deforms the template shape with the query covariates. + +To get the matrix of the template shape extrapolation, please use +``` +python evolution_shapematrix.py -e examples/starman/naigsr_0222_base.json +``` + +To get the matrix of the shape extrapolation for a specific patient, please use +``` +python evolution_shapematrix_specific.py -e examples/starman/naigsr_0222_base.json +``` + + + +More instructions on the way... 
+ + + + + +## Cite this work +``` +@inproceedings{ +jiao2024naisr, +title={\texttt{NAISR}: A 3D Neural Additive Model for Interpretable Shape Representation}, +author={Yining Jiao and Carlton Jude ZDANSKI and Julia S Kimbell and Andrew Prince and Cameron P Worden and Samuel Kirse and Christopher Rutter and Benjamin Shields and William Alexander Dunn and Jisan Mahmud and Marc Niethammer}, +booktitle={The Twelfth International Conference on Learning Representations}, +year={2024}, +url={https://openreview.net/forum?id=wg8NPfeMF9} +} +``` diff --git a/docs/NAISR poster (2).png b/docs/NAISR poster (2).png deleted file mode 100644 index 487487c..0000000 Binary files a/docs/NAISR poster (2).png and /dev/null differ diff --git a/docs/NAISR_icon.png b/docs/NAISR_icon.png deleted file mode 100644 index 5288aa1..0000000 Binary files a/docs/NAISR_icon.png and /dev/null differ diff --git a/docs/index.html b/docs/index.html index 45dd3c3..2072c43 100644 --- a/docs/index.html +++ b/docs/index.html @@ -104,6 +104,16 @@

NAISR: A 3D Neural Additive Mo + + + + + + Video + + + diff --git a/examples/starman/naigsr_0222_base.json b/examples/starman/naigsr_0222_base.json index 71b3f70..f5ae11e 100644 --- a/examples/starman/naigsr_0222_base.json +++ b/examples/starman/naigsr_0222_base.json @@ -1,7 +1,8 @@ { "Description" : [ "This experiment learns a shape representation for starman dataset." ], "Device":0, - "DataSource": {"train": "/home/jyn/NAISR/examples/starman/2dshape_train_with_temp.csv", "test": "/home/jyn/NAISR/examples/starman/2dshape_test_with_temp.csv"}, + "DataSource": {"train": "examples/starman/2dshape_train_with_temp.csv", + "test": "examples/starman/2dshape_test_with_temp.csv"}, "Split": null, "Network" : "DeepNAIGSR", "NumEpochs" : 300, diff --git a/naisr/__init__.py b/naisr/__init__.py index 7e84edf..09ad2dd 100755 --- a/naisr/__init__.py +++ b/naisr/__init__.py @@ -10,4 +10,7 @@ #from naisr.model import NAISiren, NAIVF, BaselineVF, FCBaseline, NAIVF_withtempl, SirenlatentVF, NAIlatentVF_withtempl, LipNAIVF_withtempl, Baseline, BaselineVF, ICVF, NAIVF_with3dtempl, NAIVF_autotempl, NAIVF_fixedtempl, NAIVF_fixed, DeepSDF #NAISR, HyperSirenBaseline, SirenBaseline, from naisr.diff_operators import * from naisr.metrics import * -from naisr.model_naigsr import DeepNAIGSR \ No newline at end of file +from naisr.model_naigsr import DeepNAIGSR +from naisr.starman_dataset import * +from naisr.adni_dataset import * +from naisr.airway_dataset import * \ No newline at end of file diff --git a/naisr_meshing.py b/naisr_meshing.py index 0611406..bb52ac3 100755 --- a/naisr_meshing.py +++ b/naisr_meshing.py @@ -1148,10 +1148,9 @@ def convert_3d_sdf_samples_to_ply( print(numpy_3d_sdf_tensor.max()) print(numpy_3d_sdf_tensor.min()) try: - verts, faces, normals, values = measure.marching_cubes_lewiner( + verts, faces, normals, values = measure.marching_cubes( numpy_3d_sdf_tensor, level=0., spacing=[voxel_size] * 3 ) - except: path_aligned_surface = os.path.join(savedir, 'surface.stl') 
ply_filename_out = os.path.join(savedir, 'surface.ply') diff --git a/publicdata/.DS_Store b/publicdata/.DS_Store new file mode 100644 index 0000000..03d256a Binary files /dev/null and b/publicdata/.DS_Store differ diff --git a/publicdata/__init__.py b/publicdata/__init__.py new file mode 100644 index 0000000..1890665 --- /dev/null +++ b/publicdata/__init__.py @@ -0,0 +1,2 @@ +from publicdata.utils_2d import * +from publicdata.deformation import * \ No newline at end of file diff --git a/publicdata/deformation.py b/publicdata/deformation.py new file mode 100644 index 0000000..62a04a5 --- /dev/null +++ b/publicdata/deformation.py @@ -0,0 +1,103 @@ +import numpy as np +from scipy.ndimage import gaussian_filter + + +def rotate_polygon(polygon, rotation_deg=30): + """ + Rotate the polygon. + """ + theta = np.deg2rad(rotation_deg) + + rotation_matrix = np.array([ + [np.cos(theta), -np.sin(theta)], + [np.sin(theta), np.cos(theta)] + ]) + + # Apply the rotation + rotated_polygon = np.dot(polygon, rotation_matrix.T) + + return rotated_polygon + + +def translate_polygon(polygon, translation=(10, 10)): + """ + Translate the polygon. + """ + translated_polygon = polygon + translation + return translated_polygon + +def Gaussian(x, z, sigma=0.5): + return np.exp((-(np.linalg.norm(x - z, axis=1) ** 2)) / (2 * sigma ** 2)) + +def create_deformation(points, control_point, VECTOR): + deformed = Gaussian(points, control_point)[:, None] @ VECTOR[None, :] # + points + return deformed + +def random_deform_polygon(arr_contour_pts, arr_control_pts): + """ + Add random deformation to the polygon. 
+ """ + + nx, ny = (5, 5) + x = np.linspace(-2, 2, nx) + y = np.linspace(-2, 2, ny) + xv, yv = np.meshgrid(x, y) + arr_control_grids = np.concatenate((xv.ravel()[:, None], yv.ravel()[:, None]), axis=1) + + arr_deformation_contour = np.zeros_like(arr_contour_pts.copy()) + arr_deformation_control = np.zeros_like(arr_control_pts.copy()) + for ith_con_pt in arr_control_grids: + a = (np.random.rand(2) - 0.5) * 0.25 + arr_deformation_contour += create_deformation(arr_contour_pts, ith_con_pt, a) + arr_deformation_control += create_deformation(arr_control_pts, ith_con_pt, a) + + arr_deformed_contour = arr_contour_pts + arr_deformation_contour + arr_deformed_controls = arr_control_pts + arr_deformation_control + return arr_deformed_contour, arr_deformed_controls + + +def deform_polygon_randomly(arr_contour_pts, arr_control_pts):#, rotation_deg=130): + """ + Combine the rotation, translation, and random deformation. + """ + + deformed = random_deform_polygon(arr_contour_pts, arr_control_pts) + #deformed = rotate_polygon(polygon, rotation_deg=rotation_deg) + + return deformed + +def show_shapely_polygon(polygon): + fig, ax = plt.subplots() + ax.plot(*polygon.exterior.xy, 'b-', label='Polygon') + ax.set_aspect('equal', adjustable='datalim') + ax.legend() + plt.show() + return + +# Example usage +polygon = np.array([ + [0, 0], + [1, 0], + [1, 1], + [0, 1] +]) + +if __name__ == "__main__": + import pyvista as pv + import numpy as np + from shapely.geometry import Polygon, Point + from shapely.geometry import Point, LineString, Polygon + import matplotlib.pyplot as plt + import pyvista as pv + + path_starman_vtk = "/Users/jyn/jyn/research/projects/NAISR/publicdata/deformetrica/examples/longitudinal_atlas/landmark/2d/starmen_for_simulation/data_ground_truth/ForSimulation__Template__GroundTruth.vtk" + path_control_points = 
"/Users/jyn/jyn/research/projects/NAISR/publicdata/deformetrica/examples/longitudinal_atlas/landmark/2d/starmen_for_simulation/data_ground_truth/ForSimulation__ControlPoints__GoundTruth.txt" + # "/Users/jyn/jyn/research/projects/NAISR/publicdata/deformetrica/examples/longitudinal_atlas/landmark/2d/starmen/data/ForInitialization__ControlPoints__FromLongitudinalAtlas.txt" + arr_contour_pts = np.load(path_control_points) + mesh = pv.read(path_starman_vtk) + arr_contour_pts = mesh.points[:, [0,1]] + deformed_polygon = deform_polygon_randomly(arr_contour_pts,arr_control_pts ) + polygon, _ = Polygon(deformed_polygon) + show_shapely_polygon(polygon) + + print(deformed_polygon) diff --git a/publicdata/demo.vtk b/publicdata/demo.vtk new file mode 100644 index 0000000..e3e8c6e Binary files /dev/null and b/publicdata/demo.vtk differ diff --git a/publicdata/demo_adni_dataset_info.py b/publicdata/demo_adni_dataset_info.py new file mode 100644 index 0000000..7385502 --- /dev/null +++ b/publicdata/demo_adni_dataset_info.py @@ -0,0 +1,256 @@ +import os +import numpy as np +import pandas as pd +import math +import pyvista as pv +import trimesh +from utils_3d import * +import skimage.measure as measure +import datetime +# def extract_subj_list(root_dataset): +# #root_dataset = "/Users/jyn/jyn/research/projects/NAISR/NAISR/publicdata/deformetrica/examples/longitudinal_atlas/landmark/3d/hippocampi/data" #"/home/jyn/NAISR/publicdata/deformetrica/examples/longitudinal_atlas/landmark/3d/hippocampi/data" +# list_scans = os.listdir(root_dataset) +# list_subjects = [] +# dict_subj_scans = {} +# for ith_scan in list_scans: +# if '.vtk' in ith_scan and 'sub' in ith_scan: +# ith_subj = int(ith_scan.split('_')[0][-4::]) +# list_subjects.append(ith_subj) +# # attach scans to subjects +# if ith_subj not in list(dict_subj_scans.keys()): +# dict_subj_scans[ith_subj] = [] +# dict_subj_scans[ith_subj].append(ith_scan) +# +# list_subjects = np.unique(np.array(list_subjects)) +# return list_subjects, 
dict_subj_scans + + +def extract_subj_list(root_dataset): + #root_dataset = "/Users/jyn/jyn/research/projects/NAISR/NAISR/publicdata/deformetrica/examples/longitudinal_atlas/landmark/3d/hippocampi/data" #"/home/jyn/NAISR/publicdata/deformetrica/examples/longitudinal_atlas/landmark/3d/hippocampi/data" + list_all_subjects = os.listdir(root_dataset) + list_of_subjects = [] + dict_subj_scans = {} + for ith_scan in list_all_subjects: + # current patient + if os.path.isdir(os.path.join(root_dataset, ith_scan)): + ith_subj = int(ith_scan.split('_')[-1]) + # current folder + current_scan_folder = os.path.join(root_dataset, ith_scan, "Hippocampal_Mask") + # attach scans to subjects + dict_subj_scans[ith_scan] = os.listdir(current_scan_folder) + list_of_subjects.append(ith_scan) + return list_of_subjects, dict_subj_scans + + +def transform_group_to_num(str_class): + if str_class == 'CN': + return 0 + elif str_class == 'MCI': + return 1 + elif str_class == 'AD': + return 2 +def extract_patient_convariates(root_adni_dataset): + #root_dataset = "/Users/jyn/jyn/research/projects/NAISR/NAISR/publicdata/deformetrica/examples/longitudinal_atlas/landmark/3d/hippocampi/data" # "/home/jyn/NAISR/publicdata/deformetrica/examples/longitudinal_atlas/landmark/3d/hippocampi/data" + list_subjects, dict_subj_scans = extract_subj_list(root_adni_dataset) + pd_demmog = pd.read_csv(ROOTPATH_DEMMOG) + pd_dataset_info = pd.read_csv(ROOTPATH_DATASET_INFO) + pd_dataset_info['Study Date'] = pd.to_datetime(pd_dataset_info['Study Date'], yearfirst=True) + list_data = [] + + for ith_subject in list_subjects: + # print("processing " + str(ith_subject)) + RID = int(ith_subject.split('_')[-1]) + df_current_subj = pd_demmog[pd_demmog['RID'] == RID] + list_edu_level = [] + # get ages + for i in range(len(df_current_subj)): + # get education + list_edu_level.append(df_current_subj.iloc[i]["PTEDUCAT"]) + + edu_level = np.array(list_edu_level).max() + + list_current_scans = dict_subj_scans[ith_subject] + + + 
current_datasetinfo = pd_dataset_info[pd_dataset_info['Subject ID']==ith_subject] + # id,PID,cov_1,cov_2,cov_3,cov_4,2dsdf,2dshape + for ith_scan in list_current_scans: + scandate = ith_scan.split("_")[0] + current_folder = os.path.join(root_adni_dataset, ith_subject, "Hippocampal_Mask", ith_scan) + current_scanfile_folder = os.path.join(current_folder, os.listdir(current_folder)[0]) + current_scanfile = os.listdir(current_scanfile_folder)[0] + current_path = os.path.join(current_scanfile_folder, current_scanfile) + + + time_diff = (current_datasetinfo['Study Date'] - pd.to_datetime(scandate)) / pd.Timedelta(1, 'D') + current_age = float(current_datasetinfo[time_diff.abs()<=1]['Age'].values[0]) + current_sex = float(current_datasetinfo[time_diff.abs() <= 1]['Sex'].values[0]=='M') + #except: + # print('a') + #try: + current_group = current_datasetinfo[time_diff.abs()<=1]['Research Group'].values[0] + current_group = float(transform_group_to_num(current_group)) + #except: + # print('a') + + #datetime.strftime(format) + list_data.append({'ID': ith_subject, + 'age': current_age, + 'sex':current_sex, + "edu": edu_level, + 'group': current_group, + "scan": current_scanfile, + "path": current_path + }) + + print("Done") + return list_subjects, dict_subj_scans, list_data + + + + +# def pair_covariates_with_shapes(list_subjects, dict_subj_scans, list_subjects_data): +# +# dict_subjects_data = pd.DataFrame.from_records(list_subjects_data) +# list_scan_data = [] +# for ith_subject in list_subjects: +# print("processing " + str(ith_subject)) +# current_scans = list(dict_subjects_data[dict_subjects_data['ID'] == ith_subject]['scans'])[0] +# current_paths = list(dict_subjects_data[dict_subjects_data['ID'] == ith_subject]['paths'])[0] +# +# for ith_scan in dict_subj_scans[ith_subject]: +# if '.vtk' in ith_scan and 'sub' in ith_scan: +# scantime = float(str(ith_scan.split("-")[2][1:3])) +# +# # current scan with covariates +# current_age = scantime / 12 + 
float(dict_subjects_data[dict_subjects_data['ID'] == ith_subject]['age']) +# current_edu_level = float(dict_subjects_data[dict_subjects_data['ID'] == ith_subject]['edu']) +# current_path = np.array(current_paths)[ith_scan == np.array(current_scans)][0] +# current_pid = ith_subject +# current_scan = ith_scan +# +# # +# generate_shapes_and_sdfs(current_path, ROOT_DATASET) +# +# list_scan_data.append({"ID": current_scan, +# "PID": current_pid, +# 'age': current_age, +# 'edu': current_edu_level, +# 'path': current_path}) +# +# return list_scan_data + + + +def create_adni_dataset_sheet(list_data): + + dict_subjects_data = pd.DataFrame.from_records(list_data) + list_scan_data = [] + for ith_dict in list_data: + current_path = ith_dict['path'] + print("processing " + str(current_path)) + + # current scan with covariates + current_age = ith_dict['age'] + current_edu_level = ith_dict['edu'] + current_pid = ith_dict['ID'] + current_scan = ith_dict['scan'] + + current_AD = int(ith_dict['group'] == 2) + current_MCI = int(ith_dict['group'] > 0) + current_sex = ith_dict['sex'] + + # + + #generate_shapes_and_sdfs(current_path, ROOT_DATASET) + #os.system('python create_sdf_from_mesh.py --subj ' + str(source_subj)) + + savepath_on_npy = os.path.join(ROOT_DATASET, '3dshape', current_scan.split('.')[0] + '_on.npy') + savepath_off_npy = os.path.join(ROOT_DATASET, '3dsdf', current_scan.split('.')[0] + '_off.npy') + savepath_on_pv = os.path.join(ROOT_DATASET, '3dvis', current_scan.split('.')[0] + '_on_aligned.stl') + + + list_scan_data.append({"id": current_scan, + "PID": current_pid, + 'age': current_age, + 'sex':current_sex, + 'edu': current_edu_level, + 'AD': current_AD, + 'MCI': current_MCI, + 'path': current_path, + '3dshape': savepath_on_npy, + '3dsdf': savepath_off_npy, + '3dvis': savepath_on_pv}) + + return list_scan_data + +def read_mesh_as_trimesh(path_mesh): + # unpacking + pv_data = pv.read(path_mesh) + #current_airway.faces = np.hstack((np.ones((faces.shape[0], 1)) * 3, 
faces)).ravel().astype('int') + faces = np.reshape(pv_data.faces, (-1, 4))[:, [1, 2, 3]] + surf_airway = trimesh.Trimesh(vertices=pv_data.points, faces=faces) + #surf_airway.show() + return surf_airway + + + + +# def generate_shapes_and_sdfs(path_vtk, root_dataset): +# +# +# # get id +# scan_id = path_vtk.split('/')[-1] +# +# # get savepath +# savepath_on_npy = os.path.join(root_dataset, '3dshape', scan_id.split('.')[0] + '_on.npy') +# savepath_off_npy = os.path.join(root_dataset, '3dsdf', scan_id.split('.')[0] + '_off.npy') +# savepath_on_pv = os.path.join(root_dataset, '3dshape', scan_id.split('.')[0] + '_on.vtk') +# +# #path_examples = "/Users/jyn/jyn/research/projects/NAISR/NAISR/publicdata/deformetrica/examples/longitudinal_atlas/landmark/3d/hippocampi/data/sub-ADNI002S0729_ses-M00.vtk" +# +# # off surface points +# mesh = read_mesh_as_trimesh(path_vtk) +# # off surface points +# arr_off = calculate_sdf_from_mesh(mesh) +# # on surface points +# pv_data = pv.read(path_vtk) +# pv_data.save(savepath_on_pv) +# +# np.save(savepath_on_npy, np.concatenate((np.array(mesh.vertices), np.array(mesh.vertex_normals)), axis=-1)) +# np.save(savepath_off_npy, arr_off) +# +# del arr_off, mesh, pv_data +# +# return + + + + + + +if __name__ == "__main__": + + path_seg = "/playpen-raid/jyn/NAISR/NAISR/examples/hippocampus/ADNI" #"/Users/jyn/jyn/research/projects/NAISR/NAISR/examples/hippocampus/ADNI" + list_subjects = os.listdir(path_seg) + + + root_adni_dataset = "/playpen-raid/jyn/NAISR/NAISR/examples/hippocampus/ADNI" #"/Users/jyn/jyn/research/projects/NAISR/NAISR/examples/hippocampus/ADNI" #"/Users/jyn/jyn/research/projects/NAISR/NAISR/publicdata/deformetrica/examples/longitudinal_atlas/landmark/3d/hippocampi/data" # "/playpen-raid/jyn/deformetrica/examples/longitudinal_atlas/landmark/3d/hippocampi/data" # "/home/jyn/NAISR/publicdata/deformetrica/examples/longitudinal_atlas/landmark/3d/hippocampi/data" + ROOTPATH_DEMMOG = 
"/playpen-raid/jyn/NAISR/NAISR/examples/hippocampus/PTDEMOG_18Aug2023.csv" #"/Users/jyn/jyn/research/projects/NAISR/NAISR/examples/hippocampus/PTDEMOG_18Aug2023.csv" # "/home/jyn/NAISR/examples/hippocampus/PTDEMOG_18Aug2023.csv" + ROOTPATH_DATASET_INFO = "/playpen-raid/jyn/NAISR/NAISR/examples/hippocampus/DATASET_INFO.csv" + list_subjects, dict_subj_scans, list_data = extract_patient_convariates(root_adni_dataset) + + ROOT_DATASET = "/playpen-raid/jyn/NAISR/NAISR/examples/hippocampus/" #"/playpen-raid/jyn/NAISR/NAISR/examples/hippocampus/ + + list_scan_data = create_adni_dataset_sheet(list_data) + + # save the data set info to a csv + savepath_dataset = "/home/jyn/NAISR/examples/hippocampus/3dshape.csv" #"/home/jyn/NAISR/examples/starman/2dshape_train.csv" + # save data + a = pd.DataFrame.from_records(list_scan_data) + a.loc[np.isnan(a['edu'].values), 'edu'] = a.loc[np.isfinite(a['edu'].values), 'edu'].mean() + a.to_csv(savepath_dataset) + + print('finished') + + diff --git a/publicdata/demo_investigate_adni.py b/publicdata/demo_investigate_adni.py new file mode 100644 index 0000000..be76071 --- /dev/null +++ b/publicdata/demo_investigate_adni.py @@ -0,0 +1,51 @@ +import os + +root_dataset = "/Users/jyn/jyn/research/projects/NAISR/NAISR/publicdata/deformetrica/examples/longitudinal_atlas/landmark/3d/hippocampi/data" #"/home/jyn/NAISR/publicdata/deformetrica/examples/longitudinal_atlas/landmark/3d/hippocampi/data" +list_scans = os.listdir(root_dataset) +list_subjects = [] +for ith_scan in list_scans: + if '.vtk' in ith_scan and 'sub' in ith_scan: + list_subjects.append(int(ith_scan.split('_')[0][-4::])) + +import xml.etree.ElementTree as ET +from bs4 import BeautifulSoup + +# Reading the data inside the xml +# file to a variable under the name +# data +path ='/Users/jyn/jyn/research/projects/NAISR/NAISR/publicdata/deformetrica/examples/longitudinal_atlas/landmark/3d/hippocampi/data_set.xml' +with open(path, 'r') as f: + data = f.read() + +# Passing the stored data 
inside +# the beautifulsoup parser, storing +# the returned object +Bs_data = BeautifulSoup(data, "xml") + +# Finding all instances of tag +# `unique` +b_unique = Bs_data.find_all('unique') + +print(b_unique) + +# Using find() to extract attributes +# of the first instance of the tag +b_name = Bs_data.find('child', {'name': 'Frank'}) + +print(b_name) + + +# Parse the XML file +tree = ET.parse('/Users/jyn/jyn/research/projects/NAISR/NAISR/publicdata/deformetrica/examples/longitudinal_atlas/landmark/3d/hippocampi/data_set.xml') # Replace with your XML file path +root = tree.getroot() +a = [] +b = [] +# Access elements and attributes +for child in root: + print(child.tag, child.attrib) + a.append(child.attrib) + +# Access specific element data +for element in root.findall('your_element'): # Replace 'your_element' with the desired element's tag name + value = element.text + print(value) diff --git a/publicdata/demo_pair_adni.py b/publicdata/demo_pair_adni.py new file mode 100644 index 0000000..7b0de72 --- /dev/null +++ b/publicdata/demo_pair_adni.py @@ -0,0 +1,349 @@ +import os +import numpy as np +import pandas as pd +import math +import pyvista as pv +import trimesh +from utils_3d import * +import skimage.measure as measure +import datetime +# def extract_subj_list(root_dataset): +# #root_dataset = "/Users/jyn/jyn/research/projects/NAISR/NAISR/publicdata/deformetrica/examples/longitudinal_atlas/landmark/3d/hippocampi/data" #"/home/jyn/NAISR/publicdata/deformetrica/examples/longitudinal_atlas/landmark/3d/hippocampi/data" +# list_scans = os.listdir(root_dataset) +# list_subjects = [] +# dict_subj_scans = {} +# for ith_scan in list_scans: +# if '.vtk' in ith_scan and 'sub' in ith_scan: +# ith_subj = int(ith_scan.split('_')[0][-4::]) +# list_subjects.append(ith_subj) +# # attach scans to subjects +# if ith_subj not in list(dict_subj_scans.keys()): +# dict_subj_scans[ith_subj] = [] +# dict_subj_scans[ith_subj].append(ith_scan) +# +# list_subjects = 
np.unique(np.array(list_subjects)) +# return list_subjects, dict_subj_scans + + +def extract_subj_list(root_dataset): + #root_dataset = "/Users/jyn/jyn/research/projects/NAISR/NAISR/publicdata/deformetrica/examples/longitudinal_atlas/landmark/3d/hippocampi/data" #"/home/jyn/NAISR/publicdata/deformetrica/examples/longitudinal_atlas/landmark/3d/hippocampi/data" + list_all_subjects = os.listdir(root_dataset) + list_of_subjects = [] + dict_subj_scans = {} + for ith_scan in list_all_subjects: + # current patient + if os.path.isdir(os.path.join(root_dataset, ith_scan)): + ith_subj = int(ith_scan.split('_')[-1]) + # current folder + current_scan_folder = os.path.join(root_dataset, ith_scan, "Hippocampal_Mask") + # attach scans to subjects + dict_subj_scans[ith_scan] = os.listdir(current_scan_folder) + list_of_subjects.append(ith_scan) + return list_of_subjects, dict_subj_scans + + +def transform_group_to_num(str_class): + if str_class == 'CN': + return 0. + elif str_class == 'MCI': + return 0.5 + elif str_class == 'AD': + return 1. 
# --- publicdata hippocampus 3-D dataset preparation ---
# NOTE(review): extracted from a collapsed diff; module-level imports
# (os, numpy as np, pandas as pd, pyvista as pv, trimesh) and the helpers
# extract_subj_list / transform_group_to_num are defined earlier in the file.

def extract_patient_convariates(root_adni_dataset):
    """Collect per-scan covariates (age, education, diagnosis group) for every
    subject found under *root_adni_dataset*.

    Reads the module-level globals ROOTPATH_DEMMOG and ROOTPATH_DATASET_INFO
    (CSV paths), which must be set before the call.

    Returns:
        (list_subjects, dict_subj_scans, list_data) where list_data is a list
        of dicts with keys 'ID', 'age', 'edu', 'group', 'scan', 'path'.
    """
    list_subjects, dict_subj_scans = extract_subj_list(root_adni_dataset)
    pd_demmog = pd.read_csv(ROOTPATH_DEMMOG)
    pd_dataset_info = pd.read_csv(ROOTPATH_DATASET_INFO)
    pd_dataset_info['Study Date'] = pd.to_datetime(pd_dataset_info['Study Date'], yearfirst=True)

    list_data = []
    for ith_subject in list_subjects:
        # RID is encoded as the last underscore-separated token of the subject ID.
        RID = int(ith_subject.split('_')[-1])
        df_current_subj = pd_demmog[pd_demmog['RID'] == RID]
        # Education level: maximum PTEDUCAT value across the subject's visits.
        edu_level = np.asarray(df_current_subj["PTEDUCAT"]).max()

        current_datasetinfo = pd_dataset_info[pd_dataset_info['Subject ID'] == ith_subject]
        for ith_scan in dict_subj_scans[ith_subject]:
            scandate = ith_scan.split("_")[0]
            current_folder = os.path.join(root_adni_dataset, ith_subject, "Hippocampal_Mask", ith_scan)
            current_scanfile_folder = os.path.join(current_folder, os.listdir(current_folder)[0])
            current_scanfile = os.listdir(current_scanfile_folder)[0]
            current_path = os.path.join(current_scanfile_folder, current_scanfile)

            # Match the scan to the dataset-info row whose study date is within
            # one day of the date encoded in the scan folder name.
            time_diff = (current_datasetinfo['Study Date'] - pd.to_datetime(scandate)) / pd.Timedelta(1, 'D')
            current_age = float(current_datasetinfo[time_diff.abs() <= 1]['Age'].values[0])
            current_group = current_datasetinfo[time_diff.abs() <= 1]['Research Group'].values[0]
            current_group = float(transform_group_to_num(current_group))

            list_data.append({'ID': ith_subject,
                              'age': current_age,
                              "edu": edu_level,
                              'group': current_group,
                              "scan": current_scanfile,
                              "path": current_path})
    print("Done")
    return list_subjects, dict_subj_scans, list_data


def pair_covariates_with_shapes(list_data):
    """Attach 3-D shape / SDF artifact paths to each scan record, regenerating
    missing or corrupted artifacts by shelling out to generate_shapes_and_sdfs.py.

    Reads the module-level globals ROOT_DATASET and PATH_TARGET.

    Returns:
        list of per-scan dicts extended with '3dshape', '3dsdf',
        '3dvis_source' and '3dvis' paths.
    """
    list_scan_data = []
    for ith_dict in list_data:
        current_path = ith_dict['path']
        current_scan = ith_dict['scan']
        stem = current_scan.split('.')[0]

        savepath_on_npy = os.path.join(ROOT_DATASET, '3dshape', stem + '_on.npy')
        savepath_off_npy = os.path.join(ROOT_DATASET, '3dsdf', stem + '_off.npy')
        savepath_on_pv_source = os.path.join(ROOT_DATASET, '3dvis', stem + '_on_source.stl')
        savepath_on_pv_aligned = os.path.join(ROOT_DATASET, '3dvis', stem + '_on_aligned.stl')

        # Regeneration command shared by the "corrupted" and "missing" branches.
        cmd = ('python generate_shapes_and_sdfs.py --path_seg ' + str(current_path)
               + ' --root_dataset ' + str(ROOT_DATASET)
               + ' --path_target ' + str(PATH_TARGET))

        if os.path.exists(savepath_on_npy) and os.path.exists(savepath_off_npy):
            try:
                np.load(savepath_off_npy)
            except Exception:  # narrowed from a bare except: treat as corrupted output
                print("processing " + str(current_path))
                os.system(cmd)
                print("Failure OF Extraction, re-extraction")
        else:
            print("missing " + str(current_path))
            os.system(cmd)

        # Record the scan once regardless of branch (the original duplicated
        # this dict literal in both branches).
        list_scan_data.append({"ID": current_scan,
                               "PID": ith_dict['ID'],
                               'age': ith_dict['age'],
                               'edu': ith_dict['edu'],
                               'path': current_path,
                               '3dshape': savepath_on_npy,
                               '3dsdf': savepath_off_npy,
                               '3dvis_source': savepath_on_pv_source,
                               '3dvis': savepath_on_pv_aligned})
    return list_scan_data


def read_mesh_as_trimesh(path_mesh):
    """Load a mesh file with pyvista and convert it to a trimesh.Trimesh."""
    pv_data = pv.read(path_mesh)
    # pyvista packs faces as [n, i0, i1, i2, n, ...]; all faces here are triangles.
    faces = np.reshape(pv_data.faces, (-1, 4))[:, [1, 2, 3]]
    return trimesh.Trimesh(vertices=pv_data.points, faces=faces)


if __name__ == "__main__":
    root_adni_dataset = "/Users/jyn/jyn/research/projects/NAISR/NAISR/examples/hippocampus/ADNI"
    ROOTPATH_DEMMOG = "/Users/jyn/jyn/research/projects/NAISR/NAISR/examples/hippocampus/PTDEMOG_18Aug2023.csv"
    ROOTPATH_DATASET_INFO = "/Users/jyn/jyn/research/projects/NAISR/NAISR/examples/hippocampus/DATASET_INFO.csv"
    list_subjects, dict_subj_scans, list_data = extract_patient_convariates(root_adni_dataset)

    ROOT_DATASET = "/Users/jyn/jyn/research/projects/NAISR/NAISR/examples/hippocampus/"
    # Fixed target segmentation used as the ICP registration reference.
    PATH_TARGET = "/Users/jyn/jyn/research/projects/NAISR/NAISR/examples/hippocampus/ADNI/941_S_1202/Hippocampal_Mask/2007-01-30_09_16_45.0/I147765/ADNI_941_S_1202_MR_Hippocampal_Mask_Hi_20090702092745323_S25680_I147765.nii"

    list_scan_data = pair_covariates_with_shapes(list_data)

    savepath_dataset = "/Users/jyn/jyn/research/projects/NAISR/NAISR/examples/hippocampus/3dshape.csv"

    a = pd.DataFrame.from_records(list_scan_data)
    # Impute missing education with the mean of the finite values.
    a.loc[np.isnan(a['edu'].values), 'edu'] = a.loc[np.isfinite(a['edu'].values), 'edu'].mean()
    # BUG FIX: the original saved pd.DataFrame.from_records(list_scan_data)
    # again, discarding the imputation computed into `a` just above.
    a.to_csv(savepath_dataset)

    print('finished')
# --- publicdata/demo_random_deform.py ---
import numpy as np
from scipy.ndimage import gaussian_filter  # kept: used by the removed smoothed-noise variant


def rotate_polygon(polygon, rotation_deg=30):
    """Rotate an (N, 2) polygon counter-clockwise by *rotation_deg* degrees."""
    theta = np.deg2rad(rotation_deg)
    rotation_matrix = np.array([
        [np.cos(theta), -np.sin(theta)],
        [np.sin(theta), np.cos(theta)],
    ])
    return np.dot(polygon, rotation_matrix.T)


def translate_polygon(polygon, translation=(10, 10)):
    """Translate an (N, 2) polygon by *translation*."""
    return polygon + translation


def Gaussian(x, z, sigma=0.5):
    """Isotropic Gaussian kernel between the rows of *x* and the point *z*."""
    return np.exp((-(np.linalg.norm(x - z, axis=1) ** 2)) / (2 * sigma ** 2))


def create_deformation(points, control_point, VECTOR):
    """Displacement field: Gaussian weight around *control_point* times *VECTOR*."""
    return Gaussian(points, control_point)[:, None] @ VECTOR[None, :]


def random_deform_polygon(arr_contour_pts, std_deviation=1.0, scale=2.0):
    """Deform the contour with random displacement vectors anchored at a
    5x5 control-point grid spanning [-2, 2]^2.

    NOTE(review): std_deviation and scale are currently unused (kept for
    interface compatibility); per-control-point magnitude is fixed at 0.5.
    """
    nx, ny = (5, 5)
    xv, yv = np.meshgrid(np.linspace(-2, 2, nx), np.linspace(-2, 2, ny))
    arr_control_pts = np.concatenate((xv.ravel()[:, None], yv.ravel()[:, None]), axis=1)

    arr_deformed = arr_contour_pts.copy()
    for ith_con_pt in arr_control_pts:
        # Each displacement is computed from the *original* contour points.
        arr_deformed += create_deformation(arr_contour_pts, ith_con_pt, np.random.rand(2) * 0.5)
    return arr_deformed


def deform_polygon(polygon, rotation_deg=130, translation=(10, 10), std_deviation=1.0, deformation_scale=2.0):
    """Random deformation of *polygon*; the rotation/translation steps of the
    original pipeline are disabled (kept as parameters for compatibility)."""
    return random_deform_polygon(polygon, std_deviation, deformation_scale)


def show_shapely_polygon(polygon):
    """Plot the outline of a shapely polygon."""
    import matplotlib.pyplot as plt  # local: keeps the module importable headless
    fig, ax = plt.subplots()
    ax.plot(*polygon.exterior.xy, 'b-', label='Polygon')
    ax.set_aspect('equal', adjustable='datalim')
    ax.legend()
    plt.show()


if __name__ == "__main__":
    import pyvista as pv
    from shapely.geometry import Polygon

    path_starman_vtk = "/Users/jyn/jyn/research/projects/NAISR/publicdata/deformetrica/examples/longitudinal_atlas/landmark/2d/starmen_for_simulation/data_ground_truth/ForSimulation__Template__GroundTruth.vtk"
    path_control_points = "/Users/jyn/jyn/research/projects/NAISR/publicdata/deformetrica/examples/longitudinal_atlas/landmark/2d/starmen_for_simulation/data_ground_truth/ForSimulation__ControlPoints__GoundTruth.txt"

    mesh = pv.read(path_starman_vtk)
    arr_contour_pts = mesh.points[:, [0, 1]]
    deformed_polygon = deform_polygon(arr_contour_pts)
    polygon = Polygon(deformed_polygon)
    show_shapely_polygon(polygon)

    print(deformed_polygon)
# --- publicdata/demo_split_adni_dataset.py ---
import os
import numpy as np
import pandas as pd


def transform_group_to_num(str_class):
    """Map an ADNI diagnosis label to a numeric code: CN=0, MCI=1, AD=2.

    Raises:
        ValueError: for an unrecognized label. (The original implicitly
        returned None, which only surfaced later as an opaque
        ``float(None)`` TypeError.)
    """
    mapping = {'CN': 0, 'MCI': 1, 'AD': 2}
    try:
        return mapping[str_class]
    except KeyError:
        raise ValueError('unknown research group: %r' % (str_class,)) from None


def extract_subj_list(root_dataset):
    """Scan *root_dataset* for subject folders.

    Returns:
        (list of subject folder names,
         {subject name: list of scan folder names under Hippocampal_Mask}).
    """
    list_of_subjects = []
    dict_subj_scans = {}
    for ith_entry in os.listdir(root_dataset):
        # Only directories are subjects; stray files are ignored.
        if os.path.isdir(os.path.join(root_dataset, ith_entry)):
            current_scan_folder = os.path.join(root_dataset, ith_entry, "Hippocampal_Mask")
            dict_subj_scans[ith_entry] = os.listdir(current_scan_folder)
            list_of_subjects.append(ith_entry)
    return list_of_subjects, dict_subj_scans


def extract_patient_convariates(root_adni_dataset):
    """Collect per-scan covariates (age, education, diagnosis group).

    NOTE(review): duplicates the function of the same name in the dataset
    creation script; kept so this module stays self-contained. Reads the
    module-level globals ROOTPATH_DEMMOG and ROOTPATH_DATASET_INFO.
    """
    list_subjects, dict_subj_scans = extract_subj_list(root_adni_dataset)
    pd_demmog = pd.read_csv(ROOTPATH_DEMMOG)
    pd_dataset_info = pd.read_csv(ROOTPATH_DATASET_INFO)
    pd_dataset_info['Study Date'] = pd.to_datetime(pd_dataset_info['Study Date'], yearfirst=True)

    list_data = []
    for ith_subject in list_subjects:
        RID = int(ith_subject.split('_')[-1])
        df_current_subj = pd_demmog[pd_demmog['RID'] == RID]
        # Education level: maximum PTEDUCAT value across visits.
        edu_level = np.asarray(df_current_subj["PTEDUCAT"]).max()

        current_datasetinfo = pd_dataset_info[pd_dataset_info['Subject ID'] == ith_subject]
        for ith_scan in dict_subj_scans[ith_subject]:
            scandate = ith_scan.split("_")[0]
            current_folder = os.path.join(root_adni_dataset, ith_subject, "Hippocampal_Mask", ith_scan)
            current_scanfile_folder = os.path.join(current_folder, os.listdir(current_folder)[0])
            current_scanfile = os.listdir(current_scanfile_folder)[0]
            current_path = os.path.join(current_scanfile_folder, current_scanfile)

            # Row whose study date lies within one day of the scan-folder date.
            time_diff = (current_datasetinfo['Study Date'] - pd.to_datetime(scandate)) / pd.Timedelta(1, 'D')
            current_age = float(current_datasetinfo[time_diff.abs() <= 1]['Age'].values[0])
            current_group = current_datasetinfo[time_diff.abs() <= 1]['Research Group'].values[0]
            current_group = float(transform_group_to_num(current_group))

            list_data.append({'ID': ith_subject,
                              'age': current_age,
                              "edu": edu_level,
                              'group': current_group,
                              "scan": current_scanfile,
                              "path": current_path})
    print("Done")
    return list_subjects, dict_subj_scans, list_data


if __name__ == "__main__":
    # Heavy/third-party imports only needed when run as a script.
    import yaml
    from sklearn.model_selection import train_test_split

    path_adni_dataset = "/home/jyn/NAISR/examples/hippocampus/3dshape.csv"
    root_adni_dataset = "/playpen-raid/jyn/NAISR/NAISR/examples/hippocampus/ADNI"
    ROOTPATH_DEMMOG = "/home/jyn/NAISR/examples/hippocampus/PTDEMOG_18Aug2023.csv"
    ROOTPATH_DATASET_INFO = "/home/jyn/NAISR/examples/hippocampus/DATASET_INFO.csv"

    df_dataset = pd.read_csv(path_adni_dataset)
    list_subjects, dict_subj_scans, list_data = extract_patient_convariates(root_adni_dataset)
    # Patient-level split so all scans of a subject land in the same split.
    train_split, test_split = train_test_split(list_subjects, train_size=0.8)

    def _split_scans(subject_ids):
        """Return (all scan IDs, single-scan IDs, multi-scan groups) for *subject_ids*."""
        all_scans, singles, multiples = [], [], []
        for pid in subject_ids:
            scans = list(df_dataset[df_dataset['PID'] == pid]['ID'].values)
            all_scans += scans
            if len(scans) == 1:
                singles += scans
            elif len(scans) > 1:
                multiples.append({"name": pid, "value": scans})
        return all_scans, singles, multiples

    # The original duplicated this loop verbatim for train and test.
    list_train, list_train_single, list_train_multiple = _split_scans(train_split)
    list_test, list_test_single, list_test_multiple = _split_scans(test_split)

    dict_split = {'train': list_train,
                  'test': list_test,
                  'train_single': list_train_single,
                  'train_multiple': list_train_multiple,
                  'test_single': list_test_single,
                  'test_multiple': list_test_multiple}

    savepath = '/home/jyn/NAISR/examples/hippocampus/newsplit.yaml'
    with open(savepath, 'w') as f:
        yaml.dump(dict_split, f, default_flow_style=False, sort_keys=False)

    # Timeline: only subjects with multiple scans.
    dict_timeline = {str(c['name']): c['value']
                     for c in dict_split['train_multiple'] + dict_split['test_multiple']}

    savepath = '/home/jyn/NAISR/examples/pediatric_airway/timeline_patients.yaml'
    with open(savepath, 'w') as f:
        yaml.dump(dict_timeline, f, default_flow_style=False, sort_keys=False)

    print('finished')
# --- publicdata/demo_starman.py ---
import numpy as np

# Fixed direction applied by create_deformation (deforms along +y only).
UP_VECTOR = np.array([0., 1., 0.])


def Gaussian(x, z, sigma=0.5):
    """Isotropic Gaussian kernel between the rows of *x* and the point *z*."""
    return np.exp((-(np.linalg.norm(x - z, axis=1) ** 2)) / (2 * sigma ** 2))


def create_deformation(points, control_point):
    """Displacement field: Gaussian weight around *control_point* times UP_VECTOR."""
    return Gaussian(points, control_point)[:, None] @ UP_VECTOR[None, :]


def apply_deformation(points, deformation, covariate_value=1.):
    """Add *deformation* scaled by *covariate_value* to *points*."""
    return points + deformation * covariate_value


def compute_normals(arr_contour):
    """Per-edge normals of a closed 2-D contour given as (N, >=2) points.

    Each edge vector is rotated 90 degrees ((dx, dy) -> (-dy, dx)) and unit
    normalized; a zero z-component is appended, giving an (N, 3) array.
    """
    pts = arr_contour.copy()[:, [0, 1]]
    count = len(pts)
    normals = []
    for i in range(count):
        x1, y1 = pts[i]
        x2, y2 = pts[(i + 1) % count]  # wrap around to close the contour
        normals.append((-(y2 - y1), x2 - x1))
    unit = np.array(normals) / np.linalg.norm(normals, axis=-1, keepdims=True)
    return np.concatenate((unit, np.zeros_like(unit[:, [0]])), axis=-1)


def generate_off_surface_points(number_of_points=250000):
    """Uniform random 3-D samples in [-180, 180)^3 (6 * 60 per axis)."""
    coords = (np.random.rand(number_of_points, 3) - 0.5)
    return np.concatenate((coords[:, [0]] * 6 * 60,
                           coords[:, [1]] * 6 * 60,
                           coords[:, [2]] * 6 * 60), axis=-1)


def apply_registration_to_surface(surf, R, T):
    """Apply rotation *R* and translation *T* to a trimesh surface; returns a new mesh."""
    import trimesh
    new_verts = T + np.matmul(R, np.array(surf.vertices).T)
    return trimesh.Trimesh(vertices=new_verts.T, faces=surf.faces)


def get_centroids(arr_bbx):
    """Center of an axis-aligned bounding box (xmin, xmax, ymin, ymax, zmin, zmax)."""
    xmin, xmax, ymin, ymax, zmin, zmax = arr_bbx
    return np.array([(xmin + xmax) / 2, (ymin + ymax) / 2, (zmin + zmax) / 2])


def calculate_sdf_from_mesh(surf_mesh):
    """Sample an SDF point cloud around *surf_mesh*; returns (N, 4) [x, y, z, sdf].

    NOTE(review): mixes pyvista-style (.points) and bounding-box attribute
    access — confirm the caller's mesh type exposes both.
    """
    import mesh_to_sdf
    from mesh_to_sdf import sample_sdf_near_surface

    mesh = surf_mesh
    centroid = get_centroids(mesh.bounds)
    vertices = mesh.points - centroid
    distances = np.max(np.linalg.norm(vertices, axis=1))

    # sample_sdf_near_surface normalizes the mesh to the unit sphere internally.
    points, sdf = sample_sdf_near_surface(surf_mesh, number_of_points=250000, sign_method='depth')

    off_surf_points = generate_off_surface_points(number_of_points=250000) + centroid
    off_surf_sdf = mesh_to_sdf.mesh_to_sdf(mesh, off_surf_points, surface_point_method='scan',
                                           sign_method='depth', bounding_radius=None,
                                           scan_count=100, scan_resolution=1000,
                                           sample_point_count=10000000, normal_sample_count=11)
    # NOTE(review): `0 * 0.1` evaluates to 0 — possibly an intended threshold
    # of 0.1; preserved as a strictly-positive (outside) filter.
    keep = off_surf_sdf > 0 * 0.1
    off_surf_points = off_surf_points[keep]
    off_surf_sdf = off_surf_sdf[keep]

    # De-normalize the near-surface samples back to world coordinates.
    points = points * distances + centroid
    sdf *= distances

    points = np.concatenate((points, off_surf_points), axis=-2)
    sdf = np.concatenate((sdf[:, None], off_surf_sdf[:, None]), axis=0)
    return np.concatenate((points, sdf), axis=-1)


if __name__ == "__main__":
    import pyvista as pv

    path_starman_vtk = "/Users/jyn/jyn/research/projects/NAISR/NAISR/publicdata/deformetrica/examples/longitudinal_atlas/landmark/2d/starmen_for_simulation/data_ground_truth/ForSimulation__Template__GroundTruth.vtk"
    path_control_points = "/Users/jyn/jyn/research/projects/NAISR/NAISR/publicdata/deformetrica/examples/longitudinal_atlas/landmark/2d/starmen_for_simulation/data_ground_truth/ForSimulation__ControlPoints__GoundTruth.txt"

    mesh = pv.read(path_starman_vtk)
    control_points = np.loadtxt(path_control_points)
    # Lift the 2-D control points to 3-D (z = 0).
    arr_cp = np.concatenate((control_points, np.zeros_like(control_points[:, [0]])), axis=1)

    # 2-D preview of the template contour.
    chart = pv.Chart2D()
    _ = chart.line(mesh.points[:, 0], mesh.points[:, 1], "y", 4)

    pl = pv.Plotter()
    pl.add_mesh(mesh)
    pl.add_points(arr_cp[[2]], color='r')

    # Covariate value 1.0 at control point 2; also attach recomputed normals.
    arr_deformed = apply_deformation(points=mesh.points,
                                     deformation=create_deformation(mesh.points, control_point=arr_cp[2]),
                                     covariate_value=1.)
    deformed_mesh = mesh.copy()
    deformed_mesh.points = arr_deformed
    deformed_mesh.normals = compute_normals(deformed_mesh.points)
    pl.add_mesh(deformed_mesh, color='y')

    # Sweep covariate values at control point 0 (the original repeated this
    # stanza four times verbatim).
    for cov_value, color in [(0.1, 'g'), (0.5, 'b'), (-0.1, 'cyan'), (-0.5, 'purple')]:
        arr_deformed = apply_deformation(points=mesh.points,
                                         deformation=create_deformation(mesh.points, control_point=arr_cp[0]),
                                         covariate_value=cov_value)
        swept_mesh = mesh.copy()
        swept_mesh.points = arr_deformed
        pl.add_mesh(swept_mesh, color=color)

    pl.view_xy()
    pl.show()
# --- publicdata/demo_vis_starman.py ---
import numpy as np
import matplotlib.pyplot as plt
from skimage import measure

# Construct a smooth test image.
x, y = np.ogrid[-np.pi:np.pi:100j, -np.pi:np.pi:100j]
r = np.sin(np.exp(np.sin(x)**3 + np.cos(y)**2))

# Find contours at the 0.5 iso-value (the original comment said 0.8 while the
# code used 0.5).
contours = measure.find_contours(r, 0.5)

# Display the image and overlay all contours found.
fig, ax = plt.subplots()
ax.imshow(r, cmap=plt.cm.gray)
for contour in contours:
    ax.plot(contour[:, 1], contour[:, 0], linewidth=2)
ax.axis('image')
ax.set_xticks([])
ax.set_yticks([])
plt.show()

# Visualize a saved 2-D SDF sample cloud (x, y, sdf columns).
datapath = "/playpen-raid/jyn/NAISR/NAISR/examples/starman/train/2dsdf/0000_0_off.npy"
np_sdf = np.load(datapath)

plt.scatter(x=np_sdf[:, 0], y=np_sdf[:, 1], cmap='coolwarm', c=np_sdf[:, 2], s=1)
plt.colorbar()
plt.show()


# --- publicdata/generate_shapes_and_sdfs.py ---
import argparse  # BUG FIX: used in __main__ below but was never imported
import os
import math
import datetime
import pandas as pd
import pyvista as pv
import trimesh
import skimage.measure as measure
from utils_3d import *  # provides sitk and calculate_sdf_from_mesh


def read_origin_spacing(path):
    """Read a label volume; keep only label 20.

    Returns:
        (label array with only label 20 retained, origin, spacing).
    """
    img = sitk.ReadImage(path)
    arr = sitk.GetArrayFromImage(img).transpose(2, 1, 0)
    arr_rst = np.zeros_like(arr)
    # Some exports scale labels by 256; detect by comparing label *sets*.
    # (The original elementwise array comparison breaks whenever the number
    # of distinct labels differs from 3.)
    if set(np.unique(arr).tolist()) != {0, 20, 21}:
        arr = arr / 256
        print('Inconsistent label value: ' + path)
    arr_rst[arr == 20] = 20
    Origin = np.array(img.GetOrigin())
    Spacing = np.array(img.GetSpacing())
    return arr_rst, Origin, Spacing


def reverse_coords(arr):
    """Swap (x, y, z) -> (z, y, x) for every row of *arr*."""
    return np.array([[z, y, x] for x, y, z in arr])


def read_and_align_mesh(path_source, path_target):
    """Build surfaces for the source/target segmentations and rigidly align
    the source to the target with ICP.

    Returns:
        (aligned source mesh, original source mesh).
    """
    mesh_target = surface_construction(path_target)
    mesh_source = surface_construction(path_source)

    _, aligned_vertices, _ = trimesh.registration.icp(mesh_source.vertices, mesh_target.vertices, scale=False)
    mesh_aligned = mesh_source.copy()
    mesh_aligned.vertices = aligned_vertices
    return mesh_aligned, mesh_source


def surface_construction(pathseg):
    """Marching-cubes surface of label 20, centered at its bounding-box centroid."""
    arr_airway, Origin, Spacing = read_origin_spacing(pathseg)
    # marching_cubes_lewiner was removed from modern scikit-image;
    # measure.marching_cubes uses the same Lewiner algorithm by default.
    verts, faces, norm, val = measure.marching_cubes(arr_airway,
                                                     spacing=Spacing,
                                                     allow_degenerate=True)
    mesh = trimesh.Trimesh(vertices=verts, faces=faces)
    mesh.vertices = mesh.vertices - mesh.bounding_box.centroid
    return mesh


def generate_shape_and_sdf_for_a_seg(path_seg, root_dataset, path_target):
    """Export, for one segmentation: aligned/source surfaces (.stl), on-surface
    points + normals (.npy) and an off-surface SDF sample cloud (.npy)."""
    # Scan ID: file name without its extension.
    scan_id = path_seg.split('/')[-1][0:-4]

    savepath_on_npy = os.path.join(root_dataset, '3dshape', scan_id.split('.')[0] + '_on.npy')
    savepath_off_npy = os.path.join(root_dataset, '3dsdf', scan_id.split('.')[0] + '_off.npy')
    savepath_on_pv_aligned = os.path.join(root_dataset, '3dvis', scan_id.split('.')[0] + '_on_aligned.stl')
    savepath_on_pv_source = os.path.join(root_dataset, '3dvis', scan_id.split('.')[0] + '_on_source.stl')

    mesh, mesh_source = read_and_align_mesh(path_seg, path_target)
    mesh.export(savepath_on_pv_aligned)
    mesh_source.export(savepath_on_pv_source)
    # Free the unaligned copy before the expensive SDF sampling.
    del mesh_source

    # calculate_sdf_from_mesh comes from utils_3d (star import above).
    arr_off = calculate_sdf_from_mesh(mesh)

    np.save(savepath_on_npy, np.concatenate((np.array(mesh.vertices), np.array(mesh.vertex_normals)), axis=-1))
    np.save(savepath_off_npy, arr_off)
    return


def generate_volume_for_a_seg(path_seg, path_target):
    """Placeholder: aligns the mesh but produces no output yet.

    BUG FIX: read_and_align_mesh returns an (aligned, source) tuple; the
    original bound the whole tuple to a single name.
    """
    mesh_aligned, _ = read_and_align_mesh(path_seg, path_target)
    return


if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        "Given some groups (can be scans for one subject), this script generates their airway min-max plot")
    parser.add_argument("--path_seg", type=str, required=True)
    parser.add_argument("--root_dataset", type=str, required=True)
    parser.add_argument("--path_target", type=str, required=True)
    args = parser.parse_args()

    generate_shape_and_sdf_for_a_seg(args.path_seg, args.root_dataset, args.path_target)
# --- publicdata/run_generation.py ---
from utils_2d import *
from deformation import *
import pandas as pd
from tqdm import trange


def _build_template(path_starman_vtk, path_control_points):
    """Load the starman template, resample its contour to 1000 points and
    rebuild the closed polyline connectivity; save template artifacts.

    Returns:
        (pv_template, arr_control_pts).
    """
    pv_template = pv.read(path_starman_vtk)
    arr_control_pts = np.loadtxt(path_control_points)

    arr_resampled_pts = resample_polygon(pv_template.points, n_points=1000)
    n = len(arr_resampled_pts)
    # pyvista line cells: [2, i, i+1] per segment, plus a closing segment.
    segments = np.concatenate((np.ones(n - 1)[:, None] * 2,
                               np.arange(n - 1)[:, None],
                               np.arange(n - 1)[:, None] + 1), axis=-1)
    segments = list(segments)
    segments.append(np.array([2, n - 1, 0]))  # close the contour
    segments = np.array(segments).ravel()

    pv_template.points = np.concatenate((arr_resampled_pts, np.zeros_like(arr_resampled_pts[:, [0]])), axis=-1)
    pv_template.lines = segments.astype('int')
    pv_template.save("/home/jyn/NAISR/examples/starman/template.vtk")
    np.save("/home/jyn/NAISR/examples/starman/control_template.npy", arr_control_pts)
    return pv_template, arr_control_pts


def _generate_split(pv_template, arr_control_pts, rootpath, savepath_csv,
                    num_of_patients, num_of_covariates):
    """Generate one dataset split: random per-subject deformations, per-scan
    shape/SDF records, and a summary CSV at *savepath_csv*."""
    if not os.path.exists(rootpath):
        os.mkdir(rootpath)

    list_data = []
    for ith_subject in trange(num_of_patients):
        # Each synthetic subject gets 1-9 longitudinal observations.
        num_of_observations = np.random.randint(1, 10)

        PID = '{0:04}'.format(ith_subject)
        arr_deformed_contour, arr_deformed_controls = deform_polygon_randomly(
            pv_template.points[:, [0, 1]], arr_control_pts[:, [0, 1]])
        save_personalized_template(pv_template, arr_deformed_contour, rootpath, PID)
        save_personalized_template_contour_pts(arr_deformed_contour, rootpath, PID)
        save_personalized_template_control_pts(arr_deformed_controls, rootpath, PID)

        pv_deformed_contour = pv_template.copy()
        # Lift the deformed 2-D contour back to 3-D (z = 0).
        arr_deformed_contour = np.concatenate(
            (arr_deformed_contour, np.zeros_like(arr_deformed_contour[:, [0]])), axis=-1)
        pv_deformed_contour.points = arr_deformed_contour

        list_data += generate_dataset_for_one_subject(PID,
                                                      pv_deformed_contour,
                                                      arr_deformed_controls,
                                                      num_of_shapes=num_of_observations,
                                                      num_of_covariates=num_of_covariates,
                                                      rootoath=rootpath)  # callee's parameter is spelled 'rootoath'

    pd.DataFrame.from_records(list_data).to_csv(savepath_csv)


if __name__ == "__main__":
    np.random.seed(980)

    path_starman_vtk = "/home/jyn/NAISR/publicdata/deformetrica/examples/longitudinal_atlas/landmark/2d/starmen_for_simulation/data_ground_truth/ForSimulation__Template__GroundTruth.vtk"
    path_control_points = "/home/jyn/NAISR/publicdata/deformetrica/examples/longitudinal_atlas/landmark/2d/starmen_for_simulation/data_ground_truth/ForSimulation__ControlPoints__GoundTruth.txt"

    # The original rebuilt (and re-saved) the identical template before each
    # split; building it once consumes no RNG and changes no outputs.
    pv_template, arr_control_pts = _build_template(path_starman_vtk, path_control_points)

    # Train split.
    _generate_split(pv_template, arr_control_pts,
                    rootpath='/playpen-raid/jyn/NAISR/NAISR/examples/starman/train/',
                    savepath_csv="/home/jyn/NAISR/examples/starman/2dshape_train.csv",
                    num_of_patients=1000, num_of_covariates=2)

    # Test split. NOTE(review): the original used num_of_covariates=4 here vs
    # 2 for train — preserved, but verify the asymmetry is intentional.
    _generate_split(pv_template, arr_control_pts,
                    rootpath='/playpen-raid/jyn/NAISR/NAISR/examples/starman/test/',
                    savepath_csv="/home/jyn/NAISR/examples/starman/2dshape_test.csv",
                    num_of_patients=1000, num_of_covariates=4)
def show_shapely_polygon(polygon):
    """Plot the exterior ring of a shapely Polygon (debug helper).

    BUG FIX: the parameter was named ``polygo`` while the body used
    ``polygon``, which silently resolved to the module-level global
    instead of the argument.
    """
    fig, ax = plt.subplots()
    ax.plot(*polygon.exterior.xy, 'b-', label='Polygon')
    ax.set_aspect('equal', adjustable='datalim')
    ax.legend()
    plt.show()


def signed_distance_polygon(polygon, point):
    """
    Calculate the signed distance from a point to a polygon.

    Parameters:
        polygon (Polygon): Shapely Polygon object representing the polygon.
        point (Point): Shapely Point object representing the point.

    Returns:
        float: Signed distance; negative inside the polygon, positive outside.
    """
    if polygon.contains(point):
        return -point.distance(polygon.boundary)
    return point.distance(polygon.boundary)


def extract_2d_sdf_from_contour(query_points, arr_contour_pts):
    """Signed distance of each query point to the polygon built from the contour.

    Returns an (N, 1) array of signed distances (negative = inside).
    """
    polygon = Polygon(arr_contour_pts)
    sdf_values = [signed_distance_polygon(polygon, Point(q)) for q in query_points]
    return np.array(sdf_values)[:, None]


def generate_off_surface_points(number_of_points=250000):
    """Uniformly sample 2-D query points in [-2, 2)^2 (the data's approximate range)."""
    x = (np.random.rand(number_of_points) - 0.5) * 4
    y = (np.random.rand(number_of_points) - 0.5) * 4
    return np.concatenate((x[:, None], y[:, None]), axis=-1)


def calculate_2dsdf_from_polygon(arr_contour_pts, number_of_points=500000):
    """Sample random query points and return rows of ``(x, y, signed_distance)``."""
    query_points = generate_off_surface_points(number_of_points=number_of_points)
    arr_sdf = extract_2d_sdf_from_contour(query_points, arr_contour_pts)
    return np.concatenate((query_points, arr_sdf), axis=-1)


path_starman_vtk = "/Users/jyn/jyn/research/projects/NAISR/publicdata/deformetrica/examples/longitudinal_atlas/landmark/2d/starmen_for_simulation/data_ground_truth/ForSimulation__Template__GroundTruth.vtk"
"/Users/jyn/jyn/research/projects/NAISR/publicdata/deformetrica/examples/longitudinal_atlas/landmark/2d/starmen_for_simulation/data_ground_truth/ForSimulation__Template__GroundTruth.vtk" +path_control_points = "/Users/jyn/jyn/research/projects/NAISR/publicdata/deformetrica/examples/longitudinal_atlas/landmark/2d/starmen_for_simulation/data_ground_truth/ForSimulation__ControlPoints__GoundTruth.txt" + # "/Users/jyn/jyn/research/projects/NAISR/publicdata/deformetrica/examples/longitudinal_atlas/landmark/2d/starmen/data/ForInitialization__ControlPoints__FromLongitudinalAtlas.txt" + +mesh = pv.read(path_starman_vtk) + + +# Define the vertices of the polygon +polygon_vertices = mesh.points #[(0, 0), (2, 0), (2, 2), (1, 3), (0, 2)] + +polygon = Polygon(polygon_vertices) +show_shapely_polygon(polygon) +# Define a point for which you want to calculate the signed distance +query_point = Point(0, 0) +query_point2 = Point(polygon_vertices[0]) + +# Calculate the signed distance +signed_dist = signed_distance_polygon(polygon, query_point) +print("Signed distance:", signed_dist) + diff --git a/publicdata/utils_2d.py b/publicdata/utils_2d.py new file mode 100644 index 0000000..488c725 --- /dev/null +++ b/publicdata/utils_2d.py @@ -0,0 +1,305 @@ +import os + + +import argparse + +import numpy as np + +from shapely.geometry import Point, LineString, Polygon +import matplotlib.pyplot as plt +import pyvista as pv + +import utils +UP_VECTOR = np.array([0., 1.]) +FORWARD_VECTOR = np.array([1., 0.]) #* 0.4 + +def cond_mkdir(path): + if not os.path.exists(path): + os.makedirs(path) + +def resample_polygon(xy: np.ndarray, n_points: int = 100) -> np.ndarray: + # Cumulative Euclidean distance between successive polygon points. 
def resample_polygon(xy: np.ndarray, n_points: int = 100) -> np.ndarray:
    """Resample a polyline/polygon outline to *n_points* evenly spaced vertices.

    Points are spaced uniformly in arc length along the outline; x and y are
    interpolated independently against the cumulative edge length.
    """
    # Cumulative Euclidean distance between successive polygon points;
    # this is the "x" axis for the interpolation below.
    d = np.cumsum(np.r_[0, np.sqrt((np.diff(xy, axis=0) ** 2).sum(axis=1))])

    # Linearly spaced sample positions along the cumulative arc length.
    d_sampled = np.linspace(0, d.max(), n_points)

    # Interpolate x and y coordinates independently.
    xy_interp = np.c_[
        np.interp(d_sampled, d, xy[:, 0]),
        np.interp(d_sampled, d, xy[:, 1]),
    ]
    return xy_interp


def show_shapely_polygon(polygon):
    """Plot the exterior ring of a shapely Polygon (debug helper)."""
    fig, ax = plt.subplots()
    ax.plot(*polygon.exterior.xy, 'b-', label='Polygon')
    ax.set_aspect('equal', adjustable='datalim')
    ax.legend()
    plt.show()


def signed_distance_polygon(polygon, point):
    """
    Calculate the signed distance from a point to a polygon.

    Parameters:
        polygon (Polygon): Shapely Polygon object representing the polygon.
        point (Point): Shapely Point object representing the point.

    Returns:
        float: Signed distance; negative inside the polygon, positive outside.
    """
    if polygon.contains(point):
        return -point.distance(polygon.boundary)
    return point.distance(polygon.boundary)


def extract_2d_sdf_from_contour(query_points, arr_contour_pts):
    """Signed distance of each query point to the polygon built from the contour.

    Returns an (N, 1) array of signed distances (negative = inside).
    """
    polygon = Polygon(arr_contour_pts)
    sdf_values = [signed_distance_polygon(polygon, Point(q)) for q in query_points]
    return np.array(sdf_values)[:, None]


def generate_off_surface_points(number_of_points=250000):
    """Uniformly sample 2-D query points in [-3, 3)^2.

    The data lies roughly in [-2, 2]; the ``* 6`` scale leaves a margin so
    off-surface samples also fall well outside the shape.
    """
    x = (np.random.rand(number_of_points) - 0.5) * 6
    y = (np.random.rand(number_of_points) - 0.5) * 6
    return np.concatenate((x[:, None], y[:, None]), axis=-1)


def calculate_2dsdf_from_polygon(arr_contour_pts, number_of_points=500000):
    """Sample random query points and return rows of ``(x, y, signed_distance)``."""
    query_points = generate_off_surface_points(number_of_points=number_of_points)
    arr_sdf = extract_2d_sdf_from_contour(query_points, arr_contour_pts)
    return np.concatenate((query_points, arr_sdf), axis=-1)


def Gaussian(x, z, sigma=0.5):
    """Isotropic Gaussian kernel exp(-|x - z|^2 / (2 sigma^2)), row-wise over x."""
    return np.exp((-(np.linalg.norm(x - z, axis=1) ** 2)) / (2 * sigma ** 2))


def create_deformation(points, control_point, VECTOR):
    """Displacement field pushing *points* along ``VECTOR``, weighted by a
    Gaussian centred at *control_point*.

    Returns the (N, len(VECTOR)) displacement ONLY — it is not added to
    ``points`` here; see :func:`apply_deformation`.  (The original trailing
    ``# + points`` comment was misleading and has been removed.)
    """
    return Gaussian(points, control_point)[:, None] @ VECTOR[None, :]


def apply_deformation(points, deformation, covariate_value=1.):
    """Add the displacement field, scaled by the covariate value, to the points."""
    return points + deformation * covariate_value


def compute_normals(arr_contour):
    """Unit edge normals for a closed 2-D contour.

    One normal per edge (wrapping from the last vertex back to the first),
    computed as the edge direction rotated by +90 degrees: (-dy, dx).
    """
    arr_contour_pts = arr_contour.copy()[:, [0, 1]]
    normals = []
    n_pts = len(arr_contour_pts)
    for i in range(n_pts):
        x1, y1 = arr_contour_pts[i]
        x2, y2 = arr_contour_pts[(i + 1) % n_pts]  # wrap around for the last point
        normals.append((y1 - y2, x2 - x1))  # (-dy, dx)
    arr_normals = np.array(normals)
    return arr_normals / np.linalg.norm(arr_normals, axis=-1, keepdims=True)


def sampling_the_covariate_space(num_of_shapes, num_of_covariates=4):
    """Draw *num_of_shapes* covariate vectors uniformly from [-1, 1]^k."""
    list_of_covariates = [np.random.rand(num_of_covariates) for _ in range(num_of_shapes)]
    return 2 * (np.array(list_of_covariates) - 0.5)


def generate_shape_conditioned_on_covariates(arr_points, covariates, arr_control_pts):
    """Deform the template contour according to two covariates.

    Covariate 0 drives the two vertical (UP_VECTOR) deformations and
    covariate 1 the two horizontal ones; the horizontal pair receives a +1
    offset so those deformations are biased in the positive direction.
    Deformations are accumulated sequentially but each displacement field is
    computed from the ORIGINAL template points.
    """
    covariates = [covariates[0], covariates[0], covariates[1], covariates[1]]
    list_of_VECTORS = [UP_VECTOR, UP_VECTOR, FORWARD_VECTOR * (-1), FORWARD_VECTOR]
    arr_deformed = arr_points.copy()
    for ith_cov in range(len(covariates)):
        # The original if/else branches were identical except for this offset.
        offset = 0. if ith_cov < 2 else 1.
        deformation = create_deformation(arr_points,
                                         control_point=arr_control_pts[ith_cov],
                                         VECTOR=list_of_VECTORS[ith_cov])
        arr_deformed = apply_deformation(points=arr_deformed,
                                         deformation=deformation,
                                         covariate_value=covariates[ith_cov] + offset)
    return arr_deformed


def generate_dataset_for_one_subject(PID,
                                     pv_contour,
                                     arr_control_pts,
                                     num_of_shapes,
                                     num_of_covariates=2,
                                     rootoath='/playpen-raid/jyn/NAISR/NAISR/examples/starmen/'):
    """Generate and save ``num_of_shapes`` deformed observations for one subject.

    For each sampled covariate vector, saves on-surface points+normals
    (``2dshape/*_on.npy``), off-surface SDF samples (``2dsdf/*_off.npy``) and a
    VTK visualization, and returns one metadata dict per observation.

    NOTE(review): ``rootoath`` is a typo for ``rootpath`` but is kept because
    callers pass it by keyword.
    """
    arr_contour_pts = pv_contour.points[:, [0, 1]]

    # Output directories for on-surface shapes and off-surface SDF samples.
    path_2d_shape = os.path.join(rootoath, '2dshape')
    path_2d_sdf = os.path.join(rootoath, '2dsdf')
    cond_mkdir(path_2d_shape)
    cond_mkdir(path_2d_sdf)

    # Sample covariates uniformly from [-1, 1].
    arr_covariates = sampling_the_covariate_space(num_of_shapes, num_of_covariates)

    list_current_data = []
    for i in range(len(arr_covariates)):
        # On-surface samples: deformed contour points with their 2-D normals.
        arr_deformed_points = generate_shape_conditioned_on_covariates(arr_contour_pts, arr_covariates[i], arr_control_pts)
        arr_deformed_normals = compute_normals(arr_deformed_points)
        arr_on = np.concatenate((arr_deformed_points, arr_deformed_normals), axis=-1)

        # Off-surface samples: random query points paired with signed distances.
        arr_off = calculate_2dsdf_from_polygon(arr_deformed_points, number_of_points=5000)

        current_id = str(PID) + '_' + str(i)
        savepath_on_npy = os.path.join(path_2d_shape, current_id + '_on.npy')
        savepath_off_npy = os.path.join(path_2d_sdf, current_id + '_off.npy')
        np.save(savepath_on_npy, arr_on)
        np.save(savepath_off_npy, arr_off)

        # Save a VTK copy of the deformed contour for visualization
        # (2-D points lifted to 3-D with a zero z-column, as pyvista expects).
        savepath_on_vis = os.path.join(path_2d_shape, current_id + '_on.vtk')
        pv_deformed = pv_contour.copy()
        pv_deformed.points = np.concatenate((arr_on[:, [0, 1]], np.zeros_like(arr_on[:, [0]])), axis=-1)
        pv_deformed.save(savepath_on_vis)

        # NOTE(review): only the first two covariates are recorded even when
        # num_of_covariates > 2 (the test split samples 4) — confirm intended.
        list_current_data.append({"id": current_id,
                                  "PID": PID,
                                  "cov_1": arr_covariates[i][0],
                                  "cov_2": arr_covariates[i][1],
                                  "2dsdf": savepath_off_npy,
                                  "2dshape": savepath_on_npy})

    return list_current_data
def _template_dir(rootpath):
    """Return the ``template`` subdirectory of *rootpath*, creating it if needed."""
    dir_template = os.path.join(rootpath, 'template')
    os.makedirs(dir_template, exist_ok=True)
    return dir_template


def save_personalized_template(pv_template, new_verts, rootpath, PID):
    """Save a subject's personalized template contour as a VTK polyline.

    The 2-D vertices are lifted to 3-D by appending a zero z-column because
    pyvista stores 3-D points.
    """
    pv_new_template = pv_template.copy()
    pv_new_template.points = np.concatenate((new_verts, np.zeros_like(new_verts[:, [0]])), axis=-1)
    savepath = os.path.join(_template_dir(rootpath), str(PID) + "_template.vtk")
    pv_new_template.save(savepath)


def save_personalized_template_contour_pts(new_verts, rootpath, PID):
    """Save the raw 2-D contour vertices of a personalized template as ``.npy``."""
    savepath = os.path.join(_template_dir(rootpath), str(PID) + "_template_contour_pts.npy")
    np.save(savepath, new_verts)


def save_personalized_template_control_pts(new_verts, rootpath, PID):
    """Save the control-point coordinates of a personalized template as ``.npy``."""
    savepath = os.path.join(_template_dir(rootpath), str(PID) + "_template_control_pts.npy")
    np.save(savepath, new_verts)
"/Users/jyn/jyn/research/projects/NAISR/publicdata/deformetrica/examples/longitudinal_atlas/landmark/2d/starmen_for_simulation/data_ground_truth/ForSimulation__ControlPoints__GoundTruth.txt" + # "/Users/jyn/jyn/research/projects/NAISR/publicdata/deformetrica/examples/longitudinal_atlas/landmark/2d/starmen/data/ForInitialization__ControlPoints__FromLongitudinalAtlas.txt" + + mesh = pv.read(path_starman_vtk) + # mesh.points = mesh.points[:, [0,1]] + control_points = np.loadtxt(path_control_points) + arr_cp = np.concatenate((control_points, np.zeros_like(control_points[:, [0]])), axis=1) + + chart = pv.Chart2D() + x = mesh.points[:, 0] + y = mesh.points[:, 1] + _ = chart.line(x, y, "y", 4) + # chart.show() + + + pl = pv.Plotter() + pl.add_mesh(mesh) + pl.add_points(arr_cp[[0]], color='r') + + # 1. + arr_deformed = apply_deformation(points=mesh.points, + deformation=create_deformation(mesh.points, control_point=arr_cp[0]), + covariate_value=1.) + deformed_mesh = mesh.copy() + deformed_mesh.points = arr_deformed + deformed_mesh.normals = compute_normals(deformed_mesh.points) + deformed_mesh.save('./demo.vtk') + pl.add_mesh(deformed_mesh, color='y') + pl.add_arrows(deformed_mesh.points, deformed_mesh.normals ) + + # 0.1 + arr_deformed = apply_deformation(points=mesh.points, + deformation=create_deformation(mesh.points, control_point=arr_cp[0]), + covariate_value=0.1) + deformed_mesh1 = mesh.copy() + deformed_mesh1.points = arr_deformed + pl.add_mesh(deformed_mesh1, color='g') + + # 0.5 + arr_deformed = apply_deformation(points=mesh.points, + deformation=create_deformation(mesh.points, control_point=arr_cp[0]), + covariate_value=0.5) + deformed_mesh2 = mesh.copy() + deformed_mesh2.points = arr_deformed + pl.add_mesh(deformed_mesh2, color='b') + + # 0.1 + arr_deformed = apply_deformation(points=mesh.points, + deformation=create_deformation(mesh.points, control_point=arr_cp[0]), + covariate_value=-0.1) + deformed_mesh3 = mesh.copy() + deformed_mesh3.points = arr_deformed + 
pl.add_mesh(deformed_mesh3, color='cyan') + + # 0.5 + arr_deformed = apply_deformation(points=mesh.points, + deformation=create_deformation(mesh.points, control_point=arr_cp[0]), + covariate_value=-0.5) + deformed_mesh4 = mesh.copy() + deformed_mesh4.points = arr_deformed + pl.add_mesh(deformed_mesh4, color='purple') + + pl.view_xy() + pl.show() + + diff --git a/publicdata/utils_3d.py b/publicdata/utils_3d.py new file mode 100644 index 0000000..e67b31e --- /dev/null +++ b/publicdata/utils_3d.py @@ -0,0 +1,62 @@ +import argparse + +from mesh_to_sdf import sample_sdf_near_surface + +import trimesh +import pyrender +import numpy as np +import SimpleITK as sitk +import trimesh +import mesh_to_sdf + +def calculate_sdf_from_mesh(surf_mesh): + # get centered mesh: mesh + mesh = surf_mesh.copy() + centroid = surf_mesh.bounding_box.centroid + vertices = surf_mesh.vertices - surf_mesh.bounding_box.centroid + distances = np.max(np.linalg.norm(vertices, axis=1)) + + # get sdf field, the mesh is normalized automatically + points, sdf = sample_sdf_near_surface(surf_mesh, number_of_points=250000, sign_method='depth') + # get off-surface points + off_surf_points = generate_off_surface_points(number_of_points=250000) + centroid + off_surf_sdf = mesh_to_sdf.mesh_to_sdf(mesh, off_surf_points, surface_point_method='scan', sign_method='depth', bounding_radius=None, scan_count=100, scan_resolution=1000, sample_point_count=10000000, normal_sample_count=11) + off_surf_points = off_surf_points[off_surf_sdf > 0 * 0.1] + off_surf_sdf = off_surf_sdf[off_surf_sdf > 0 * 0.1] + # denormalize + points = points * distances + centroid + sdf *= distances + # + points = np.concatenate((points, off_surf_points), axis=-2) + sdf = np.concatenate((sdf[:, None], off_surf_sdf[:, None]), axis=0) + + + ''' + colors = np.zeros(points.shape) + colors[sdf[:, 0] < 0, 2] = 1 + colors[sdf[:, 0] > 6, 1] = 1 + + cloud = pyrender.Mesh.from_points(points, colors=colors) + scene = pyrender.Scene() + scene.add(cloud) + 
def generate_off_surface_points(number_of_points=250000):
    """Draw uniform random 3-D query points for off-surface SDF sampling.

    The sampling cube [-30, 30)^3 is sized for the hippocampus dataset.
    """
    # One independent uniform draw per axis, rescaled from [0, 1) to [-30, 30).
    xs = (np.random.rand(number_of_points) - 0.5) * 60
    ys = (np.random.rand(number_of_points) - 0.5) * 60
    zs = (np.random.rand(number_of_points) - 0.5) * 60
    return np.stack((xs, ys, zs), axis=1)