Commit 2258bedd authored by Philipp Schubert

removed version restriction of pyopengl-accelerate (>=3.1.5), as this version does not yet exist in conda

parent 94a3cb0d
Pipeline #68546 passed with stages in 22 minutes and 4 seconds
@@ -13,7 +13,8 @@ Refactored version of SyConn for automated synaptic connectivity inference based
 Current features:
 - introduction of classes for handling of supervoxels (e.g. cell fragments, predicted cellular
   organelles like mitochondria, vesicle clouds etc.) and agglomerated supervoxels
-- prediction of subcellular structures, supervoxel extraction and mesh generation
+- prediction of sub-cellular structures, supervoxel extraction and mesh
+  generation
 - (sub-) cellular compartment (spines, bouton and axon/dendrite/soma) and cell type classification with multiview- [\[2\]](https://www.nature.com/articles/s41467-019-10836-3) and with skeleton-based approaches [\[1\]](https://www.nature.com/articles/nmeth.4206)
 - glia identification and separation [\[2\]](https://www.nature.com/articles/s41467-019-10836-3)
 - connectivity matrix export
@@ -18,7 +18,7 @@ dependencies:
   - vigra
   - freeglut
   - pyopengl
-  - pyopengl-accelerate >=3.1.5
+  - pyopengl-accelerate
   - python-snappy
   - ipython
   - h5py
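Note: whether a given version is actually available on the configured channels can be checked beforehand, e.g. with `conda search pyopengl-accelerate`, before re-introducing a version pin in the environment file.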
@@ -172,7 +172,7 @@ if __name__ == '__main__':
     # # START SyConn
     log.info('Step 0/8 - Predicting sub-cellular structures')
     # TODO: launch all inferences in parallel
-    # exec_dense_prediction.predict_myelin()  # myelin is not needed before `run_create_neuron_ssd`
+    exec_dense_prediction.predict_myelin()
     # TODO: if performed, work-in paths of the resulting KDs to the config
     # TODO: might also require adaptions in init_cell_subcell_sds
     # exec_dense_prediction.predict_cellorganelles()
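The first TODO above could be addressed along these lines. This is a minimal sketch, not part of the commit: it assumes the individual `exec_dense_prediction` calls are independent of each other and that each manages its own GPU resources.

```python
# Hypothetical sketch for the "launch all inferences in parallel" TODO.
from concurrent.futures import ProcessPoolExecutor

from syconn.exec import exec_dense_prediction  # assumed import location

def predict_dense_in_parallel():
    predictions = [
        exec_dense_prediction.predict_myelin,
        exec_dense_prediction.predict_cellorganelles,
    ]
    with ProcessPoolExecutor(max_workers=len(predictions)) as pool:
        futures = [pool.submit(fn) for fn in predictions]
        for fut in futures:
            fut.result()  # re-raise any prediction error in the parent process
```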
@@ -317,18 +317,18 @@ def map_subcell_extract_props(kd_seg_path: str, kd_organelle_paths: dict,
     dir_props = f"{global_params.config.temp_path}/tmp_props/"
     dir_meshes = f"{global_params.config.temp_path}/tmp_meshes/"
-    # # remove previous temporary results.
-    # if os.path.isdir(dir_props):
-    #     if not overwrite:
-    #         msg = f'Could not start extraction of supervoxel objects ' \
-    #               f'because temporary files already existed at "{dir_props}" ' \
-    #               f'and overwrite was set to False.'
-    #         log_proc.error(msg)
-    #         raise FileExistsError(msg)
-    #     log.debug(f'Found existing cache folder at {dir_props}. Removing it now.')
-    #     shutil.rmtree(dir_props)
-    # if os.path.isdir(dir_meshes):
-    #     shutil.rmtree(dir_meshes)
+    # remove previous temporary results.
+    if os.path.isdir(dir_props):
+        if not overwrite:
+            msg = f'Could not start extraction of supervoxel objects ' \
+                  f'because temporary files already existed at "{dir_props}" ' \
+                  f'and overwrite was set to False.'
+            log_proc.error(msg)
+            raise FileExistsError(msg)
+        log.debug(f'Found existing cache folder at {dir_props}. Removing it now.')
+        shutil.rmtree(dir_props)
+    if os.path.isdir(dir_meshes):
+        shutil.rmtree(dir_meshes)
     os.makedirs(dir_props)
     os.makedirs(dir_meshes)
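The re-activated guard above follows a reusable pattern; a minimal sketch of the same logic factored into a helper (hypothetical name, not part of SyConn):

```python
import os
import shutil

def reset_dir(path: str, overwrite: bool) -> None:
    """Remove an existing temporary directory, then recreate it empty.

    Refuses to touch existing results unless overwrite is True.
    """
    if os.path.isdir(path):
        if not overwrite:
            raise FileExistsError(
                f'Temporary files already exist at "{path}" and overwrite was set to False.')
        shutil.rmtree(path)
    os.makedirs(path)
```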
@@ -530,15 +530,15 @@ def map_subcell_extract_props(kd_seg_path: str, kd_organelle_paths: dict,
     all_times.append(time.time() - start)
     step_names.append("write cell SD")
-    # # clear temporary files
-    # if global_params.config.use_new_meshing:
-    #     shutil.rmtree(dir_meshes)
-    # for p in dict_paths_tmp:
-    #     os.remove(p)
-    # shutil.rmtree(cd_dir, ignore_errors=True)
-    # shutil.rmtree(dir_props, ignore_errors=True)
-    # if qu.batchjob_enabled():  # remove job directory of `map_subcell_extract_props`
-    #     shutil.rmtree(os.path.abspath(path_to_out + "/../"), ignore_errors=True)
+    # clear temporary files
+    if global_params.config.use_new_meshing:
+        shutil.rmtree(dir_meshes)
+    for p in dict_paths_tmp:
+        os.remove(p)
+    shutil.rmtree(cd_dir, ignore_errors=True)
+    shutil.rmtree(dir_props, ignore_errors=True)
+    if qu.batchjob_enabled():  # remove job directory of `map_subcell_extract_props`
+        shutil.rmtree(os.path.abspath(path_to_out + "/../"), ignore_errors=True)
     log.debug("Time overview [map_subcell_extract_props]:")
     for ii in range(len(all_times)):
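The loop body lies outside this hunk; purely for orientation, a timing overview of this kind could be printed as follows, assuming `all_times` and `step_names` are parallel lists as built above (a sketch, not the elided code):

```python
# Hypothetical rendering of the per-step timing overview.
total = sum(all_times)
for step_time, step_name in zip(all_times, step_names):
    log.debug(f"{step_name}: {step_time:.1f} s ({100 * step_time / total:.1f} %)")
```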
@@ -556,12 +556,7 @@ def _map_subcell_extract_props_thread(args):
     kd_subcell_ps = args[3]  # Dict
     worker_nr = args[4]
     generate_sv_mesh = args[5]
-    # TODO: Currently min obj size is applied to property dicts and only indirectly to the mesh
-    #  dicts, meaning: Meshes are generated regardless of the object size and stored to file but
-    #  only collected for the object IDs in the property dicts. -> prevent saving meshes of small
-    #  objects in the first place.
-    # worker_dir_meshes = f"{global_params.config.temp_path}/tmp_meshes/meshes_{worker_nr}/"
-    worker_dir_meshes = f"{global_params.config.temp_path}/tmp_meshes_{worker_nr}/"
+    worker_dir_meshes = f"{global_params.config.temp_path}/tmp_meshes/meshes_{worker_nr}/"
     os.makedirs(worker_dir_meshes, exist_ok=True)
     worker_dir_props = f"{global_params.config.temp_path}/tmp_props/props_{worker_nr}/"
     os.makedirs(worker_dir_props, exist_ok=True)
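The TODO removed above pointed out that the minimum object size is only applied to the property dicts, so meshes of small objects are still written to file. A hypothetical sketch of filtering them out before writing; `obj_sizes` (voxel counts per object ID) and `min_obj_size_voxels` are assumed inputs, not existing SyConn names:

```python
def filter_small_meshes(mesh_dc: dict, obj_sizes: dict, min_obj_size_voxels: int) -> dict:
    """Keep only meshes whose objects reach the minimum voxel count."""
    return {obj_id: mesh for obj_id, mesh in mesh_dc.items()
            if obj_sizes.get(obj_id, 0) >= min_obj_size_voxels}
```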
@@ -868,8 +863,7 @@ def _write_props_to_sc_thread(args):
         # using min. number of voxels as a lower bound for number of vertices.
         for worker_nr, chunk_ids in sc_mesh_worker_dc[k].items():
             for ch_id in chunk_ids:
-                # p = f"{global_tmp_path}/tmp_meshes/meshes_{worker_nr}/" \
-                p = f"{global_tmp_path}/tmp_meshes_{worker_nr}/" \
+                p = f"{global_tmp_path}/tmp_meshes/meshes_{worker_nr}/" \
                     f"{organelle}_{worker_nr}_ch{ch_id}.pkl"
                 with open(p, "rb") as pkl_file:
                     partial_mesh_dc = pkl.load(pkl_file)
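With the path fix, every worker's partial caches live under the shared `tmp_meshes/` root again. A minimal sketch of combining such per-worker caches, assuming each `partial_mesh_dc` maps object IDs to lists of mesh fragments (an assumption about the layout, not SyConn API):

```python
import pickle as pkl
from collections import defaultdict

def merge_partial_mesh_caches(paths):
    """Merge per-worker/per-chunk mesh caches into one dictionary."""
    merged = defaultdict(list)
    for path in paths:
        with open(path, "rb") as pkl_file:
            partial_mesh_dc = pkl.load(pkl_file)
        for obj_id, fragments in partial_mesh_dc.items():
            merged[obj_id].extend(fragments)
    return dict(merged)
```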
@@ -893,8 +887,7 @@ def _write_props_to_sc_thread(args):
             worker_ids[worker_id].update(ch_ids)
         for worker_nr, chunk_ids in worker_ids.items():
             for ch_id in chunk_ids:
-                # p = f"{global_tmp_path}/tmp_meshes/meshes_{worker_nr}/" \
-                p = f"{global_tmp_path}/tmp_meshes_{worker_nr}/" \
+                p = f"{global_tmp_path}/tmp_meshes/meshes_{worker_nr}/" \
                     f"{organelle}_{worker_nr}_ch{ch_id}.pkl"
                 with open(p, "rb") as pkl_file:
                     partial_mesh_dc = pkl.load(pkl_file)
@@ -1114,8 +1107,7 @@ def _write_props_to_sv_thread(args):
         #                'caches.'.format(len(obj_keys), len(worker_ids)))
         for worker_nr, chunk_ids in worker_ids.items():
             for ch_id in chunk_ids:
-                # p = f"{global_tmp_path}/tmp_meshes/meshes_{worker_nr}/" \
-                p = f"{global_tmp_path}/tmp_meshes_{worker_nr}/" \
+                p = f"{global_tmp_path}/tmp_meshes/meshes_{worker_nr}/" \
                     f"sv_{worker_nr}_ch{ch_id}.pkl"
                 pkl_file = open(p, "rb")
                 partial_mesh_dc = pkl.load(pkl_file)