Commit 08722739 authored by Benoit Martin's avatar Benoit Martin

Merge branch 'ci' into 'master'

Update ci and bugfix

Closes #24

See merge request aramislab/deformetrica!3
parents 1b43fece debad2c1
Pipeline #50174496 passed with stages
in 37 minutes and 38 seconds
...@@ -44,11 +44,12 @@ stages: ...@@ -44,11 +44,12 @@ stages:
script: script:
- id && pwd && ls -alh - id && pwd && ls -alh
- git clone https://gitlab-ci-token:${CI_JOB_TOKEN}@gitlab.icm-institute.org/aramislab/conda-recipes.git - git clone https://gitlab-ci-token:${CI_JOB_TOKEN}@gitlab.icm-institute.org/aramislab/conda-recipes.git
- mkdir conda-recipes/deformetrica/build && cd conda-recipes/deformetrica/build - mkdir deformetrica-build && cd deformetrica-build
- conda install conda-build anaconda-client && conda update conda conda-build anaconda-client - conda install conda-build anaconda-client && conda update conda conda-build anaconda-client
- export GIT_SSL_NO_VERIFY=1 - export GIT_SSL_NO_VERIFY=1
- conda-build --py 3.5 --py 3.6 -c pytorch -c anaconda -c conda-forge --output-folder . ../ - conda-build --py 3.5 --py 3.6 -c pytorch -c anaconda -c conda-forge --output-folder . ../conda-recipes/deformetrica
- anaconda --token=$ANACONDA_TOKEN upload --user Aramislab --force **/*.tar.bz2 - anaconda --token=$ANACONDA_TOKEN upload --user Aramislab --force --no-progress **/*.tar.bz2
- conda-build purge && cd .. && rm -rf deformetrica-build
######### #########
......
...@@ -4,8 +4,10 @@ All notable changes to this project will be documented in this file. ...@@ -4,8 +4,10 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/) The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/)
and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html). and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html).
## [4.0.0] - 2018-06-11
## [4.0.0] - 2018-06-14
### Added ### Added
- Bugfix: version file not found. issue #24
- Easy install with `conda install -c pytorch -c conda-forge -c anaconda -c aramislab deformetrica`, without any manual compilation. - Easy install with `conda install -c pytorch -c conda-forge -c anaconda -c aramislab deformetrica`, without any manual compilation.
- All existing deformetrica functionalities now work with 2d or 3d gray level images. - All existing deformetrica functionalities now work with 2d or 3d gray level images.
- A L-BFGS optimization method can now be used for registration, regression, deterministic and bayesian atlases. - A L-BFGS optimization method can now be used for registration, regression, deterministic and bayesian atlases.
......
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
<?xml version="1.0" ?>
<data-set deformetrica-min-version="3.0.0">
<!-- <subject id="021_013">
<visit id="021_013">
<filename object_id="Aorta">/Users/alexandre.bone/Desktop/Marfan2018/1_dataset/2_decimated_meshes/021_013y_Ao.vtk</filename>
</visit>
</subject> -->
<subject id="021_015">
<visit id="021_015">
<filename object_id="Aorta">/Users/alexandre.bone/Desktop/Marfan2018/1_dataset/2_decimated_meshes/021_015y_Ao.vtk</filename>
</visit>
</subject>
<subject id="027_009">
<visit id="027_009">
<filename object_id="Aorta">/Users/alexandre.bone/Desktop/Marfan2018/1_dataset/2_decimated_meshes/027_009y_Ao.vtk</filename>
</visit>
</subject>
<subject id="027_013">
<visit id="027_013">
<filename object_id="Aorta">/Users/alexandre.bone/Desktop/Marfan2018/1_dataset/2_decimated_meshes/027_013y_Ao.vtk</filename>
</visit>
</subject>
<subject id="027_014">
<visit id="027_014">
<filename object_id="Aorta">/Users/alexandre.bone/Desktop/Marfan2018/1_dataset/2_decimated_meshes/027_014y_Ao.vtk</filename>
</visit>
</subject>
<subject id="027_015">
<visit id="027_015">
<filename object_id="Aorta">/Users/alexandre.bone/Desktop/Marfan2018/1_dataset/2_decimated_meshes/027_015y_Ao.vtk</filename>
</visit>
</subject>
<subject id="028_008">
<visit id="028_008">
<filename object_id="Aorta">/Users/alexandre.bone/Desktop/Marfan2018/1_dataset/2_decimated_meshes/028_008y_Ao.vtk</filename>
</visit>
</subject>
<subject id="028_009">
<visit id="028_009">
<filename object_id="Aorta">/Users/alexandre.bone/Desktop/Marfan2018/1_dataset/2_decimated_meshes/028_009y_Ao.vtk</filename>
</visit>
</subject>
<subject id="028_012">
<visit id="028_012">
<filename object_id="Aorta">/Users/alexandre.bone/Desktop/Marfan2018/1_dataset/2_decimated_meshes/028_012y_Ao.vtk</filename>
</visit>
</subject>
<subject id="028_013">
<visit id="028_013">
<filename object_id="Aorta">/Users/alexandre.bone/Desktop/Marfan2018/1_dataset/2_decimated_meshes/028_013y_Ao.vtk</filename>
</visit>
</subject>
<subject id="031_011">
<visit id="031_011">
<filename object_id="Aorta">/Users/alexandre.bone/Desktop/Marfan2018/1_dataset/2_decimated_meshes/031_011y_Ao.vtk</filename>
</visit>
</subject>
<subject id="031_012">
<visit id="031_012">
<filename object_id="Aorta">/Users/alexandre.bone/Desktop/Marfan2018/1_dataset/2_decimated_meshes/031_012y_Ao.vtk</filename>
</visit>
</subject>
</data-set>
<?xml version="1.0"?>
<model>
<model-type>RigidAtlas</model-type>
<dimension>3</dimension>
<template>
<object id="Aorta">
<deformable-object-type>SurfaceMesh</deformable-object-type>
<attachment-type>Current</attachment-type>
<kernel-type>torch</kernel-type>
<kernel-width>5</kernel-width>
<filename>/Users/alexandre.bone/Desktop/Marfan2018/1_dataset/2_decimated_meshes/021_011y_Ao.vtk</filename>
</object>
</template>
</model>
<?xml version="1.0"?>
<optimization-parameters>
<optimization-method-type>ScipyLBFGS</optimization-method-type>
<max-iterations>100</max-iterations>
<!-- <max-line-search-iterations>5</max-line-search-iterations> -->
<save-every-n-iters>20</save-every-n-iters>
<initial-step-size>1e-5</initial-step-size>
<convergence-tolerance>1e-6</convergence-tolerance>
<!-- <use-cuda>On</use-cuda> -->
</optimization-parameters>
...@@ -14,7 +14,7 @@ except ImportError: # for pip <= 9.0.3 ...@@ -14,7 +14,7 @@ except ImportError: # for pip <= 9.0.3
setup( setup(
name='Deformetrica', name='Deformetrica',
version=open('VERSION').read(), version=open('VERSION', encoding='utf-8').read(),
url='http://www.deformetrica.org', url='http://www.deformetrica.org',
description='Software for the statistical analysis of 2D and 3D shape data.', description='Software for the statistical analysis of 2D and 3D shape data.',
long_description=open('README.md', encoding='utf-8').read(), long_description=open('README.md', encoding='utf-8').read(),
...@@ -32,7 +32,7 @@ setup( ...@@ -32,7 +32,7 @@ setup(
}, },
classifiers=[ classifiers=[
'Framework :: Deformetrica', 'Framework :: Deformetrica',
'Development Status :: 4.0.0 - dev', 'Development Status :: 4.0.1',
'Environment :: Console', 'Environment :: Console',
'Operating System :: OS Independent', 'Operating System :: OS Independent',
'Programming Language :: Python', 'Programming Language :: Python',
......
...@@ -160,21 +160,23 @@ class McmcSaem(AbstractEstimator): ...@@ -160,21 +160,23 @@ class McmcSaem(AbstractEstimator):
# Call the write method of the statistical model. # Call the write method of the statistical model.
if population_RER is None: population_RER = self.population_RER if population_RER is None: population_RER = self.population_RER
if individual_RER is None: individual_RER = self.individual_RER if individual_RER is None: individual_RER = self.individual_RER
self.statistical_model.write(self.dataset, self.population_RER, self.individual_RER, update_fixed_effects=False) self.statistical_model.write(self.dataset, population_RER, individual_RER, update_fixed_effects=False)
# Save the recorded model parameters trajectory. # Save the recorded model parameters trajectory.
# self.model_parameters_trajectory is a list of dictionaries # self.model_parameters_trajectory is a list of dictionaries
np.save(os.path.join( np.save(os.path.join(
Settings().output_dir, Settings().output_dir,
self.statistical_model.name + '__EstimatedParameters__Trajectory.npy'), self.statistical_model.name + '__EstimatedParameters__Trajectory.npy'),
np.array(self.model_parameters_trajectory)) np.array(self.model_parameters_trajectory[
:int(self.current_iteration / float(self.save_model_parameters_every_n_iters))]))
# Save the memorized individual random effects samples. # Save the memorized individual random effects samples.
if self.current_iteration > self.number_of_burn_in_iterations: if self.current_iteration > self.number_of_burn_in_iterations:
np.save(os.path.join( np.save(os.path.join(
Settings().output_dir, Settings().output_dir,
self.statistical_model.name + '__EstimatedParameters__IndividualRandomEffectsSamples.npy'), self.statistical_model.name + '__EstimatedParameters__IndividualRandomEffectsSamples.npy'),
self.individual_random_effects_samples_stack) self.individual_random_effects_samples_stack[:(self.current_iteration -
self.number_of_burn_in_iterations - 1)])
#################################################################################################################### ####################################################################################################################
### Private_maximize_over_remaining_fixed_effects() method and associated utilities: ### Private_maximize_over_remaining_fixed_effects() method and associated utilities:
......
...@@ -2,7 +2,7 @@ import _pickle as pickle ...@@ -2,7 +2,7 @@ import _pickle as pickle
from decimal import Decimal from decimal import Decimal
import numpy as np import numpy as np
from scipy.optimize import minimize from scipy.optimize import minimize, brute
from core.estimators.abstract_estimator import AbstractEstimator from core.estimators.abstract_estimator import AbstractEstimator
from support.utilities.general_settings import Settings from support.utilities.general_settings import Settings
...@@ -90,18 +90,18 @@ class ScipyOptimize(AbstractEstimator): ...@@ -90,18 +90,18 @@ class ScipyOptimize(AbstractEstimator):
'disp': True 'disp': True
}) })
elif self.method == 'GridSearch':
x = brute(self._cost, self._get_parameters_range(x0), Ns=4, disp=True)
self._set_parameters(self._unvectorize_parameters(x))
self.write()
else: else:
raise RuntimeError('Unknown optimization method.') raise RuntimeError('Unknown optimization method.')
# Finalization -------------------------------------------------------------------------------------------------
except StopIteration: except StopIteration:
print('>> STOP: TOTAL NO. of ITERATIONS EXCEEDS LIMIT') print('>> STOP: TOTAL NO. of ITERATIONS EXCEEDS LIMIT')
# Finalization -------------------------------------------------------------------------------------------------
# self._set_parameters(self._unvectorize_parameters(result.x)) # Probably already done in _callback.
# if self.verbose > 0 and self.method == 'L-BFGS-B':
# print('>> ' + result.message.decode("utf-8"))
def print(self): def print(self):
""" """
Print information. Print information.
...@@ -215,6 +215,10 @@ class ScipyOptimize(AbstractEstimator): ...@@ -215,6 +215,10 @@ class ScipyOptimize(AbstractEstimator):
out.update(self.individual_RER) out.update(self.individual_RER)
return out return out
def _get_parameters_range(self, x):
    """Build per-parameter search bounds centered on the current estimate.

    For each entry of the vectorized parameters ``x``, returns an interval
    ``(x[k] - dx[k], x[k] + dx[k])`` where ``dx`` is the vectorized
    variability reported by the statistical model. The resulting tuple of
    (lower, upper) pairs is suitable as the ``ranges`` argument of
    ``scipy.optimize.brute``.
    """
    # Half-widths of the search intervals, one per vectorized parameter.
    half_widths = self._vectorize_parameters(self.statistical_model.get_fixed_effects_variability())
    bounds = tuple((x[k] - half_widths[k], x[k] + half_widths[k]) for k in range(len(x)))
    return bounds
def _vectorize_parameters(self, parameters): def _vectorize_parameters(self, parameters):
""" """
Returns a 1D numpy array from a dictionary of numpy arrays. Returns a 1D numpy array from a dictionary of numpy arrays.
......
This diff is collapsed.
...@@ -13,7 +13,7 @@ class PointCloud(Landmark): ...@@ -13,7 +13,7 @@ class PointCloud(Landmark):
def __init__(self): def __init__(self):
Landmark.__init__(self) Landmark.__init__(self)
self.centers = None self.centers = None
self.normals = None#This is going to be point weights, uniform for now TODO: read somewhere e.g. in the vtk the weights of the points. self.normals = None #This is going to be point weights, uniform for now TODO: read somewhere e.g. in the vtk the weights of the points.
def update(self): def update(self):
Landmark.update(self) Landmark.update(self)
......
...@@ -8,6 +8,7 @@ from in_out.xml_parameters import XmlParameters ...@@ -8,6 +8,7 @@ from in_out.xml_parameters import XmlParameters
from launch.compute_parallel_transport import compute_parallel_transport from launch.compute_parallel_transport import compute_parallel_transport
from launch.estimate_bayesian_atlas import estimate_bayesian_atlas from launch.estimate_bayesian_atlas import estimate_bayesian_atlas
from launch.estimate_deterministic_atlas import estimate_deterministic_atlas from launch.estimate_deterministic_atlas import estimate_deterministic_atlas
from launch.estimate_rigid_atlas import estimate_rigid_atlas
from launch.estimate_geodesic_regression import estimate_geodesic_regression from launch.estimate_geodesic_regression import estimate_geodesic_regression
from launch.estimate_longitudinal_atlas import estimate_longitudinal_atlas from launch.estimate_longitudinal_atlas import estimate_longitudinal_atlas
from launch.estimate_longitudinal_metric_model import estimate_longitudinal_metric_model from launch.estimate_longitudinal_metric_model import estimate_longitudinal_metric_model
...@@ -17,15 +18,6 @@ from launch.compute_shooting import run_shooting ...@@ -17,15 +18,6 @@ from launch.compute_shooting import run_shooting
from support.utilities.general_settings import Settings from support.utilities.general_settings import Settings
def info():
    """Return a printable banner containing the Deformetrica version.

    The version string is read from the ``VERSION`` file located one
    directory above this module.

    Returns:
        str: a multi-line ASCII banner with the version interpolated.
    """
    version_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', 'VERSION')
    # Use a context manager so the file handle is always closed (the previous
    # bare open() leaked the handle), and read as UTF-8 explicitly.
    with open(version_path, encoding='utf-8') as version_file:
        version = version_file.read()
    return """
##############################
##### Deformetrica {version} #####
##############################
""".format(version=version)
def main(): def main():
import logging import logging
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
...@@ -59,9 +51,6 @@ def main(): ...@@ -59,9 +51,6 @@ def main():
logger.debug('Using verbosity level: ' + args.verbosity) logger.debug('Using verbosity level: ' + args.verbosity)
logging.basicConfig(level=log_level, format=logger_format) logging.basicConfig(level=log_level, format=logger_format)
# Basic info printing
logger.info(info())
""" """
Read xml files, set general settings, and call the adapted function. Read xml files, set general settings, and call the adapted function.
""" """
...@@ -89,6 +78,9 @@ def main(): ...@@ -89,6 +78,9 @@ def main():
elif xml_parameters.model_type == 'BayesianAtlas'.lower(): elif xml_parameters.model_type == 'BayesianAtlas'.lower():
estimate_bayesian_atlas(xml_parameters) estimate_bayesian_atlas(xml_parameters)
elif xml_parameters.model_type == 'RigidAtlas'.lower():
estimate_rigid_atlas(xml_parameters)
elif xml_parameters.model_type == 'Regression'.lower(): elif xml_parameters.model_type == 'Regression'.lower():
estimate_geodesic_regression(xml_parameters) estimate_geodesic_regression(xml_parameters)
......
...@@ -168,4 +168,4 @@ def estimate_bayesian_atlas(xml_parameters): ...@@ -168,4 +168,4 @@ def estimate_bayesian_atlas(xml_parameters):
end_time = time.time() end_time = time.time()
print('>> Estimation took: ' + str(time.strftime("%H:%M:%S", time.gmtime(end_time - start_time)))) print('>> Estimation took: ' + str(time.strftime("%H:%M:%S", time.gmtime(end_time - start_time))))
return model return model, estimator.individual_RER['momenta']
This diff is collapsed.
...@@ -6,7 +6,7 @@ import sys ...@@ -6,7 +6,7 @@ import sys
from unit_tests.test_attachments import DistanceTests from unit_tests.test_attachments import DistanceTests
from unit_tests.test_array_readers_and_writers import ArrayReadersAndWritersTests from unit_tests.test_array_readers_and_writers import ArrayReadersAndWritersTests
from unit_tests.test_kernel_factory import KernelFactory, Kernel, KeopsVersusCuda, CheckForKeopsMemoryLeak from unit_tests.test_kernel_factory import KernelFactory, Kernel, KeopsVersusCuda
from unit_tests.test_parallel_transport import ParallelTransportTests from unit_tests.test_parallel_transport import ParallelTransportTests
from unit_tests.test_shooting import ShootingTests from unit_tests.test_shooting import ShootingTests
...@@ -18,8 +18,6 @@ TEST_MODULES = [KernelFactory, Kernel, KeopsVersusCuda, ...@@ -18,8 +18,6 @@ TEST_MODULES = [KernelFactory, Kernel, KeopsVersusCuda,
ParallelTransportTests, DistanceTests, ArrayReadersAndWritersTests, ParallelTransportTests, DistanceTests, ArrayReadersAndWritersTests,
PolyLineTests, PointCloudTests, SurfaceMeshTests, ShootingTests] PolyLineTests, PointCloudTests, SurfaceMeshTests, ShootingTests]
# TEST_MODULES = [CheckForKeopsMemoryLeak]
def main(): def main():
import logging import logging
......
...@@ -371,28 +371,3 @@ class KeopsVersusCuda(unittest.TestCase): ...@@ -371,28 +371,3 @@ class KeopsVersusCuda(unittest.TestCase):
self.assertTrue(np.allclose(keops_dcp_2, torch_dcp_2, rtol=1e-05, atol=1e-05)) self.assertTrue(np.allclose(keops_dcp_2, torch_dcp_2, rtol=1e-05, atol=1e-05))
self.assertTrue(np.allclose(keops_dmom_1, torch_dmom_1, rtol=1e-05, atol=1e-05)) self.assertTrue(np.allclose(keops_dmom_1, torch_dmom_1, rtol=1e-05, atol=1e-05))
self.assertTrue(np.allclose(keops_dmom_2, torch_dmom_2, rtol=1e-05, atol=1e-05)) self.assertTrue(np.allclose(keops_dmom_2, torch_dmom_2, rtol=1e-05, atol=1e-05))
class CheckForKeopsMemoryLeak(unittest.TestCase):
    """Manual diagnostic for GPU memory leaks in the KeOps convolution kernel.

    NOTE(review): this "test" never terminates — it convolves in an endless
    loop so that GPU memory usage can be watched externally (e.g. with
    ``nvidia-smi``). It is presumably meant to be run by hand rather than as
    part of the automated suite — confirm it is excluded from TEST_MODULES.
    Requires a CUDA-capable device (uses ``torch.cuda.FloatTensor``).
    """

    def test_keops_gpu_convolve_in_loop(self):
        # Parameters.
        kernel_width = 10.
        number_of_control_points = 100
        dimension = 3
        tensor_scalar_type = torch.cuda.FloatTensor  # CUDA tensors: this check targets GPU memory.
        # tensor_scalar_type = torch.FloatTensor

        # Set the global settings accordingly.
        Settings().dimension = dimension
        Settings().tensor_scalar_type = tensor_scalar_type

        # Instantiate the needed objects: a KeOps kernel plus random control
        # points and momenta with gradients enabled.
        keops_kernel = kernel_factory.factory(kernel_factory.Type.KEOPS, kernel_width)
        random_control_points = torch.from_numpy(
            np.random.randn(number_of_control_points, dimension)).type(tensor_scalar_type).requires_grad_()
        random_momenta = torch.from_numpy(
            np.random.randn(number_of_control_points, dimension)).type(tensor_scalar_type).requires_grad_()

        # Infinite loop. Deliberate: a leak shows up as monotonically growing
        # GPU memory while this runs; the result itself is discarded.
        while True:
            _ = keops_kernel.convolve(random_control_points, random_control_points, random_momenta)
\ No newline at end of file
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment