.gitlab-ci.yml  (+1 −1)

@@ -11,7 +11,7 @@ lint:
   script:
     - conda install python=3.6
     - pip install -e .
-    - pip install flake8 pep8-naming flake8-docstrings
+    - pip install "pydocstyle<4" flake8 pep8-naming flake8-docstrings
     - flake8 --config=.flake8 scared tests setup.py

 test:
docs/source/api_reference/distinguishers/mia.rst  (new file, +8 −0)

+Distinguishers MIA API reference
+================================
+
+.. toctree::
+    :maxdepth: 1
+
+.. automodule:: scared.distinguishers.mia
+    :members:
scared/__init__.py  (+7 −2)

@@ -13,10 +13,15 @@
 from .distinguishers import (  # noqa: F401
     DPADistinguisherMixin, DPADistinguisher, CPADistinguisherMixin, CPAAlternativeDistinguisherMixin,
     CPADistinguisher, CPAAlternativeDistinguisher, PartitionedDistinguisherMixin, PartitionedDistinguisher,
     ANOVADistinguisherMixin, ANOVADistinguisher,
-    NICVDistinguisherMixin, NICVDistinguisher, SNRDistinguisherMixin, SNRDistinguisher
+    NICVDistinguisherMixin, NICVDistinguisher, SNRDistinguisherMixin, SNRDistinguisher,
+    MIADistinguisher
 )
 from .ttest import TTestAccumulator, TTestAnalysis, TTestError, TTestContainer  # noqa:F401
-from .analysis import BaseAnalysis, CPAAnalysis, DPAAnalysis, ANOVAAnalysis, NICVAnalysis, SNRAnalysis, BasePartitionedAnalysis  # noqa:F401
+from .analysis import (  # noqa:F401
+    BaseAnalysis, CPAAnalysis, DPAAnalysis, ANOVAAnalysis, NICVAnalysis, SNRAnalysis,
+    BasePartitionedAnalysis, MIAAnalysis
+)
 from .preprocesses import preprocess, Preprocess, PreprocessError  # noqa:F401

 from . import container as _container
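For reference, a quick import check (a sketch, assuming this branch of scared is installed): the two MIA names added above become reachable from the top-level package, alongside the existing analyses.

    import scared

    # Both the new analysis class and the bare distinguisher are re-exported at package level.
    print(scared.MIAAnalysis)
    print(scared.MIADistinguisher)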
scared/analysis.py  (+26 −2)

 from . import selection_functions as _sf, container as _container, models, distinguishers
 import numpy as _np
+import logging
+
+logger = logging.getLogger(__name__)


 class BaseAnalysis:

@@ -98,6 +101,7 @@ class BaseAnalysis:
         return base_batch_size

     def _final_compute(self):
+        logger.info(f'Starting final computing.')
         self.compute_results()
         if self.convergence_step and len(self._batches_processed) > 1:
             self._compute_convergence_traces()

@@ -118,10 +122,14 @@ class BaseAnalysis:
             raise TypeError(f'container should be a type Container, not {type(container)}.')
         batch_size = self._compute_batch_size(container.batch_size)
-        for batch in container.batches(batch_size=batch_size):
+        logger.info(f'Starting run on container {container}, with batch size {batch_size}.')
+        for i, batch in enumerate(container.batches(batch_size=batch_size)):
+            logger.info(f'Process batch number {i} starting.')
             self.process(batch)
             self.compute_convergence()
+        logger.info(f'Batches processing finished.')
         self._final_compute()
+        logger.info(f'Run on container {container} finished.')

     def compute_intermediate_values(self, metadata):
         """Compute intermediate leakage values for this instance from metadata.

@@ -130,6 +138,7 @@ class BaseAnalysis:
             metadata (mapping): a dict-like object containing the data to be used with selection function.

         """
+        logger.info(f'Computing intermediate values for metadata {metadata}.')
         return self.model(self.selection_function(**metadata))

     def process(self, traces_batch):

@@ -146,8 +155,11 @@ class BaseAnalysis:
         intermediate_values = self.compute_intermediate_values(traces_batch.metadatas)
         if self.convergence_traces is None and self.convergence_step:
+            logger.info(f'Initialize convergence traces.')
             self.convergence_traces = _np.empty(intermediate_values.shape[1:] + (0, ), dtype=self.precision)

+        logger.info(f'Will call distinguisher update with {traces_batch}.')
         self.update(
             data=intermediate_values,
             traces=traces_batch.samples

@@ -159,6 +171,7 @@ class BaseAnalysis:
        This method is used internally by `run`, but can also be used to have a finer control on the process.

        """
+        logger.info(f'Compute convergence results.')
        if self.convergence_step:
            self._batches_processed.append(self.processed_traces)
            if self._batches_processed[-1] - self._batches_processed[0] >= self.convergence_step:

@@ -167,6 +180,7 @@ class BaseAnalysis:
                self._compute_convergence_traces()

    def _compute_convergence_traces(self):
+        logger.info(f'Update convergence traces.')
        self.convergence_traces = _np.append(self.convergence_traces, self.scores[..., None], axis=-1)

    def compute_results(self):

@@ -175,8 +189,10 @@ class BaseAnalysis:
        This method is used internally by `run`, but can also be used to have a finer control on the process.

        """
+        logger.info(f'Computing results ...')
        self.results = self.compute()
        self.scores = self.discriminant(self.results)
+        logger.info(f'Results computed.')


 class CPAAnalysis(BaseAnalysis, distinguishers.CPADistinguisherMixin):

@@ -195,8 +211,8 @@ class DPAAnalysis(BaseAnalysis, distinguishers.DPADistinguisherMixin):
 class BasePartitionedAnalysis(BaseAnalysis):

     def __init__(self, partitions=None, *args, **kwargs):
-        super().__init__(*args, **kwargs)
         distinguishers.partitioned._set_partitions(self, partitions)
+        return super().__init__(*args, **kwargs)


 class ANOVAAnalysis(BasePartitionedAnalysis, distinguishers.ANOVADistinguisherMixin):

@@ -209,3 +225,11 @@ class NICVAnalysis(BasePartitionedAnalysis, distinguishers.NICVDistinguisherMixin):
 class SNRAnalysis(BasePartitionedAnalysis, distinguishers.SNRDistinguisherMixin):
     __doc__ = distinguishers.SNRDistinguisherMixin.__doc__ + BaseAnalysis.__doc__
+
+
+class MIAAnalysis(BasePartitionedAnalysis, distinguishers.MIADistinguisherMixin):
+    __doc__ = distinguishers.MIADistinguisherMixin.__doc__ + BaseAnalysis.__doc__
+
+    def __init__(self, bins_number=128, bin_edges=None, *args, **kwargs):
+        distinguishers.mia._set_histogram_parameters(self, bins_number=bins_number, bin_edges=bin_edges)
+        return super().__init__(*args, **kwargs)
scared/distinguishers/__init__.py  (+1 −0)

@@ -7,3 +7,4 @@
 from .partitioned import (  # noqa: F401
     SNRDistinguisher, SNRDistinguisherMixin, NICVDistinguisher, NICVDistinguisherMixin
 )
+from .mia import MIADistinguisher, MIADistinguisherMixin  # noqa: F401
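To close, a usage sketch for the new MIAAnalysis class, mirroring how the sibling analyses (CPAAnalysis, SNRAnalysis, ...) are typically driven. Only the MIAAnalysis name and its bins_number/bin_edges parameters come from this merge request; the selection function, leakage model, discriminant and trace loading below are assumptions about the surrounding scared API and may need adjusting to your data.

    import numpy as np
    import scared

    # Assumption: an attack selection function targeting the first AES SubBytes output.
    @scared.attack_selection_function
    def first_sub_bytes(plaintext, guesses):
        out = np.empty((plaintext.shape[0], len(guesses), plaintext.shape[1]), dtype='uint8')
        for i, guess in enumerate(guesses):
            out[:, i, :] = scared.aes.sub_bytes(np.bitwise_xor(plaintext, guess))
        return out

    # Assumption: traces stored in an ETS file; any estraces-compatible source works.
    ths = scared.traces.read_ths_from_ets_file('traces.ets')

    analysis = scared.MIAAnalysis(
        selection_function=first_sub_bytes,
        model=scared.HammingWeight(),
        discriminant=scared.maxabs,
        bins_number=128,  # histogram parameter introduced by this merge request (default 128)
    )
    analysis.run(scared.Container(ths))
    print(analysis.scores.shape)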