Skip to content

Commit

Permalink
code: plot filtered data and cleaning spectrum
Browse files Browse the repository at this point in the history
  • Loading branch information
samiBendou committed Oct 23, 2020
1 parent f92d9d0 commit 143f9af
Show file tree
Hide file tree
Showing 6 changed files with 212 additions and 136 deletions.
14 changes: 3 additions & 11 deletions src/lib/cpa.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,14 +20,6 @@
COUNT_CLS = 256 # Traces with the same byte value in a given position
BLOCK_SIZE = aes.BLOCK_SIZE

class Models(Enum):
"""CPA power consumption models.
"""

SBOX_R0 = 0
INV_SBOX_R10 = 1


class Statistics:
def __init__(self, handler=None):
Expand Down Expand Up @@ -204,7 +196,7 @@ class Models(Enum):
SBOX_R0 = 0
INV_SBOX_R10 = 1

def __init__(self, model, channel=None, traces=None, samples=None):
def __init__(self, model=None, channel=None, traces=None, samples=None):
"""Allocates memory, accumulates traces and initialize model.
Parameters
Expand All @@ -218,7 +210,7 @@ def __init__(self, model, channel=None, traces=None, samples=None):
samples : int
Count of time samples in the signals.
"""
self.model = model
self.model = model or Handler.Models(value=0)
self.blocks = None
self.key = None
self.iterations = 0
Expand All @@ -235,7 +227,7 @@ def __init__(self, model, channel=None, traces=None, samples=None):
samples = samples or traces.shape[1]
self.clear(samples).set_model(model).set_key(channel).set_blocks(channel).accumulate(traces)
else:
self.clear(samples or 0).set_model(model)
self.clear(samples or 0).set_model(self.model)

def clear(self, samples=0):
self.iterations = 0
Expand Down
2 changes: 1 addition & 1 deletion src/lib/data.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@
from enum import Enum
from warnings import warn

from lib.cpa import Models, Handler
from lib.cpa import Handler


class Serializable:
Expand Down
55 changes: 51 additions & 4 deletions src/lib/traces.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,54 @@
"""

import numpy as np
from scipy import stats
from scipy import stats, fft, signal


def _pearsonr_from_ref(r, st, sh):
return list(map(lambda s: stats.pearsonr(r, st[s])[0], sh))


class Statistics:
    """Running statistics over batches of side-channel leakage traces.

    Each call to ``update`` detrends and mean-centers the incoming traces,
    optionally filters them (IIR coefficient filtering, or spectral
    subtraction of an accumulated noise signature), then folds them into a
    running sum so that the mean trace and its FFT magnitude spectrum stay
    up to date across acquisitions.
    """

    # Acquisition sampling frequency in Hz, used to label FFT bins.
    # NOTE(review): hard-coded 200 MHz — confirm against capture hardware.
    SAMPLING_FREQ = 200e6

    def __init__(self, leak=None, filters=None, noise=None):
        """Initialize empty accumulators, optionally seeding with a batch.

        Parameters
        ----------
        leak : object, optional
            Leakage batch exposing ``len(leak)`` and a 2-D ``leak.traces``
            array (one trace per row). When given, ``update`` is called once.
        filters : iterable, optional
            Iterable of ``(b, a, ...)`` IIR coefficient tuples; ``None``
            entries are skipped.
        noise : Statistics, optional
            Accumulated noise statistics used for spectral subtraction when
            no ``filters`` are provided.
        """
        self.iterations = 0    # total count of traces accumulated so far
        self.cropped = None    # last batch after detrend/center/filter
        self.sum = None        # running per-sample sum across all traces
        self.mean = None       # running mean trace (sum / iterations)
        self.spectrum = None   # FFT magnitude of the mean trace
        self.freqs = None      # ordering of positive-frequency bin indices

        if leak is not None:
            self.update(leak, filters, noise)

    def update(self, leak, filters=None, noise=None):
        """Fold a new batch of leakage traces into the running statistics.

        Parameters are as in ``__init__``. Filtering modes are exclusive:
        ``filters`` takes precedence; otherwise a non-empty ``noise`` is
        subtracted in the frequency domain.
        """
        self.iterations += len(leak)
        # Crop/pad the batch to the accumulated length, then remove any
        # linear trend from each trace.
        self.cropped = signal.detrend(
            adjust(leak.traces, None if self.sum is None else self.sum.shape[0]),
            axis=1)
        # Center each trace on its own mean.
        self.cropped -= np.mean(self.cropped, axis=1).reshape((self.cropped.shape[0], 1))
        if filters is not None:
            for f in filters:
                if f is None:
                    continue
                b, a, *_ = f
                # filtfilt: zero-phase filtering, so peaks are not shifted.
                self.cropped = signal.filtfilt(b, a, self.cropped, axis=1)
        elif noise is not None and noise.iterations > 0:
            # Spectral subtraction of the accumulated noise signature.
            # Assumes noise.cropped matches the batch shape — TODO confirm.
            filtered = fft.fft(self.cropped, axis=1) - fft.fft(noise.cropped, axis=1)
            self.cropped = np.real(fft.ifft(filtered, axis=1))

        if self.sum is None:
            self.sum = np.sum(self.cropped, axis=0)
        else:
            self.sum += np.sum(self.cropped, axis=0)
        self.mean = np.divide(self.sum, self.iterations)
        self.spectrum = np.absolute(fft.fft(self.mean))
        size = len(self.spectrum)
        # Sort order of the positive-frequency half of the FFT bins (in MHz);
        # presumably used for plot ordering downstream — verify against caller.
        self.freqs = np.argsort(
            np.fft.fftfreq(size, 1.0 / self.SAMPLING_FREQ)[:size // 2] / 1e6)

    def clear(self):
        """Reset every accumulator to its initial empty state."""
        self.iterations = 0
        self.cropped = None
        self.sum = None
        self.mean = None
        self.spectrum = None
        # Bug fix: freqs was previously left stale after a clear().
        self.freqs = None


def crop(traces, end=None):
Expand Down Expand Up @@ -75,6 +122,9 @@ def adjust(traces, n=None, fill=0):
def sync(traces, step=1, stop=None):
"""Synchronize trace signals by correlating them
WARNING: this method may cause segfault
when the memory adjacent to traces cannot be used
This function implements an algorithm based on Pearson's
correlation to synchronize signals peaks.
Expand Down Expand Up @@ -106,9 +156,6 @@ def sync(traces, step=1, stop=None):
stop = min(stop or m, m)
shifts = list(range(0, stop, step))

def _pearsonr_from_ref(r, st, sh):
return list(map(lambda s: stats.pearsonr(r, st[s])[0], sh))

for trace in traces:
strided = np.lib.stride_tricks.as_strided(trace, shape, strides_pos)
try:
Expand Down
Loading

0 comments on commit 143f9af

Please sign in to comment.