First commit

Cyril 2024-03-10 21:45:05 +01:00
commit 7693c29676
102 changed files with 11831 additions and 0 deletions

View file

@@ -0,0 +1,39 @@
class CalibrateShaperConfig:
def __init__(self, config):
self.printer = config.get_printer()
shaper_type = config.get('shaper_type', 'mzv')
self.shaper_type_x = config.get('shaper_type_x', shaper_type)
self.shaper_freq_x = config.getfloat('shaper_freq_x', 0., minval=0.)
self.shaper_type_y = config.get('shaper_type_y', shaper_type)
self.shaper_freq_y = config.getfloat('shaper_freq_y', 0., minval=0.)
# Register commands
gcode = config.get_printer().lookup_object('gcode')
gcode.register_command("SAVE_INPUT_SHAPER", self.cmd_save_input_shaper)
def get_status(self, eventtime):
return {}
def cmd_save_input_shaper(self, gcmd):
self.shaper_freq_x = gcmd.get_float('SHAPER_FREQ_X',
self.shaper_freq_x, minval=0.)
self.shaper_type_x = gcmd.get('SHAPER_TYPE_X', self.shaper_type_x)
self.shaper_freq_y = gcmd.get_float('SHAPER_FREQ_Y',
self.shaper_freq_y, minval=0.)
self.shaper_type_y = gcmd.get('SHAPER_TYPE_Y', self.shaper_type_y)
configfile = self.printer.lookup_object('configfile')
configfile.set('input_shaper', 'shaper_type_x', self.shaper_type_x)
configfile.set('input_shaper', 'shaper_freq_x',
'%.1f' % (self.shaper_freq_x,))
configfile.set('input_shaper', 'shaper_type_y', self.shaper_type_y)
configfile.set('input_shaper', 'shaper_freq_y',
'%.1f' % (self.shaper_freq_y,))
def load_config(config):
return CalibrateShaperConfig(config)
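
A usage sketch (assuming the extra is loaded through a [calibrate_shaper_config] section, as the config file below does): SAVE_INPUT_SHAPER stages the given values in the pending configuration, and the save command (CXSAVE_CONFIG in the macros below) writes them to disk. The frequencies here are illustrative:

SAVE_INPUT_SHAPER SHAPER_TYPE_X=mzv SHAPER_FREQ_X=52.4 SHAPER_TYPE_Y=mzv SHAPER_FREQ_Y=38.6
CXSAVE_CONFIG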

View file

@@ -0,0 +1,118 @@
########################################
# Improved Shapers Configurations
########################################
[respond]
[calibrate_shaper_config]
[gcode_shell_command resonance_graph]
command: /usr/data/printer_data/config/Helper-Script/improved-shapers/scripts/calibrate_shaper.py
timeout: 600.0
verbose: True
[gcode_shell_command belts_graph]
command: /usr/data/printer_data/config/Helper-Script/improved-shapers/scripts/graph_belts.py
timeout: 600.0
verbose: True
[gcode_macro INPUT_SHAPER_CALIBRATION]
description: Measure X and Y Axis Resonances and Save values
gcode:
{% if printer["configfile"].config["temperature_fan mcu_fan"] %}
SET_TEMPERATURE_FAN_TARGET TEMPERATURE_FAN=mcu_fan TARGET=30
{% endif %}
{% if printer.toolhead.homed_axes != "xyz" %}
RESPOND TYPE=command MSG="Homing..."
G28
{% endif %}
RESPOND TYPE=command MSG="Measuring X and Y Resonances..."
SHAPER_CALIBRATE
M400
{% if printer["configfile"].config["temperature_fan mcu_fan"] %}
SET_TEMPERATURE_FAN_TARGET TEMPERATURE_FAN=mcu_fan TARGET=50
{% endif %}
CXSAVE_CONFIG
[gcode_macro TEST_RESONANCES_GRAPHS]
description: Test X and Y Axis Resonances and Generate Graphs
gcode:
{% set x_png = params.X_PNG|default("/usr/data/printer_data/config/Helper-Script/improved-shapers/resonances_x.png") %}
{% set y_png = params.Y_PNG|default("/usr/data/printer_data/config/Helper-Script/improved-shapers/resonances_y.png") %}
{% if printer["configfile"].config["temperature_fan mcu_fan"] %}
SET_TEMPERATURE_FAN_TARGET TEMPERATURE_FAN=mcu_fan TARGET=30
{% endif %}
{% if printer.toolhead.homed_axes != "xyz" %}
RESPOND TYPE=command MSG="Homing..."
G28
{% endif %}
RESPOND TYPE=command MSG="Testing X Resonances..."
TEST_RESONANCES AXIS=X NAME=x
M400
RESPOND TYPE=command MSG="Generating X Graphs... This may take some time."
RUN_SHELL_COMMAND CMD=resonance_graph PARAMS="/tmp/resonances_x_x.csv -o {x_png}"
RESPOND TYPE=command MSG="X Graph (resonances_x.png) is available in /Helper-Script/improved-shapers folder."
RESPOND TYPE=command MSG="Testing Y Resonances..."
TEST_RESONANCES AXIS=Y NAME=y
M400
RESPOND TYPE=command MSG="Generating Y Graphs... This may take some time."
RUN_SHELL_COMMAND CMD=resonance_graph PARAMS="/tmp/resonances_y_y.csv -o {y_png}"
RESPOND TYPE=command MSG="Y Graph (resonances_y.png) is available in /Helper-Script/improved-shapers folder."
{% if printer["configfile"].config["temperature_fan mcu_fan"] %}
SET_TEMPERATURE_FAN_TARGET TEMPERATURE_FAN=mcu_fan TARGET=50
{% endif %}
[gcode_macro BELTS_SHAPER_CALIBRATION]
description: Perform a custom half-axis test to analyze and compare the frequency profiles of individual belts on CoreXY printers
gcode:
{% set min_freq = params.FREQ_START|default(5)|float %}
{% set max_freq = params.FREQ_END|default(133.33)|float %}
{% set hz_per_sec = params.HZ_PER_SEC|default(1)|float %}
{% set png_width = params.PNG_WIDTH|default(8)|float %}
{% set png_height = params.PNG_HEIGHT|default(4.8)|float %}
{% set png_out_path = params.PNG_OUT_PATH|default("/usr/data/printer_data/config/Helper-Script/improved-shapers/belts_calibration.png") %}
{% if printer["configfile"].config["temperature_fan mcu_fan"] %}
SET_TEMPERATURE_FAN_TARGET TEMPERATURE_FAN=mcu_fan TARGET=30
{% endif %}
{% if printer.toolhead.homed_axes != "xyz" %}
RESPOND TYPE=command MSG="Homing..."
G28
{% endif %}
TEST_RESONANCES AXIS=1,1 OUTPUT=raw_data NAME=b FREQ_START={min_freq} FREQ_END={max_freq} HZ_PER_SEC={hz_per_sec}
M400
TEST_RESONANCES AXIS=1,-1 OUTPUT=raw_data NAME=a FREQ_START={min_freq} FREQ_END={max_freq} HZ_PER_SEC={hz_per_sec}
M400
RESPOND TYPE=command MSG="Generating belts comparative frequency profile..."
RESPOND TYPE=command MSG="This may take some time (3-5min)."
RUN_SHELL_COMMAND CMD=belts_graph PARAMS="-w {png_width} -l {png_height} -n -o {png_out_path} -k /usr/share/klipper /tmp/raw_data_axis=1.000,-1.000_a.csv /tmp/raw_data_axis=1.000,1.000_b.csv"
RESPOND TYPE=command MSG="Graph (belts_calibration.png) is available in /Helper-Script/improved-shapers folder."
{% if printer["configfile"].config["temperature_fan mcu_fan"] %}
SET_TEMPERATURE_FAN_TARGET TEMPERATURE_FAN=mcu_fan TARGET=50
{% endif %}
[gcode_macro EXCITATE_AXIS_AT_FREQ]
description: Maintain a specified excitation frequency for a period of time to diagnose and locate a vibration source
gcode:
{% set frequency = params.FREQUENCY|default(25)|int %}
{% set time = params.TIME|default(10)|int %}
{% set axis = params.AXIS|default("x")|string|lower %}
{% if axis not in ["x", "y", "a", "b"] %}
{ action_raise_error("AXIS selection is invalid. Should be either x, y, a or b!") }
{% endif %}
{% if axis == "a" %}
{% set axis = "1,-1" %}
{% elif axis == "b" %}
{% set axis = "1,1" %}
{% endif %}
{% if printer.toolhead.homed_axes != "xyz" %}
RESPOND TYPE=command MSG="Homing..."
G28
{% endif %}
TEST_RESONANCES OUTPUT=raw_data AXIS={axis} FREQ_START={frequency-1} FREQ_END={frequency+1} HZ_PER_SEC={1/(time/3)}
M400
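
Illustrative console invocations for the macros above (parameter values are examples; the defaults are defined in each macro):

INPUT_SHAPER_CALIBRATION
TEST_RESONANCES_GRAPHS
BELTS_SHAPER_CALIBRATION FREQ_START=5 FREQ_END=133.33 HZ_PER_SEC=1
EXCITATE_AXIS_AT_FREQ FREQUENCY=45 TIME=10 AXIS=a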

View file

@@ -0,0 +1,186 @@
#!/usr/bin/env python3
###!/usr/data/rootfs/usr/bin/python3
# Shaper auto-calibration script
#
# Copyright (C) 2020 Dmitry Butyugin <dmbutyugin@google.com>
# Copyright (C) 2020 Kevin O'Connor <kevin@koconnor.net>
#
# This file may be distributed under the terms of the GNU GPLv3 license.
from __future__ import print_function
import importlib, optparse, os, sys, pathlib
from textwrap import wrap
import numpy as np, matplotlib
import shaper_calibrate
import json
MAX_TITLE_LENGTH=65
def parse_log(logname):
with open(logname) as f:
for header in f:
if not header.startswith('#'):
break
if not header.startswith('freq,psd_x,psd_y,psd_z,psd_xyz'):
# Raw accelerometer data
return np.loadtxt(logname, comments='#', delimiter=',')
# Parse power spectral density data
data = np.loadtxt(logname, skiprows=1, comments='#', delimiter=',')
calibration_data = shaper_calibrate.CalibrationData(
freq_bins=data[:,0], psd_sum=data[:,4],
psd_x=data[:,1], psd_y=data[:,2], psd_z=data[:,3])
calibration_data.set_numpy(np)
# If input shapers are present in the CSV file, the frequency
# response is already normalized to input frequencies
if 'mzv' not in header:
calibration_data.normalize_to_frequencies()
return calibration_data
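# For reference, the PSD CSV layout handled above matches the output of
# save_calibration_data() in shaper_calibrate.py (values illustrative):
#   freq,psd_x,psd_y,psd_z,psd_xyz,mzv(52.4)
#   5.0,1.234e-03,5.678e-04,9.012e-05,1.890e-03,0.987
# The 'mzv' substring check detects such shaper columns, in which case the
# data is already normalized to input frequencies.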
######################################################################
# Shaper calibration
######################################################################
# Find the best shaper parameters
def calibrate_shaper(datas, csv_output, max_smoothing):
helper = shaper_calibrate.ShaperCalibrate(printer=None)
if isinstance(datas[0], shaper_calibrate.CalibrationData):
calibration_data = datas[0]
for data in datas[1:]:
calibration_data.add_data(data)
else:
# Process accelerometer data
calibration_data = helper.process_accelerometer_data(datas[0])
for data in datas[1:]:
calibration_data.add_data(helper.process_accelerometer_data(data))
calibration_data.normalize_to_frequencies()
shaper, all_shapers, resp = helper.find_best_shaper(
calibration_data, max_smoothing, print)
if csv_output is not None:
helper.save_calibration_data(
csv_output, calibration_data, all_shapers)
return shaper.name, all_shapers, calibration_data, resp
######################################################################
# Plot frequency response and suggested input shapers
######################################################################
def plot_freq_response(lognames, calibration_data, shapers,
selected_shaper, max_freq):
freqs = calibration_data.freq_bins
psd = calibration_data.psd_sum[freqs <= max_freq]
px = calibration_data.psd_x[freqs <= max_freq]
py = calibration_data.psd_y[freqs <= max_freq]
pz = calibration_data.psd_z[freqs <= max_freq]
freqs = freqs[freqs <= max_freq]
fontP = matplotlib.font_manager.FontProperties()
fontP.set_size('small')
fig, ax = matplotlib.pyplot.subplots()
ax.set_xlabel('Frequency, Hz')
ax.set_xlim([0, max_freq])
ax.set_ylabel('Power spectral density')
ax.plot(freqs, psd, label='X+Y+Z', color='purple')
ax.plot(freqs, px, label='X', color='red')
ax.plot(freqs, py, label='Y', color='green')
ax.plot(freqs, pz, label='Z', color='blue')
title = "Frequency response and shapers (%s)" % (', '.join(lognames))
ax.set_title("\n".join(wrap(title, MAX_TITLE_LENGTH)))
ax.xaxis.set_minor_locator(matplotlib.ticker.MultipleLocator(5))
ax.yaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator())
ax.ticklabel_format(axis='y', style='scientific', scilimits=(0,0))
ax.grid(which='major', color='grey')
ax.grid(which='minor', color='lightgrey')
ax2 = ax.twinx()
ax2.set_ylabel('Shaper vibration reduction (ratio)')
best_shaper_vals = None
for shaper in shapers:
label = "%s (%.1f Hz, vibr=%.1f%%, sm~=%.2f, accel<=%.f)" % (
shaper.name.upper(), shaper.freq,
shaper.vibrs * 100., shaper.smoothing,
round(shaper.max_accel / 100.) * 100.)
linestyle = 'dotted'
if shaper.name == selected_shaper:
linestyle = 'dashdot'
best_shaper_vals = shaper.vals
ax2.plot(freqs, shaper.vals, label=label, linestyle=linestyle)
ax.plot(freqs, psd * best_shaper_vals,
label='After\nshaper', color='cyan')
# A hack to add a human-readable shaper recommendation to legend
ax2.plot([], [], ' ',
label="Recommended shaper: %s" % (selected_shaper.upper()))
ax.legend(loc='upper left', prop=fontP)
ax2.legend(loc='upper right', prop=fontP)
fig.tight_layout()
return fig
######################################################################
# Startup
######################################################################
def setup_matplotlib(output_to_file):
global matplotlib
if output_to_file:
matplotlib.rcParams.update({'figure.autolayout': True})
matplotlib.use('Agg')
import matplotlib.pyplot, matplotlib.dates, matplotlib.font_manager
import matplotlib.ticker
def main():
# Parse command-line arguments
usage = "%prog [options] <logs>"
opts = optparse.OptionParser(usage)
opts.add_option("-o", "--output", type="string", dest="output",
default=None, help="filename of output graph")
opts.add_option("-c", "--csv", type="string", dest="csv",
default=None, help="filename of output csv file")
opts.add_option("-f", "--max_freq", type="float", default=200.,
help="maximum frequency to graph")
opts.add_option("-s", "--max_smoothing", type="float", default=None,
help="maximum shaper smoothing to allow")
opts.add_option("-w", "--width", type="float", dest="width",
default=8.3, help="width (inches) of the graph(s)")
opts.add_option("-l", "--height", type="float", dest="height",
default=11.6, help="height (inches) of the graph(s)")
options, args = opts.parse_args()
if len(args) < 1:
opts.error("Incorrect number of arguments")
if options.max_smoothing is not None and options.max_smoothing < 0.05:
opts.error("Too small max_smoothing specified (must be at least 0.05)")
# Parse data
datas = [parse_log(fn) for fn in args]
# Calibrate shaper and generate outputs
selected_shaper, shapers, calibration_data, resp = calibrate_shaper(
datas, options.csv, options.max_smoothing)
resp['logfile'] = args[0]
if not options.csv or options.output:
# Draw graph
setup_matplotlib(options.output is not None)
fig = plot_freq_response(args, calibration_data, shapers,
selected_shaper, options.max_freq)
# Show graph
if options.output is None:
matplotlib.pyplot.show()
else:
pathlib.Path(options.output).unlink(missing_ok=True)
fig.set_size_inches(options.width, options.height)
fig.savefig(options.output)
resp['png'] = options.output
print(json.dumps(resp))
if __name__ == '__main__':
main()
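
For reference, the JSON printed on the last line has this shape (values illustrative; the "png" key is only set when -o is given):

{"shapers": {"mzv": {"freq": 52.4, "vib": 1.3, "smooth": 0.09, "max_acel": 5500}}, "best": "mzv", "logfile": "/tmp/resonances_x_x.csv", "png": "/tmp/resonances_x.png"}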

View file

@@ -0,0 +1,573 @@
#!/usr/bin/env python3
#################################################
######## CoreXY BELTS CALIBRATION SCRIPT ########
#################################################
# Written by Frix_x#0161 #
# Be sure to make this script executable using SSH: type 'chmod +x ./graph_belts.py' when in the folder!
#####################################################################
################ !!! DO NOT EDIT BELOW THIS LINE !!! ################
#####################################################################
import optparse, matplotlib, sys, importlib, os, pathlib
from collections import namedtuple
import numpy as np
import matplotlib.pyplot, matplotlib.dates, matplotlib.font_manager
import matplotlib.ticker, matplotlib.gridspec, matplotlib.colors
import matplotlib.patches
import locale
import time
import glob
import shaper_calibrate
from datetime import datetime
matplotlib.use('Agg')
ALPHABET = "ABCDEFGHIJKLMNOPQRSTUVWXYZ" # For paired peaks names
PEAKS_DETECTION_THRESHOLD = 0.20
CURVE_SIMILARITY_SIGMOID_K = 0.6
DC_GRAIN_OF_SALT_FACTOR = 0.75
DC_THRESHOLD_METRIC = 1.5e9
DC_MAX_UNPAIRED_PEAKS_ALLOWED = 4
# Define the SignalData namedtuple
SignalData = namedtuple('SignalData', ['freqs', 'psd', 'peaks', 'paired_peaks', 'unpaired_peaks'])
KLIPPAIN_COLORS = {
"purple": "#70088C",
"orange": "#FF8D32",
"dark_purple": "#150140",
"dark_orange": "#F24130",
"red_pink": "#F2055C"
}
# Set the best locale for time and date formatting (generation of the titles)
try:
locale.setlocale(locale.LC_TIME, locale.getdefaultlocale())
except locale.Error:
locale.setlocale(locale.LC_TIME, 'C')
# Override the built-in print function to avoid problems in Klipper due to locale settings
original_print = print
def print_with_c_locale(*args, **kwargs):
original_locale = locale.setlocale(locale.LC_ALL, None)
locale.setlocale(locale.LC_ALL, 'C')
original_print(*args, **kwargs)
locale.setlocale(locale.LC_ALL, original_locale)
print = print_with_c_locale
def is_file_open(filepath):
for proc in os.listdir('/proc'):
if proc.isdigit():
for fd in glob.glob(f'/proc/{proc}/fd/*'):
try:
if os.path.samefile(fd, filepath):
return True
except FileNotFoundError:
# Klipper has already released the CSV file
pass
except PermissionError:
# Unable to check for this particular process due to permissions
pass
return False
######################################################################
# Computation of the PSD graph
######################################################################
# Calculate estimated "power spectral density" using existing Klipper tools
def calc_freq_response(data):
helper = shaper_calibrate.ShaperCalibrate(printer=None)
return helper.process_accelerometer_data(data)
# Calculate or estimate a "similarity" factor between two PSD curves and scale it to a percentage. This is
# used here to quantify how close the two belts path behavior and responses are close together.
def compute_curve_similarity_factor(signal1, signal2):
freqs1 = signal1.freqs
psd1 = signal1.psd
freqs2 = signal2.freqs
psd2 = signal2.psd
# Interpolate PSDs to match the same frequency bins and do a cross-correlation
psd2_interp = np.interp(freqs1, freqs2, psd2)
cross_corr = np.correlate(psd1, psd2_interp, mode='full')
# Find the peak of the cross-correlation and compute a similarity normalized by the energy of the signals
peak_value = np.max(cross_corr)
similarity = peak_value / (np.sqrt(np.sum(psd1**2) * np.sum(psd2_interp**2)))
# Apply sigmoid scaling to get better numbers and get a final percentage value
scaled_similarity = sigmoid_scale(-np.log(1 - similarity), CURVE_SIMILARITY_SIGMOID_K)
return scaled_similarity
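# Illustrative numbers for the scaling above: a normalized cross-correlation
# peak of 0.95 gives -log(1 - 0.95) ~= 3.0, and sigmoid_scale(3.0, k=0.6) =
# 100 / (1 + e^-1.8) ~= 85.8%, so even very close curves stay below 100%.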
# This finds all the peaks in a curve by looking at where the derivative goes from positive to negative.
# Then only the peaks found above a threshold are kept, to avoid capturing peaks in the low-amplitude noise of a signal.
def detect_peaks(psd, freqs, window_size=5, vicinity=3):
# Smooth the curve using a moving average to avoid catching peaks everywhere in noisy signals
kernel = np.ones(window_size) / window_size
smoothed_psd = np.convolve(psd, kernel, mode='valid')
mean_pad = [np.mean(psd[:window_size])] * (window_size // 2)
smoothed_psd = np.concatenate((mean_pad, smoothed_psd))
# Find peaks on the smoothed curve
smoothed_peaks = np.where((smoothed_psd[:-2] < smoothed_psd[1:-1]) & (smoothed_psd[1:-1] > smoothed_psd[2:]))[0] + 1
detection_threshold = PEAKS_DETECTION_THRESHOLD * psd.max()
smoothed_peaks = smoothed_peaks[smoothed_psd[smoothed_peaks] > detection_threshold]
# Refine peak positions on the original curve
refined_peaks = []
for peak in smoothed_peaks:
local_max = peak + np.argmax(psd[max(0, peak-vicinity):min(len(psd), peak+vicinity+1)]) - vicinity
refined_peaks.append(local_max)
return np.array(refined_peaks), freqs[refined_peaks]
# This function creates pairs of peaks that are close in frequency on two curves (these are known
# to be resonance points and must be similar on both belts on a CoreXY kinematics)
def pair_peaks(peaks1, freqs1, psd1, peaks2, freqs2, psd2):
# Compute a dynamic detection threshold to filter and pair peaks efficiently
# even if the signal is very noisy (this gets clipped to a maximum of 10 Hz diff)
distances = []
for p1 in peaks1:
for p2 in peaks2:
distances.append(abs(freqs1[p1] - freqs2[p2]))
distances = np.array(distances)
median_distance = np.median(distances)
iqr = np.percentile(distances, 75) - np.percentile(distances, 25)
threshold = median_distance + 1.5 * iqr
threshold = min(threshold, 10)
# Pair the peaks using the dynamic threshold
paired_peaks = []
unpaired_peaks1 = list(peaks1)
unpaired_peaks2 = list(peaks2)
while unpaired_peaks1 and unpaired_peaks2:
min_distance = threshold + 1
pair = None
for p1 in unpaired_peaks1:
for p2 in unpaired_peaks2:
distance = abs(freqs1[p1] - freqs2[p2])
if distance < min_distance:
min_distance = distance
pair = (p1, p2)
if pair is None: # No more pairs below the threshold
break
p1, p2 = pair
paired_peaks.append(((p1, freqs1[p1], psd1[p1]), (p2, freqs2[p2], psd2[p2])))
unpaired_peaks1.remove(p1)
unpaired_peaks2.remove(p2)
return paired_peaks, unpaired_peaks1, unpaired_peaks2
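# Example of the dynamic threshold (illustrative): with pairwise peak
# distances [1.2, 2.0, 2.4, 9.8] Hz, median = 2.2 and IQR = 2.45, so the
# threshold is 2.2 + 1.5 * 2.45 ~= 5.9 Hz, well under the 10 Hz clip; a peak
# whose closest counterpart is 9.8 Hz away would stay unpaired.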
######################################################################
# Computation of a basic signal spectrogram
######################################################################
def compute_spectrogram(data):
import scipy
N = data.shape[0]
Fs = N / (data[-1, 0] - data[0, 0])
# Round up to a power of 2 for faster FFT
M = 1 << int(.5 * Fs - 1).bit_length()
window = np.kaiser(M, 6.)
def _specgram(x):
x_detrended = x - np.mean(x) # Detrending by subtracting the mean value
return scipy.signal.spectrogram(
x_detrended, fs=Fs, window=window, nperseg=M, noverlap=M//2,
detrend='constant', scaling='density', mode='psd')
d = {'x': data[:, 1], 'y': data[:, 2], 'z': data[:, 3]}
f, t, pdata = _specgram(d['x'])
for axis in 'yz':
pdata += _specgram(d[axis])[2]
return pdata, t, f
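# Sizing example (illustrative): at an effective sample rate of ~1000 Hz,
# int(0.5 * 1000 - 1).bit_length() == 9, so M = 512 samples per window
# (~0.5 s), with 50% overlap between consecutive windows.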
######################################################################
# Computation of the differential spectrogram
######################################################################
# Interpolate source_data (2D) to match target_x and target_y in order to
# get similar time and frequency dimensions for the differential spectrogram
def interpolate_2d(target_x, target_y, source_x, source_y, source_data):
import scipy
# Create a grid of points in the source and target space
source_points = np.array([(x, y) for y in source_y for x in source_x])
target_points = np.array([(x, y) for y in target_y for x in target_x])
# Flatten the source data to match the flattened source points
source_values = source_data.flatten()
# Interpolate and reshape the interpolated data to match the target grid shape and replace NaN with zeros
interpolated_data = scipy.interpolate.griddata(source_points, source_values, target_points, method='nearest')
interpolated_data = interpolated_data.reshape((len(target_y), len(target_x)))
interpolated_data = np.nan_to_num(interpolated_data)
return interpolated_data
# Main logic function to combine two similar spectrograms - i.e. from both belt paths - by subtracting signals in order to create
# a new composite spectrogram. This results in a divergent but mostly centered new spectrogram (the center will be white) with some colored zones
# highlighting differences in the belt paths. The summed spectrogram is used for the MHI calculation.
def combined_spectrogram(data1, data2):
pdata1, bins1, t1 = compute_spectrogram(data1)
pdata2, bins2, t2 = compute_spectrogram(data2)
# Interpolate the spectrograms
pdata2_interpolated = interpolate_2d(bins1, t1, bins2, t2, pdata2)
# Combine them in two forms: a summed diff for the MHI computation and a diverging diff for the spectrogram colors
combined_sum = np.abs(pdata1 - pdata2_interpolated)
combined_divergent = pdata1 - pdata2_interpolated
return combined_sum, combined_divergent, bins1, t1
# Compute a composite and highly subjective value indicating the "mechanical health of the printer (0 to 100%)" that represents the
# likelihood of mechanical issues on the printer. It is based on the sum of the gradient of the differential spectrogram, salted with a bit
# of the estimated similarity cross-correlation from compute_curve_similarity_factor() and with a bit of the number of unpaired peaks.
# This results in a percentage value quantifying the machine behavior around the main resonances, which gives a hint whether adjusting belt
# tension alone will give good graphs or whether there is a chance of mechanical issues in the background (above 50% should be considered probably problematic)
def compute_mhi(combined_data, similarity_coefficient, num_unpaired_peaks):
# filtered_data = combined_data[combined_data > 100]
filtered_data = np.abs(combined_data)
# First compute a "total variability metric" based on the sum of the gradient that sum the magnitude of will emphasize regions of the
# spectrogram where there are rapid changes in magnitude (like the edges of resonance peaks).
total_variability_metric = np.sum(np.abs(np.gradient(filtered_data)))
# Scale the metric to a percentage using the threshold (found empirically on a large amount of user data shared with me)
base_percentage = (np.log1p(total_variability_metric) / np.log1p(DC_THRESHOLD_METRIC)) * 100
# Adjust the percentage based on the similarity_coefficient to add a grain of salt
adjusted_percentage = base_percentage * (1 - DC_GRAIN_OF_SALT_FACTOR * (similarity_coefficient / 100))
# Adjust the percentage again based on the number of unpaired peaks to add a second grain of salt
peak_confidence = num_unpaired_peaks / DC_MAX_UNPAIRED_PEAKS_ALLOWED
final_percentage = (1 - peak_confidence) * adjusted_percentage + peak_confidence * 100
# Ensure the result lies between 0 and 100 by clipping the computed value
final_percentage = np.clip(final_percentage, 0, 100)
return final_percentage, mhi_lut(final_percentage)
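# Worked example (illustrative numbers only): a total variability metric of
# 1e8 gives base = log1p(1e8) / log1p(1.5e9) * 100 ~= 87.2%; a similarity of
# 80% scales it by (1 - 0.75 * 0.8) = 0.4 to ~34.9%; with 1 unpaired peak,
# final = 0.75 * 34.9 + 0.25 * 100 ~= 51.2%, i.e. "Acceptable mechanical health".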
# LUT to transform the MHI into a textual value easy to understand for the users of the script
def mhi_lut(mhi):
if 0 <= mhi <= 30:
return "Excellent mechanical health"
elif 30 < mhi <= 45:
return "Good mechanical health"
elif 45 < mhi <= 55:
return "Acceptable mechanical health"
elif 55 < mhi <= 70:
return "Potential signs of a mechanical issue"
elif 70 < mhi <= 85:
return "Likely a mechanical issue"
elif 85 < mhi <= 100:
return "Mechanical issue detected"
######################################################################
# Graphing
######################################################################
def plot_compare_frequency(ax, lognames, signal1, signal2, max_freq):
# Get the belt name for the legend to avoid putting the full file name
signal1_belt = (lognames[0].split('/')[-1]).split('_')[-1][0]
signal2_belt = (lognames[1].split('/')[-1]).split('_')[-1][0]
if signal1_belt == 'A' and signal2_belt == 'B':
signal1_belt += " (axis 1,-1)"
signal2_belt += " (axis 1, 1)"
elif signal1_belt == 'B' and signal2_belt == 'A':
signal1_belt += " (axis 1, 1)"
signal2_belt += " (axis 1,-1)"
else:
print("Warning: belts doesn't seem to have the correct name A and B (extracted from the filename.csv)")
# Plot the two belts PSD signals
ax.plot(signal1.freqs, signal1.psd, label="Belt " + signal1_belt, color=KLIPPAIN_COLORS['purple'])
ax.plot(signal2.freqs, signal2.psd, label="Belt " + signal2_belt, color=KLIPPAIN_COLORS['orange'])
# Trace the "relax region" (also used as a threshold to filter and detect the peaks)
psd_lowest_max = min(signal1.psd.max(), signal2.psd.max())
peaks_warning_threshold = PEAKS_DETECTION_THRESHOLD * psd_lowest_max
ax.axhline(y=peaks_warning_threshold, color='black', linestyle='--', linewidth=0.5)
ax.fill_between(signal1.freqs, 0, peaks_warning_threshold, color='green', alpha=0.15, label='Relax Region')
# Trace and annotate the peaks on the graph
paired_peak_count = 0
unpaired_peak_count = 0
offsets_table_data = []
for _, (peak1, peak2) in enumerate(signal1.paired_peaks):
label = ALPHABET[paired_peak_count]
amplitude_offset = abs(((signal2.psd[peak2[0]] - signal1.psd[peak1[0]]) / max(signal1.psd[peak1[0]], signal2.psd[peak2[0]])) * 100)
frequency_offset = abs(signal2.freqs[peak2[0]] - signal1.freqs[peak1[0]])
offsets_table_data.append([f"Peaks {label}", f"{frequency_offset:.1f} Hz", f"{amplitude_offset:.1f} %"])
ax.plot(signal1.freqs[peak1[0]], signal1.psd[peak1[0]], "x", color='black')
ax.plot(signal2.freqs[peak2[0]], signal2.psd[peak2[0]], "x", color='black')
ax.plot([signal1.freqs[peak1[0]], signal2.freqs[peak2[0]]], [signal1.psd[peak1[0]], signal2.psd[peak2[0]]], ":", color='gray')
ax.annotate(label + "1", (signal1.freqs[peak1[0]], signal1.psd[peak1[0]]),
textcoords="offset points", xytext=(8, 5),
ha='left', fontsize=13, color='black')
ax.annotate(label + "2", (signal2.freqs[peak2[0]], signal2.psd[peak2[0]]),
textcoords="offset points", xytext=(8, 5),
ha='left', fontsize=13, color='black')
paired_peak_count += 1
for peak in signal1.unpaired_peaks:
ax.plot(signal1.freqs[peak], signal1.psd[peak], "x", color='black')
ax.annotate(str(unpaired_peak_count + 1), (signal1.freqs[peak], signal1.psd[peak]),
textcoords="offset points", xytext=(8, 5),
ha='left', fontsize=13, color='red', weight='bold')
unpaired_peak_count += 1
for peak in signal2.unpaired_peaks:
ax.plot(signal2.freqs[peak], signal2.psd[peak], "x", color='black')
ax.annotate(str(unpaired_peak_count + 1), (signal2.freqs[peak], signal2.psd[peak]),
textcoords="offset points", xytext=(8, 5),
ha='left', fontsize=13, color='red', weight='bold')
unpaired_peak_count += 1
# Compute the similarity (using cross-correlation of the PSD signals)
ax2 = ax.twinx() # To split the legends in two boxes
ax2.yaxis.set_visible(False)
similarity_factor = compute_curve_similarity_factor(signal1, signal2)
ax2.plot([], [], ' ', label=f'Estimated similarity: {similarity_factor:.1f}%')
ax2.plot([], [], ' ', label=f'Number of unpaired peaks: {unpaired_peak_count}')
print(f"Belts estimated similarity: {similarity_factor:.1f}%")
# Setting axis parameters, grid and graph title
ax.set_xlabel('Frequency (Hz)')
ax.set_xlim([0, max_freq])
ax.set_ylabel('Power spectral density')
psd_highest_max = max(signal1.psd.max(), signal2.psd.max())
ax.set_ylim([0, psd_highest_max + psd_highest_max * 0.05])
ax.xaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator())
ax.yaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator())
ax.ticklabel_format(axis='y', style='scientific', scilimits=(0,0))
ax.grid(which='major', color='grey')
ax.grid(which='minor', color='lightgrey')
fontP = matplotlib.font_manager.FontProperties()
fontP.set_size('small')
ax.set_title('Belts Frequency Profiles (estimated similarity: {:.1f}%)'.format(similarity_factor), fontsize=10, color=KLIPPAIN_COLORS['dark_orange'], weight='bold')
# Print the table of offsets on top of the graph, below the original legend (upper right)
if len(offsets_table_data) > 0:
columns = ["", "Frequency delta", "Amplitude delta", ]
offset_table = ax.table(cellText=offsets_table_data, colLabels=columns, bbox=[0.66, 0.75, 0.33, 0.15], loc='upper right', cellLoc='center')
offset_table.auto_set_font_size(False)
offset_table.set_fontsize(8)
offset_table.auto_set_column_width([0, 1, 2])
offset_table.set_zorder(100)
cells = [key for key in offset_table.get_celld().keys()]
for cell in cells:
offset_table[cell].set_facecolor('white')
offset_table[cell].set_alpha(0.6)
ax.legend(loc='upper left', prop=fontP)
ax2.legend(loc='center right', prop=fontP)
return similarity_factor, unpaired_peak_count
def plot_difference_spectrogram(ax, data1, data2, signal1, signal2, similarity_factor, max_freq):
combined_sum, combined_divergent, bins, t = combined_spectrogram(data1, data2)
# Compute the MHI value from the differential spectrogram sum of gradient, salted with
# the similarity factor and the number of unpaired peaks from the belts frequency profile
# Be careful, this value is highly opinionated and is pretty experimental!
mhi, textual_mhi = compute_mhi(combined_sum, similarity_factor, len(signal1.unpaired_peaks) + len(signal2.unpaired_peaks))
print(f"[experimental] Mechanical Health Indicator: {textual_mhi.lower()} ({mhi:.1f}%)")
ax.set_title(f"Differential Spectrogram", fontsize=14, color=KLIPPAIN_COLORS['dark_orange'], weight='bold')
ax.plot([], [], ' ', label=f'{textual_mhi} (experimental)')
# Draw the differential spectrogram with a specific custom norm to get orange or purple values where there is signal or white near zeros
colors = [KLIPPAIN_COLORS['dark_orange'], KLIPPAIN_COLORS['orange'], 'white', KLIPPAIN_COLORS['purple'], KLIPPAIN_COLORS['dark_purple']]
cm = matplotlib.colors.LinearSegmentedColormap.from_list('klippain_divergent', list(zip([0, 0.25, 0.5, 0.75, 1], colors)))
norm = matplotlib.colors.TwoSlopeNorm(vmin=np.min(combined_divergent), vcenter=0, vmax=np.max(combined_divergent))
ax.pcolormesh(t, bins, combined_divergent.T, cmap=cm, norm=norm, shading='gouraud')
ax.set_xlabel('Frequency (Hz)')
ax.set_xlim([0., max_freq])
ax.set_ylabel('Time (s)')
ax.set_ylim([0, bins[-1]])
fontP = matplotlib.font_manager.FontProperties()
fontP.set_size('medium')
ax.legend(loc='best', prop=fontP)
# Plot vertical lines for unpaired peaks
unpaired_peak_count = 0
for _, peak in enumerate(signal1.unpaired_peaks):
ax.axvline(signal1.freqs[peak], color=KLIPPAIN_COLORS['red_pink'], linestyle='dotted', linewidth=1.5)
ax.annotate(f"Peak {unpaired_peak_count + 1}", (signal1.freqs[peak], t[-1]*0.05),
textcoords="data", color=KLIPPAIN_COLORS['red_pink'], rotation=90, fontsize=10,
verticalalignment='bottom', horizontalalignment='right')
unpaired_peak_count += 1
for _, peak in enumerate(signal2.unpaired_peaks):
ax.axvline(signal2.freqs[peak], color=KLIPPAIN_COLORS['red_pink'], linestyle='dotted', linewidth=1.5)
ax.annotate(f"Peak {unpaired_peak_count + 1}", (signal2.freqs[peak], t[-1]*0.05),
textcoords="data", color=KLIPPAIN_COLORS['red_pink'], rotation=90, fontsize=10,
verticalalignment='bottom', horizontalalignment='right')
unpaired_peak_count += 1
# Plot vertical lines and zones for paired peaks
for idx, (peak1, peak2) in enumerate(signal1.paired_peaks):
label = ALPHABET[idx]
x_min = min(peak1[1], peak2[1])
x_max = max(peak1[1], peak2[1])
ax.axvline(x_min, color=KLIPPAIN_COLORS['dark_purple'], linestyle='dotted', linewidth=1.5)
ax.axvline(x_max, color=KLIPPAIN_COLORS['dark_purple'], linestyle='dotted', linewidth=1.5)
ax.fill_between([x_min, x_max], 0, np.max(combined_divergent), color=KLIPPAIN_COLORS['dark_purple'], alpha=0.3)
ax.annotate(f"Peaks {label}", (x_min, t[-1]*0.05),
textcoords="data", color=KLIPPAIN_COLORS['dark_purple'], rotation=90, fontsize=10,
verticalalignment='bottom', horizontalalignment='right')
return
######################################################################
# Custom tools
######################################################################
# Simple helper to compute a sigmoid scaling (from 0 to 100%)
def sigmoid_scale(x, k=1):
return 1 / (1 + np.exp(-k * x)) * 100
# Compute the PSD data of a raw accelerometer signal using the original Klipper tooling
def compute_signal_data(data, max_freq):
calibration_data = calc_freq_response(data)
freqs = calibration_data.freq_bins[calibration_data.freq_bins <= max_freq]
psd = calibration_data.get_psd('all')[calibration_data.freq_bins <= max_freq]
peaks, _ = detect_peaks(psd, freqs)
return SignalData(freqs=freqs, psd=psd, peaks=peaks, paired_peaks=None, unpaired_peaks=None)
######################################################################
# Startup and main routines
######################################################################
def parse_log(logname):
with open(logname) as f:
for header in f:
if not header.startswith('#'):
break
if not header.startswith('freq,psd_x,psd_y,psd_z,psd_xyz'):
# Raw accelerometer data
return np.loadtxt(logname, comments='#', delimiter=',')
# Power spectral density data or shaper calibration data
raise ValueError("File %s does not contain raw accelerometer data and therefore "
"is not supported by this script. Please use the official Klipper "
"graph_accelerometer.py script to process it instead." % (logname,))
def belts_calibration(lognames, klipperdir="~/klipper", max_freq=200., graph_spectrogram=True, width=8.3, height=11.6):
for filename in lognames[:2]:
# Wait for the file handler to be released by Klipper
while is_file_open(filename):
time.sleep(2)
# Parse data
datas = [parse_log(fn) for fn in lognames]
if len(datas) != 2:
raise ValueError("Incorrect number of .csv files used (this function needs exactly two files to compare them)")
# Compute calibration data for the two datasets with automatic peaks detection
signal1 = compute_signal_data(datas[0], max_freq)
signal2 = compute_signal_data(datas[1], max_freq)
# Pair the peaks across the two datasets
paired_peaks, unpaired_peaks1, unpaired_peaks2 = pair_peaks(signal1.peaks, signal1.freqs, signal1.psd,
signal2.peaks, signal2.freqs, signal2.psd)
signal1 = signal1._replace(paired_peaks=paired_peaks, unpaired_peaks=unpaired_peaks1)
signal2 = signal2._replace(paired_peaks=paired_peaks, unpaired_peaks=unpaired_peaks2)
if graph_spectrogram:
fig = matplotlib.pyplot.figure()
gs = matplotlib.gridspec.GridSpec(2, 1, height_ratios=[4, 3])
ax1 = fig.add_subplot(gs[0])
ax2 = fig.add_subplot(gs[1])
else:
fig, ax1 = matplotlib.pyplot.subplots()
# Add title
try:
filename = lognames[0].split('/')[-1]
dt = datetime.strptime(f"{filename.split('_')[1]} {filename.split('_')[2]}", "%Y%m%d %H%M%S")
title_line2 = dt.strftime('%x %X')
except Exception:
print("Warning: CSV filenames appear to differ from the expected format (%s, %s)" % (lognames[0], lognames[1]))
title_line2 = lognames[0].split('/')[-1] + " / " + lognames[1].split('/')[-1]
fig.suptitle(title_line2)
# Plot the graphs
similarity_factor, _ = plot_compare_frequency(ax1, lognames, signal1, signal2, max_freq)
if graph_spectrogram:
plot_difference_spectrogram(ax2, datas[0], datas[1], signal1, signal2, similarity_factor, max_freq)
fig.set_size_inches(width, height)
fig.tight_layout()
fig.subplots_adjust(top=0.89)
return fig
def main():
# Parse command-line arguments
usage = "%prog [options] <raw logs>"
opts = optparse.OptionParser(usage)
opts.add_option("-o", "--output", type="string", dest="output",
default=None, help="filename of output graph")
opts.add_option("-f", "--max_freq", type="float", default=200.,
help="maximum frequency to graph")
opts.add_option("-k", "--klipper_dir", type="string", dest="klipperdir",
default="~/klipper", help="main klipper directory")
opts.add_option("-n", "--no_spectogram", action="store_false", dest="no_spectogram",
default=True, help="disable plotting of spectogram")
opts.add_option("-w", "--width", type="float", dest="width",
default=8.3, help="width (inches) of the graph(s)")
opts.add_option("-l", "--height", type="float", dest="height",
default=11.6, help="height (inches) of the graph(s)")
options, args = opts.parse_args()
if len(args) < 1:
opts.error("Incorrect number of arguments")
if options.output is None:
opts.error("You must specify an output file.png to use the script (option -o)")
fig = belts_calibration(args, options.klipperdir, options.max_freq, options.no_spectrogram,
options.width, options.height)
pathlib.Path(options.output).unlink(missing_ok=True)
fig.savefig(options.output)
if __name__ == '__main__':
main()

View file

@@ -0,0 +1,372 @@
# Automatic calibration of input shapers
#
# Copyright (C) 2020 Dmitry Butyugin <dmbutyugin@google.com>
#
# This file may be distributed under the terms of the GNU GPLv3 license.
import collections, importlib, logging, math, multiprocessing, traceback
import shaper_defs
MIN_FREQ = 5.
MAX_FREQ = 200.
WINDOW_T_SEC = 0.5
MAX_SHAPER_FREQ = 150.
TEST_DAMPING_RATIOS=[0.075, 0.1, 0.15]
AUTOTUNE_SHAPERS = ['zv', 'mzv', 'ei', '2hump_ei', '3hump_ei']
######################################################################
# Frequency response calculation and shaper auto-tuning
######################################################################
class CalibrationData:
def __init__(self, freq_bins, psd_sum, psd_x, psd_y, psd_z):
self.freq_bins = freq_bins
self.psd_sum = psd_sum
self.psd_x = psd_x
self.psd_y = psd_y
self.psd_z = psd_z
self._psd_list = [self.psd_sum, self.psd_x, self.psd_y, self.psd_z]
self._psd_map = {'x': self.psd_x, 'y': self.psd_y, 'z': self.psd_z,
'all': self.psd_sum}
self.data_sets = 1
def add_data(self, other):
np = self.numpy
joined_data_sets = self.data_sets + other.data_sets
for psd, other_psd in zip(self._psd_list, other._psd_list):
# `other` data may be defined at different frequency bins,
# interpolating to fix that.
other_normalized = other.data_sets * np.interp(
self.freq_bins, other.freq_bins, other_psd)
psd *= self.data_sets
psd[:] = (psd + other_normalized) * (1. / joined_data_sets)
self.data_sets = joined_data_sets
def set_numpy(self, numpy):
self.numpy = numpy
def normalize_to_frequencies(self):
for psd in self._psd_list:
# Avoid division by zero errors
psd /= self.freq_bins + .1
# Remove low-frequency noise
psd[self.freq_bins < MIN_FREQ] = 0.
def get_psd(self, axis='all'):
return self._psd_map[axis]
CalibrationResult = collections.namedtuple(
'CalibrationResult',
('name', 'freq', 'vals', 'vibrs', 'smoothing', 'score', 'max_accel'))
class ShaperCalibrate:
def __init__(self, printer):
self.printer = printer
self.error = printer.command_error if printer else Exception
try:
self.numpy = importlib.import_module('numpy')
except ImportError:
raise self.error(
"Failed to import `numpy` module, make sure it was "
"installed via `~/klippy-env/bin/pip install` (refer to "
"docs/Measuring_Resonances.md for more details).")
def background_process_exec(self, method, args):
if self.printer is None:
return method(*args)
import queuelogger
parent_conn, child_conn = multiprocessing.Pipe()
def wrapper():
queuelogger.clear_bg_logging()
try:
res = method(*args)
except:
child_conn.send((True, traceback.format_exc()))
child_conn.close()
return
child_conn.send((False, res))
child_conn.close()
# Start a process to perform the calculation
calc_proc = multiprocessing.Process(target=wrapper)
calc_proc.daemon = True
calc_proc.start()
# Wait for the process to finish
reactor = self.printer.get_reactor()
gcode = self.printer.lookup_object("gcode")
eventtime = last_report_time = reactor.monotonic()
while calc_proc.is_alive():
if eventtime > last_report_time + 5.:
last_report_time = eventtime
gcode.respond_info("Wait for calculations..", log=False)
eventtime = reactor.pause(eventtime + .1)
# Return results
is_err, res = parent_conn.recv()
if is_err:
raise self.error("Error in remote calculation: %s" % (res,))
calc_proc.join()
parent_conn.close()
return res
def _split_into_windows(self, x, window_size, overlap):
# Memory-efficient algorithm to split an input 'x' into a series
# of overlapping windows
step_between_windows = window_size - overlap
n_windows = (x.shape[-1] - overlap) // step_between_windows
shape = (window_size, n_windows)
strides = (x.strides[-1], step_between_windows * x.strides[-1])
return self.numpy.lib.stride_tricks.as_strided(
x, shape=shape, strides=strides, writeable=False)
def _psd(self, x, fs, nfft):
# Calculate power spectral density (PSD) using Welch's algorithm
np = self.numpy
window = np.kaiser(nfft, 6.)
# Compensation for windowing loss
scale = 1.0 / (window**2).sum()
# Split into overlapping windows of size nfft
overlap = nfft // 2
x = self._split_into_windows(x, nfft, overlap)
# First detrend, then apply windowing function
x = window[:, None] * (x - np.mean(x, axis=0))
# Calculate frequency response for each window using FFT
result = np.fft.rfft(x, n=nfft, axis=0)
result = np.conjugate(result) * result
result *= scale / fs
# For one-sided FFT output the response must be doubled, except
# the last point for unpaired Nyquist frequency (assuming even nfft)
# and the 'DC' term (0 Hz)
result[1:-1,:] *= 2.
# Welch's algorithm: average response over windows
psd = result.real.mean(axis=-1)
# Calculate the frequency bins
freqs = np.fft.rfftfreq(nfft, 1. / fs)
return freqs, psd
def calc_freq_response(self, raw_values):
np = self.numpy
if raw_values is None:
return None
if isinstance(raw_values, np.ndarray):
data = raw_values
else:
samples = raw_values.get_samples()
if not samples:
return None
data = np.array(samples)
N = data.shape[0]
T = data[-1,0] - data[0,0]
SAMPLING_FREQ = N / T
# Round up to the nearest power of 2 for faster FFT
M = 1 << int(SAMPLING_FREQ * WINDOW_T_SEC - 1).bit_length()
if N <= M:
return None
# Calculate PSD (power spectral density) of vibrations per
# frequency bins (the same bins for X, Y, and Z)
fx, px = self._psd(data[:,1], SAMPLING_FREQ, M)
fy, py = self._psd(data[:,2], SAMPLING_FREQ, M)
fz, pz = self._psd(data[:,3], SAMPLING_FREQ, M)
return CalibrationData(fx, px+py+pz, px, py, pz)
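# Window sizing example (illustrative): at the ADXL345's 3200 samples/s and
# WINDOW_T_SEC = 0.5, int(3200 * 0.5 - 1).bit_length() == 11, so the Welch
# PSD above uses M = 2048-sample windows (~0.64 s) with 50% overlap.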
def process_accelerometer_data(self, data):
calibration_data = self.background_process_exec(
self.calc_freq_response, (data,))
if calibration_data is None:
raise self.error(
"Internal error processing accelerometer data %s" % (data,))
calibration_data.set_numpy(self.numpy)
return calibration_data
def _estimate_shaper(self, shaper, test_damping_ratio, test_freqs):
np = self.numpy
A, T = np.array(shaper[0]), np.array(shaper[1])
inv_D = 1. / A.sum()
omega = 2. * math.pi * test_freqs
damping = test_damping_ratio * omega
omega_d = omega * math.sqrt(1. - test_damping_ratio**2)
W = A * np.exp(np.outer(-damping, (T[-1] - T)))
S = W * np.sin(np.outer(omega_d, T))
C = W * np.cos(np.outer(omega_d, T))
return np.sqrt(S.sum(axis=1)**2 + C.sum(axis=1)**2) * inv_D
def _estimate_remaining_vibrations(self, shaper, test_damping_ratio,
freq_bins, psd):
vals = self._estimate_shaper(shaper, test_damping_ratio, freq_bins)
# The input shaper can only reduce the amplitude of vibrations by
# SHAPER_VIBRATION_REDUCTION times, so all vibrations below that
# threshold can be ignored
vibr_threshold = psd.max() / shaper_defs.SHAPER_VIBRATION_REDUCTION
remaining_vibrations = self.numpy.maximum(
vals * psd - vibr_threshold, 0).sum()
all_vibrations = self.numpy.maximum(psd - vibr_threshold, 0).sum()
return (remaining_vibrations / all_vibrations, vals)
def _get_shaper_smoothing(self, shaper, accel=5000, scv=5.):
half_accel = accel * .5
A, T = shaper
inv_D = 1. / sum(A)
n = len(T)
# Calculate input shaper shift
ts = sum([A[i] * T[i] for i in range(n)]) * inv_D
# Calculate offset for 90 and 180 degrees turn
offset_90 = offset_180 = 0.
for i in range(n):
if T[i] >= ts:
# Calculate offset for one of the axes
offset_90 += A[i] * (scv + half_accel * (T[i]-ts)) * (T[i]-ts)
offset_180 += A[i] * half_accel * (T[i]-ts)**2
offset_90 *= inv_D * math.sqrt(2.)
offset_180 *= inv_D
return max(offset_90, offset_180)
def fit_shaper(self, shaper_cfg, calibration_data, max_smoothing):
np = self.numpy
test_freqs = np.arange(shaper_cfg.min_freq, MAX_SHAPER_FREQ, .2)
freq_bins = calibration_data.freq_bins
psd = calibration_data.psd_sum[freq_bins <= MAX_FREQ]
freq_bins = freq_bins[freq_bins <= MAX_FREQ]
best_res = None
results = []
for test_freq in test_freqs[::-1]:
shaper_vibrations = 0.
shaper_vals = np.zeros(shape=freq_bins.shape)
shaper = shaper_cfg.init_func(
test_freq, shaper_defs.DEFAULT_DAMPING_RATIO)
shaper_smoothing = self._get_shaper_smoothing(shaper)
if max_smoothing and shaper_smoothing > max_smoothing and best_res:
return best_res
# Exact damping ratio of the printer is unknown, pessimizing
# remaining vibrations over possible damping values
for dr in TEST_DAMPING_RATIOS:
vibrations, vals = self._estimate_remaining_vibrations(
shaper, dr, freq_bins, psd)
shaper_vals = np.maximum(shaper_vals, vals)
if vibrations > shaper_vibrations:
shaper_vibrations = vibrations
max_accel = self.find_shaper_max_accel(shaper)
# The score tries to minimize vibrations, but also accounts for the
# growth of smoothing. The formula itself does not have any
# special meaning, it simply shows good results on real user data
shaper_score = shaper_smoothing * (shaper_vibrations**1.5 +
shaper_vibrations * .2 + .01)
results.append(
CalibrationResult(
name=shaper_cfg.name, freq=test_freq, vals=shaper_vals,
vibrs=shaper_vibrations, smoothing=shaper_smoothing,
score=shaper_score, max_accel=max_accel))
if best_res is None or best_res.vibrs > results[-1].vibrs:
# The current frequency is better for the shaper.
best_res = results[-1]
# Try to find an 'optimal' shaper configuration: the one that is not
# much worse than the 'best' one, but gives much less smoothing
selected = best_res
for res in results[::-1]:
if res.vibrs < best_res.vibrs * 1.1 and res.score < selected.score:
selected = res
return selected
def _bisect(self, func):
left = right = 1.
while not func(left):
right = left
left *= .5
if right == left:
while func(right):
right *= 2.
while right - left > 1e-8:
middle = (left + right) * .5
if func(middle):
left = middle
else:
right = middle
return left
def find_shaper_max_accel(self, shaper):
# Just some empirically chosen value which produces good projections
# for max_accel without much smoothing
TARGET_SMOOTHING = 0.12
max_accel = self._bisect(lambda test_accel: self._get_shaper_smoothing(
shaper, test_accel) <= TARGET_SMOOTHING)
return max_accel
def find_best_shaper(self, calibration_data, max_smoothing, logger=None):
best_shaper = None
all_shapers = []
resp = {}
for shaper_cfg in shaper_defs.INPUT_SHAPERS:
if shaper_cfg.name not in AUTOTUNE_SHAPERS:
continue
shaper = self.background_process_exec(self.fit_shaper, (
shaper_cfg, calibration_data, max_smoothing))
if logger is not None:
resp[shaper.name] = {
'freq': shaper.freq,
'vib': shaper.vibrs * 100.,
'smooth': shaper.smoothing,
'max_acel': round(shaper.max_accel / 100.) * 100.
}
all_shapers.append(shaper)
if (best_shaper is None or shaper.score * 1.2 < best_shaper.score or
(shaper.score * 1.05 < best_shaper.score and
shaper.smoothing * 1.1 < best_shaper.smoothing)):
# Either the shaper significantly improves the score (by 20%),
# or it improves the score and smoothing (by 5% and 10% resp.)
best_shaper = shaper
return best_shaper, all_shapers, {'shapers': resp, 'best': best_shaper.name}
def save_params(self, configfile, axis, shaper_name, shaper_freq):
if axis == 'xy':
self.save_params(configfile, 'x', shaper_name, shaper_freq)
self.save_params(configfile, 'y', shaper_name, shaper_freq)
else:
configfile.set('input_shaper', 'shaper_type_'+axis, shaper_name)
configfile.set('input_shaper', 'shaper_freq_'+axis,
'%.1f' % (shaper_freq,))
def apply_params(self, input_shaper, axis, shaper_name, shaper_freq):
if axis == 'xy':
self.apply_params(input_shaper, 'x', shaper_name, shaper_freq)
self.apply_params(input_shaper, 'y', shaper_name, shaper_freq)
return
gcode = self.printer.lookup_object("gcode")
axis = axis.upper()
input_shaper.cmd_SET_INPUT_SHAPER(gcode.create_gcode_command(
"SET_INPUT_SHAPER", "SET_INPUT_SHAPER", {
"SHAPER_TYPE_" + axis: shaper_name,
"SHAPER_FREQ_" + axis: shaper_freq}))
def save_calibration_data(self, output, calibration_data, shapers=None):
try:
with open(output, "w") as csvfile:
csvfile.write("freq,psd_x,psd_y,psd_z,psd_xyz")
if shapers:
for shaper in shapers:
csvfile.write(",%s(%.1f)" % (shaper.name, shaper.freq))
csvfile.write("\n")
num_freqs = calibration_data.freq_bins.shape[0]
for i in range(num_freqs):
if calibration_data.freq_bins[i] >= MAX_FREQ:
break
csvfile.write("%.1f,%.3e,%.3e,%.3e,%.3e" % (
calibration_data.freq_bins[i],
calibration_data.psd_x[i],
calibration_data.psd_y[i],
calibration_data.psd_z[i],
calibration_data.psd_sum[i]))
if shapers:
for shaper in shapers:
csvfile.write(",%.3f" % (shaper.vals[i],))
csvfile.write("\n")
except IOError as e:
raise self.error("Error writing to file '%s': %s", output, str(e))

View file

@ -0,0 +1,102 @@
# Definitions of the supported input shapers
#
# Copyright (C) 2020-2021 Dmitry Butyugin <dmbutyugin@google.com>
#
# This file may be distributed under the terms of the GNU GPLv3 license.
import collections, math
SHAPER_VIBRATION_REDUCTION=20.
DEFAULT_DAMPING_RATIO = 0.1
InputShaperCfg = collections.namedtuple(
'InputShaperCfg', ('name', 'init_func', 'min_freq'))
def get_none_shaper():
return ([], [])
def get_zv_shaper(shaper_freq, damping_ratio):
df = math.sqrt(1. - damping_ratio**2)
K = math.exp(-damping_ratio * math.pi / df)
t_d = 1. / (shaper_freq * df)
A = [1., K]
T = [0., .5*t_d]
return (A, T)
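# Worked example (illustrative): get_zv_shaper(50., 0.1) gives
# df ~= 0.99499, K = exp(-0.1 * pi / df) ~= 0.7292 and t_d ~= 0.02010 s,
# so A ~= [1., 0.7292] and T ~= [0., 0.01005]: two impulses half a damped
# period apart, with the second scaled down by K.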
def get_zvd_shaper(shaper_freq, damping_ratio):
df = math.sqrt(1. - damping_ratio**2)
K = math.exp(-damping_ratio * math.pi / df)
t_d = 1. / (shaper_freq * df)
A = [1., 2.*K, K**2]
T = [0., .5*t_d, t_d]
return (A, T)
def get_mzv_shaper(shaper_freq, damping_ratio):
df = math.sqrt(1. - damping_ratio**2)
K = math.exp(-.75 * damping_ratio * math.pi / df)
t_d = 1. / (shaper_freq * df)
a1 = 1. - 1. / math.sqrt(2.)
a2 = (math.sqrt(2.) - 1.) * K
a3 = a1 * K * K
A = [a1, a2, a3]
T = [0., .375*t_d, .75*t_d]
return (A, T)
def get_ei_shaper(shaper_freq, damping_ratio):
v_tol = 1. / SHAPER_VIBRATION_REDUCTION # vibration tolerance
df = math.sqrt(1. - damping_ratio**2)
K = math.exp(-damping_ratio * math.pi / df)
t_d = 1. / (shaper_freq * df)
a1 = .25 * (1. + v_tol)
a2 = .5 * (1. - v_tol) * K
a3 = a1 * K * K
A = [a1, a2, a3]
T = [0., .5*t_d, t_d]
return (A, T)
def get_2hump_ei_shaper(shaper_freq, damping_ratio):
v_tol = 1. / SHAPER_VIBRATION_REDUCTION # vibration tolerance
df = math.sqrt(1. - damping_ratio**2)
K = math.exp(-damping_ratio * math.pi / df)
t_d = 1. / (shaper_freq * df)
V2 = v_tol**2
X = pow(V2 * (math.sqrt(1. - V2) + 1.), 1./3.)
a1 = (3.*X*X + 2.*X + 3.*V2) / (16.*X)
a2 = (.5 - a1) * K
a3 = a2 * K
a4 = a1 * K * K * K
A = [a1, a2, a3, a4]
T = [0., .5*t_d, t_d, 1.5*t_d]
return (A, T)
def get_3hump_ei_shaper(shaper_freq, damping_ratio):
v_tol = 1. / SHAPER_VIBRATION_REDUCTION # vibration tolerance
df = math.sqrt(1. - damping_ratio**2)
K = math.exp(-damping_ratio * math.pi / df)
t_d = 1. / (shaper_freq * df)
K2 = K*K
a1 = 0.0625 * (1. + 3. * v_tol + 2. * math.sqrt(2. * (v_tol + 1.) * v_tol))
a2 = 0.25 * (1. - v_tol) * K
a3 = (0.5 * (1. + v_tol) - 2. * a1) * K2
a4 = a2 * K2
a5 = a1 * K2 * K2
A = [a1, a2, a3, a4, a5]
T = [0., .5*t_d, t_d, 1.5*t_d, 2.*t_d]
return (A, T)
# min_freq for each shaper is chosen to have projected max_accel ~= 1500
INPUT_SHAPERS = [
InputShaperCfg('zv', get_zv_shaper, min_freq=21.),
InputShaperCfg('mzv', get_mzv_shaper, min_freq=23.),
InputShaperCfg('zvd', get_zvd_shaper, min_freq=29.),
InputShaperCfg('ei', get_ei_shaper, min_freq=29.),
InputShaperCfg('2hump_ei', get_2hump_ei_shaper, min_freq=39.),
InputShaperCfg('3hump_ei', get_3hump_ei_shaper, min_freq=48.),
]