Add functionality for calibrating the analog input, and add a demo script for analog-input calibration.

This commit is contained in:
2025-05-24 00:26:48 -07:00
parent 7f9ad7e00d
commit 392f4fffc7
2 changed files with 74 additions and 37 deletions

View File

@@ -12,7 +12,6 @@
#define ANALOG_INPUT_H
const int MAX_INPUT = (1 << 10) - 1; // Max 10 bit analog read resolution.
const int CALIBRATION_OFFSET = 15;
class AnalogInput {
public:
@@ -36,20 +35,29 @@ class AnalogInput {
/**
 * @brief Sample the analog pin and refresh the calibrated reading.
 *
 * Saves the previous reading in old_read_ (so callers can detect changes),
 * then maps the raw ADC value from [calibration_offset_, MAX_INPUT] onto
 * [calibration_low_, calibration_high_].
 */
void Process() {
old_read_ = read_;
int raw = analogRead(pin_);
// NOTE(review): Arduino map() does not clamp — a raw value below
// calibration_offset_ produces a result outside [calibration_low_,
// calibration_high_]. Confirm whether callers rely on clamped output.
read_ = map(raw, calibration_offset_, MAX_INPUT, calibration_low_, calibration_high_);
}
// Calibration tuning: each call nudges one calibration parameter by `delta`
// (pass a negative delta to decrease it).
void AdjustCalibrationLow(int delta) { calibration_low_ = calibration_low_ + delta; }
void AdjustCalibrationOffset(int delta) { calibration_offset_ = calibration_offset_ + delta; }
void AdjustCalibrationHigh(int delta) { calibration_high_ = calibration_high_ + delta; }
/**
 * @brief Get the current calibrated value of the analog input.
 *
 * @return int16_t reading within [calibration_low_, calibration_high_]
 *         (default +/-512), as computed by the last Process() call.
 */
inline int16_t Read() { return read_; }
private:
uint8_t pin_;        // Analog pin sampled by Process().
int16_t read_;       // Latest calibrated reading (signed: range can be negative).
int16_t old_read_;   // Previous reading; signed to match read_ — an unsigned
                     // type would wrap when Process() stores a negative value.
int calibration_offset_ = 0;  // Raw-value floor fed to map() in Process().
int calibration_low_ = -512;  // Lower bound of the mapped output range.
int calibration_high_ = 512;  // Upper bound of the mapped output range.
};
#endif