Add functionality for calibrating the analog input, plus a demo script for analog-input calibration.
This commit is contained in:
@ -12,7 +12,6 @@
|
||||
#define ANALOG_INPUT_H
|
||||
|
||||
const int MAX_INPUT = (1 << 10) - 1; // Max 10 bit analog read resolution.
|
||||
const int CALIBRATION_OFFSET = 15;
|
||||
|
||||
class AnalogInput {
|
||||
public:
|
||||
@ -36,20 +35,29 @@ class AnalogInput {
|
||||
void Process() {
|
||||
old_read_ = read_;
|
||||
int raw = analogRead(pin_);
|
||||
read_ = map(raw, CALIBRATION_OFFSET, MAX_INPUT, 0, MAX_INPUT);
|
||||
read_ = map(raw, calibration_offset_, MAX_INPUT, calibration_low_, calibration_high_);
|
||||
}
|
||||
|
||||
// Set calibration values.
|
||||
|
||||
void AdjustCalibrationLow(int val) { calibration_low_ += val; }
|
||||
void AdjustCalibrationOffset(int val) { calibration_offset_ += val; }
|
||||
void AdjustCalibrationHigh(int val) { calibration_high_ += val; }
|
||||
|
||||
/**
|
||||
* @brief Get the current value of the analog input.
|
||||
* @brief Get the current value of the analog input within a range of +/-512.
|
||||
*
|
||||
* @return InputState
|
||||
*/
|
||||
inline uint16_t Read() { return read_; }
|
||||
inline int16_t Read() { return read_; }
|
||||
|
||||
private:
|
||||
uint8_t pin_;
|
||||
uint16_t read_;
|
||||
int16_t read_;
|
||||
uint16_t old_read_;
|
||||
int calibration_offset_ = 0;
|
||||
int calibration_low_ = -512;
|
||||
int calibration_high_ = 512;
|
||||
};
|
||||
|
||||
#endif
|
||||
|
||||
Reference in New Issue
Block a user