I have the Core hooked up to a resistor and a photosensitive diode in order for it to read and report the blinks on my not-very-smart power meter (something I hope can be useful to others as well eventually), as it blinks every watt hour. It runs one iteration perfectly, meaning it reports the number of blinks after the first minute has passed, but then the core promptly resets. I’m comfortably inside RAM and flash limits, and I have tried two different cores.
I’m using the SparkTime library to get the NTP time in order to report accurately.
I’m at my wit’s end here - any pointers would be much appreciated.
OJ
#include "SparkTime.h"
/*
Power meter reader and reporter
Logic: Reads the blinks from the main house meter led in the fusebox.
Records the blink frequency, as each blink is 1 Wh.
Reports the hourly frequency (consumption) via publish() and
reports history via the published function history() (TODO!)
HW:
Connect one end of the photocell to 3.3V, the other end to Analog 0.
Then connect one end of a 330ohm resistor from Analog 0 to ground
Connect LED from pin 11 through a resistor to ground
*/
#define threshold 100 // the treshold value triggers a signal, anything below this is considered ambient light
// value depends on real life conditions and resistance of your setup - must be tweaked
#define photocellPin 0 // the cell and 330ohm pulldown are connected to a0
#define LED 7 // the Spark onboard LED - we blink every time we recognize a blink from the meter
#define VER 0.3 // Code version number - to check that we're in sync
// Using the NTP RTC library
UDP UDPClient;
SparkTime rtc;
unsigned long currentTime;
bool DEBUG = true;
bool firstMinute = true; // we don't log the first minute (partial), we wait until we start a new minute fresh
bool firstHour = true; // we don't log the first hour (partial), we wait until we start a new hour fresh
uint8_t startMinute;
uint8_t startHour;
int freqMin[60];
int freqHour[24];
uint8_t currentHour;
uint8_t currentMinute;
int blinkCounter = 0;
int reading;
void cloudStausPublish();
void cloudReadingPublish();
char reportString[20];
void setup(void) {
delay(5000); // make sure we can re-flash the core
Spark.publish("shadowMeter", "Initializing after reboot...");
pinMode(LED, OUTPUT); // setting up onboard LED
digitalWrite(LED, LOW); // default off
// This will give us the intital NTP time, which the library will updated with millis() from here on
rtc.begin(&UDPClient, "pool.ntp.org"); // getting time from NTP server
rtc.setTimeZone(1); // gmt offset (+1 is EST)
rtc.setUseEuroDSTRule(true); // we want daylight saving time
// Settin up initial time
currentTime = rtc.now();
currentHour = rtc.hour(currentTime);
currentMinute = rtc.minute(currentTime);
blinkCounter = 0; // counter for meter blinks
}
void loop(void) {
digitalWrite(LED, LOW); // Onboard LED off until we get a signal
reading = 0;
while (reading < threshold) {
reading = analogRead(photocellPin);
}
blinkCounter++; // we got a new blink
digitalWrite(LED, HIGH); // We mirror the meter blink with our onboard LED, which helps check that we have found the right threshold value
delay(100); // we need this ti avoid false positives
// If a minute has passed we log the readings from that minute (unless it's the starting minute, which is ignored as it is a partial reading)
if (currentMinute < rtc.minute(rtc.now())) { // we have a new minute
//Serial.println("New minute - time to report!");
freqMin[currentMinute] = blinkCounter; // registering the blink in the minute array
blinkCounter = 0; // resetting the signal counter for the new minute
if(!firstMinute) { // we report the previous minute to the cloud (if it's not the first minute, since that is a partial reading)
sprintf(reportString, "Minute %d: %d blinks", currentMinute, freqMin[currentMinute]);
cloudStatusPublish(&reportString[0]);
} else { // the first minute we log
firstMinute = false; // ok, first minute is done, we can start logging
startMinute = currentMinute+1; // we now know the oldest value we can report (that we have data for)
}
// We now must check if we've turned a new hour, in which case we report last hour's average
if (!currentMinute) { // we're at miute 0, which means we just passed one hour
if (!firstHour) { // we report the previous hour to the cloud (if it's not the first hour, since that is a partial reading)
for (int minCounter = 0; minCounter < 60; minCounter++) {
freqHour[currentHour] += freqMin[minCounter]; // registering all the blinks from the last hour
}
freqHour[currentHour] = int(freqHour[currentHour]/60); // finding the average frquency for the last hour
sprintf(reportString, "Hour: %d: %d ", currentHour, freqHour[currentHour]);
cloudStatusPublish(&reportString[0]);
} else { // the first hour we log
firstHour = false;
startHour = currentHour; // we now know the oldest value we can report (that we have data for)
}
currentHour++; // we update the current hour that we're logging
}
currentMinute++; // we update the current minute that we're logging
}
}
void cloudStatusPublish(char* msg) {
char publishString[40];
unsigned long now = millis();
unsigned nowSec = now/1000UL;
unsigned sec = nowSec%60;
unsigned min = (nowSec%3600)/60;
unsigned hours = (nowSec%86400)/3600;
sprintf(publishString,"Ver: %f, uptime: %u:%u:%u, reading: \"%s\"",VER,hours,min,sec,msg);
Spark.publish("shadowMeter", publishString); // not private for now, 60, PRIVATE);
//delay(100); // Time to check the cloud for updates
return;
}