OK, apologies, but I still have the same issue. Sometimes the Core flashes
cyan in bursts for hours, even though my internet connection is working
fine.
Basically, I have three sensors (a DS18B20, a DHT11 and a water
sensor) connected to the Core.
The code below was changed considerably compared to my first post above:
// This #include statement was automatically added by the Particle IDE.
#include "PietteTech_DHT/PietteTech_DHT.h"
// This #include statement was automatically added by the Particle IDE.
#include "OneWire/OneWire.h"
// This #include statement was automatically added by the Particle IDE.
#include "spark-dallas-temperature/spark-dallas-temperature.h"
// DHT sensor type & pin
#define DHTTYPE DHT11 // Sensor type DHT11/21/22/AM2301/AM2302
#define DHTPIN D6 // Digital pin for communications D6
// One Wire pin
#define ONE_WIRE_BUS D5 //data on pin D5
// Water sensor pins
#define WATER_SENSOR D4
#define LED D7
// How long to wait before noting that the alarm has switched states -- this
// helps stop a super-sensitive sensor from ping-ponging back and forth a lot.
#define DEBOUNCE_SECONDS 5
// Variables
// the alarm state: 0 = off, 1 = on
int alarmState = 0;
// the last time we switched alarm states (seconds since Unix epoch --
// Time.now() returns seconds, not milliseconds)
int lastStateSwitchTime = 0;
int temp=0;          // last DHT11 temperature (°C), exposed as a cloud variable
int hum=0;           // last DHT11 humidity (%), exposed as a cloud variable
double tempc;        // last DS18B20 temperature (°C)
char tempInfo[10];   // formatted DS18B20 reading for the STRING cloud variable
// Declaration
void dht_wrapper(); // must be declared before the lib initialization
// Lib instantiate
PietteTech_DHT DHT(DHTPIN, DHTTYPE, dht_wrapper);
OneWire oneWire(ONE_WIRE_BUS); // Setup a oneWire instance to communicate with any OneWire devices
DallasTemperature sensors(&oneWire); // Pass our oneWire reference to Dallas Temperature.
// One-time device initialization: register cloud variables, start the
// sensor buses, configure pins, and announce that we are online.
void setup(void)
{
    // Expose the DS18B20 reading (formatted string) to the web API.
    Spark.variable("tempHotWater", tempInfo, STRING);

    // Start the Dallas/OneWire bus; request full 12-bit conversions
    // (the IC defaults to 9-bit resolution).
    sensors.begin();
    sensors.setResolution(12);

    // Expose the DHT11 readings to the web API.
    Spark.variable("temperature", &temp, INT);
    Spark.variable("humidity", &hum, INT);

    // Configure the water-sensor input and the alarm LED output.
    pins_init();

    // Tell the world we're online.
    Spark.publish("online");

    // Expose the alarm state (0 = off, 1 = on) to the web API.
    Spark.variable("alarmState", &alarmState, INT);
}
// ISR trampoline required by PietteTech_DHT: forwards the pin-change
// interrupt into the library instance.
void dht_wrapper()
{
    DHT.isrCallback();
}
void loop(void) {
sensors.requestTemperatures();
tempc= sensors.getTempCByIndex(0);
sprintf(tempInfo, "%2.1f", tempc);
DHT.acquireAndWait();
// Humidity measurement
//temperature = dht.getTempCelcius();
temp = DHT.getCelsius();
hum = DHT.getHumidity();
Spark.publish("temperature_Hot_Water_Tank", String(tempc) + "°C");
//delay (2000);
Spark.publish("temperature", String(temp) + "°C");
//delay (2000);
Spark.publish("humidity", String(hum) + "%");
delay (5000);
//Serial.print("Hot Water (oC): ");
//Serial.println(tempc, 2);
//Serial.print("Temperature (oC): ");
//Serial.println(temp, 2);
//Serial.print("Humidity (%): ");
//Serial.println(hum, 2);
if(isExposedToWater()) {
//
// Alarm ON
//
if (alarmState == 0) {
// only alarm if we're past the debounce interval
int now = Time.now();
if (now - lastStateSwitchTime > DEBOUNCE_SECONDS) {
alarmState = 1;
lastStateSwitchTime = now;
digitalWrite(LED, HIGH);
Spark.publish("alarm", "on", 60, PRIVATE);
}
}
} else {
//
// Alarm off
//
if (alarmState == 1) {
// only alarm if we're past the debounce interval
int now = Time.now();
if (now - lastStateSwitchTime > DEBOUNCE_SECONDS) {
alarmState = 0;
lastStateSwitchTime = now;
digitalWrite(LED, LOW);
Spark.publish("alarm", "off", 60, PRIVATE);
}
}
}
}
// initialize our pins
// Configure hardware pins: the water sensor is read as a digital input,
// and the LED drives the alarm indicator.
void pins_init()
{
    pinMode(WATER_SENSOR, INPUT);
    pinMode(LED, OUTPUT);
}
// determine if we're exposed to water or not
// True when the water sensor reports exposure -- the sensor pulls its
// pin LOW when wet.
boolean isExposedToWater()
{
    return digitalRead(WATER_SENSOR) == LOW;
}
[/quote]
Continuing the discussion from [DS18B20 and Particle Core](https://community.particle.io/t/ds18b20-and-particle-core/15910/38):
[quote="LukeUSMC, post:38, topic:15910, full:true"]
This works...change your pin to D0 I think is what it was, fix the includes (use the DS18B20 lib, makes it easier) and you are done. There was no reason to convert your temp to a STRING value since you are only sending the temp in `Particle.variable` It is tested and reports correct values in Variable and Dashboard. Except for the first publish which I will leave to you to correct the timing on that. You can crank down the publish interval as long as you keep it above 1/second.
```cpp
// This #include statement was automatically added by the Spark IDE.
#include "OneWire.h"
#include "DS18B20.h"
// DS18B20 driver bound to pin D2 (the water-temperature sensor's data line).
DS18B20 ds18b20 = DS18B20(D2); //Sets Pin D2 for Water Temp Sensor
char szInfo[64];     // scratch buffer for the formatted publish payload
float pubTemp;       // NOTE(review): unused in the visible code
double celsius;      // latest reading, exposed as the "tempHotWater" cloud variable
unsigned int Metric_Publish_Rate = 50000;    // ms between cloud publishes
unsigned int MetricnextPublishTime;          // millis() deadline for the next publish
unsigned int DS18B20nextSampleTime;          // millis() deadline for the next sample
unsigned int DS18B20_SAMPLE_INTERVAL = 2000; // ms between sensor samples
// One-time init: configure the sensor pin, expose the reading to the
// cloud, and open the USB serial port for debug prints.
void setup() {
pinMode(D2, INPUT); // DS18B20 data line
Particle.variable("tempHotWater", &celsius, DOUBLE); // poll-able via the web API
Serial.begin(9600);
}
// Scheduler loop: sample the DS18B20 and publish on independent timers.
void loop() {
// Rollover-safe deadline checks: `millis() > deadline` breaks when
// millis() wraps (~49 days) because the comparison goes unsigned; the
// signed-subtraction form keeps working across the wrap, which matters
// for a device that is meant to run for weeks.
if ((long)(millis() - DS18B20nextSampleTime) >= 0){
getTemp();
Serial.print("Temp is: ");
Serial.println(celsius);
}
if ((long)(millis() - MetricnextPublishTime) >= 0){
Serial.println("Publishing now.");
publishData();
}
}
// Format the latest reading to two decimals, publish it as the private
// "dsTmp" event, and schedule the next publish one interval from now.
void publishData(){
sprintf(szInfo, "%2.2f", celsius);
Particle.publish("dsTmp", szInfo, PRIVATE);
MetricnextPublishTime = millis() + Metric_Publish_Rate;
}
// Read the DS18B20 using the library's search/reset idiom.
void getTemp(){
// NOTE(review): the temperature is read only when search() returns false
// (i.e. the bus enumeration has wrapped); this mirrors the DS18B20 library
// example, but it means celsius is not updated on every call -- confirm
// against the library documentation.
if(!ds18b20.search()){
ds18b20.resetsearch();
celsius = ds18b20.getTemperature();
DS18B20nextSampleTime = millis() + DS18B20_SAMPLE_INTERVAL; // next sample deadline
Serial.print(celsius);
}
}
[/quote]
Continuing the discussion from DS18B20 and Particle Core:
[quote=“LukeUSMC, post:38, topic:15910, full:true”]
This works…change your pin to D0 I think is what it was, fix the includes (use the DS18B20 lib, makes it easier) and you are done. There was no reason to convert your temp to a STRING value since you are only sending the temp in Particle.variable
It is tested and reports correct values in Variable and Dashboard. Except for the first publish which I will leave to you to correct the timing on that. You can crank down the publish interval as long as you keep it above 1/second.
// This #include statement was automatically added by the Spark IDE.
#include "OneWire.h"
#include "DS18B20.h"
// DS18B20 driver bound to pin D2 (the water-temperature sensor's data line).
DS18B20 ds18b20 = DS18B20(D2); //Sets Pin D2 for Water Temp Sensor
char szInfo[64];     // scratch buffer for the formatted publish payload
float pubTemp;       // NOTE(review): unused in the visible code
double celsius;      // latest reading, exposed as the "tempHotWater" cloud variable
unsigned int Metric_Publish_Rate = 50000;    // ms between cloud publishes
unsigned int MetricnextPublishTime;          // millis() deadline for the next publish
unsigned int DS18B20nextSampleTime;          // millis() deadline for the next sample
unsigned int DS18B20_SAMPLE_INTERVAL = 2000; // ms between sensor samples
// One-time init: configure the sensor pin, expose the reading to the
// cloud, and open the USB serial port for debug prints.
void setup() {
pinMode(D2, INPUT); // DS18B20 data line
Particle.variable("tempHotWater", &celsius, DOUBLE); // poll-able via the web API
Serial.begin(9600);
}
// Scheduler loop: sample the DS18B20 and publish on independent timers.
// NOTE(review): `millis() > deadline` is not rollover-safe (millis()
// wraps after ~49 days); a subtraction-based comparison would keep
// working across the wrap.
void loop() {
// Take a new sample once the sample interval has elapsed.
if (millis() > DS18B20nextSampleTime){
getTemp();
Serial.print("Temp is: ");
Serial.println(celsius);
}
// Publish once the publish interval has elapsed (fires immediately on
// the very first pass, before a sample exists -- the "first publish"
// timing issue mentioned in the post).
if (millis() > MetricnextPublishTime){
Serial.println("Publishing now.");
publishData();
}
}
// Format the latest reading to two decimals, publish it as the private
// "dsTmp" event, and schedule the next publish one interval from now.
void publishData(){
sprintf(szInfo, "%2.2f", celsius);
Particle.publish("dsTmp", szInfo, PRIVATE);
MetricnextPublishTime = millis() + Metric_Publish_Rate;
}
// Read the DS18B20 using the library's search/reset idiom.
void getTemp(){
// NOTE(review): the temperature is read only when search() returns false
// (i.e. the bus enumeration has wrapped); this mirrors the DS18B20 library
// example, but it means celsius is not updated on every call -- confirm
// against the library documentation.
if(!ds18b20.search()){
ds18b20.resetsearch();
celsius = ds18b20.getTemperature();
DS18B20nextSampleTime = millis() + DS18B20_SAMPLE_INTERVAL; // next sample deadline
Serial.print(celsius);
}
}
The cloud side looks like this:
The Core simply stops talking to the cloud from time to time, so the
variables cannot be pulled or updated. I'm starting to suspect a faulty Core…