Alright, here’s a quick sample. Very little of this is my own work, it’s taken from @bko and the older DHT22 library posted here somewhere. Code to follow, but I’ve packaged it up for easy download at http://emc2innovation.com/RHT03_Temp_Sensor_publish.zip . The .ino
file gets dropped into the Spark core programming side (then flash), and the .html
file goes on a server or can be opened in your browser.
The RHT03 has 4 pins:
- Vcc
- Signal
- –
- Gnd
Some people (myself included) have had trouble with connecting the Vcc pin to the Spark-regulated 3.3V output, so if you can, attach that sucker to the Vin pin to get a slightly higher-powered rail. Pin 2 (Signal) dumps into the Spark D4 and should have a pullup resistor on it; values somewhere between 4.7k and 10k should work.
Spark Code: single file, older blocking library because I was having read issues with the non-blocking version (which should technically be a better version). If you’re not using pins A0 and A1 for soil moisture sensors, remove lines 179 and 180 and their associated variables.
#define MAXTIMINGS 85   // max signal transitions to sample in one sensor read
#define cli noInterrupts   // map AVR-style interrupt macros onto Wiring/Spark calls
#define sei interrupts
#define DHT11 11   // supported sensor types (the RHT03 is a DHT22/AM2302)
#define DHT22 22
#define DHT21 21
#define AM2301 21   // AM2301 is electrically a DHT21
#define NAN 999999   // "read failed" sentinel -- an exact integer, NOT an IEEE NaN, so == comparisons against it work
// Minimal blocking driver for DHT-family humidity/temperature sensors
// (DHT11/21/22, AM2301/RHT03) bit-banged over a single data pin.
class DHT {
private:
uint8_t data[6];   // raw frame: humidity hi/lo, temp hi/lo, checksum (data[5] is spare)
uint8_t _pin, _type, _count;   // data pin, sensor type (DHT11/21/22), pulse-length threshold for 0 vs 1 bits
bool read(void);   // one bit-banged transfer; true on checksum OK or when cached data is still fresh
unsigned long _lastreadtime;   // millis() timestamp of the last real sensor transaction
bool firstreading;   // forces an actual read on the very first call
public:
DHT(uint8_t pin, uint8_t type, uint8_t count=6);
void begin(void);   // call once from setup() before reading
float readTemperature(bool S=false);   // S: true = Fahrenheit, false = Celsius; returns NAN sentinel on failure
float convertCtoF(float);
float readHumidity(void);   // relative humidity in percent; returns NAN sentinel on failure
};
// Construct a driver for one sensor; no hardware is touched until begin().
//   pin   - GPIO the sensor's data line is wired to
//   type  - DHT11 / DHT21 / DHT22 (AM2301 aliases DHT21)
//   count - pulse-length threshold separating 0 bits from 1 bits
DHT::DHT(uint8_t pin, uint8_t type, uint8_t count) {
    _count = count;
    _type = type;
    _pin = pin;
    // Guarantee the first readTemperature/readHumidity call does a real read.
    firstreading = true;
}
// One-time pin setup. INPUT followed by digitalWrite(HIGH) is the AVR idiom
// for enabling the internal pull-up; NOTE(review): on the Spark core this may
// be a no-op, which is why an external 4.7k-10k pull-up is recommended.
void DHT::begin(void) {
// set up the pins!
pinMode(_pin, INPUT);
digitalWrite(_pin, HIGH);
_lastreadtime = 0;
}
// boolean S == Scale. true == Fahrenheit; false == Celsius.
// Returns the latest temperature reading, or the NAN sentinel (999999) when
// the sensor read fails or the configured sensor type is unknown.
float DHT::readTemperature(bool S) {
    if (!read())
        return NAN;
    float celsius;
    switch (_type) {
        case DHT11:
            // DHT11 reports whole degrees in a single byte.
            celsius = data[2];
            break;
        case DHT22:
        case DHT21:
            // 16-bit value in tenths of a degree; the top bit of data[2] is the sign.
            celsius = data[2] & 0x7F;
            celsius *= 256;
            celsius += data[3];
            celsius /= 10;
            if (data[2] & 0x80)
                celsius = -celsius;
            break;
        default:
            return NAN;
    }
    return S ? convertCtoF(celsius) : celsius;
}
// Convert a Celsius temperature to Fahrenheit (F = C * 9/5 + 32).
float DHT::convertCtoF(float c) {
    const float scaled = c * 9;
    return scaled / 5 + 32;
}
// Returns relative humidity in percent, or the NAN sentinel (999999) when
// the sensor read fails or the configured sensor type is unrecognized.
float DHT::readHumidity(void) {
    if (!read())
        return NAN;
    float rh;
    switch (_type) {
        case DHT11:
            // DHT11: whole percent in one byte.
            rh = data[0];
            return rh;
        case DHT22:
        case DHT21:
            // 16-bit value in tenths of a percent.
            rh = data[0];
            rh *= 256;
            rh += data[1];
            rh /= 10;
            return rh;
    }
    return NAN;
}
// Capture one 40-bit frame from the sensor into data[0..4].
// Returns true when a frame with a valid checksum was received, or when the
// previous frame is less than 2 s old (these sensors need ~2 s between
// conversions, so the cached bytes are reused inside that window).
// Returns false on pulse timeout or checksum mismatch.
// Interrupts are disabled for the whole timing loop, so this call blocks.
bool DHT::read(void) {
uint8_t laststate = HIGH;
uint8_t counter = 0;   // 1-us ticks spent in the current line state
uint8_t j = 0, i;   // j = bits received so far, i = transitions observed
unsigned long currenttime;
// pull the pin high and wait 250 milliseconds
digitalWrite(_pin, HIGH);
delay(250);
currenttime = millis();
if (currenttime < _lastreadtime) {
// ie there was a rollover
_lastreadtime = 0;
}
if (!firstreading && ((currenttime - _lastreadtime) < 2000)) {
//delay(2000 - (currenttime - _lastreadtime));
return true; // return last correct measurement
}
firstreading = false;
//Serial.print("Currtime: "); Serial.print(currenttime);
//Serial.print(" Lasttime: "); Serial.print(_lastreadtime);
_lastreadtime = millis();
// Clear the frame buffer before shifting new bits in.
data[0] = data[1] = data[2] = data[3] = data[4] = 0;
// Host start signal: drive the line low for ~20 ms...
// now pull it low for ~20 milliseconds
pinMode(_pin, OUTPUT);
digitalWrite(_pin, LOW);
delay(20);
cli();
// ...then release it high for 40 us and switch to input to listen for
// the sensor's response.
digitalWrite(_pin, HIGH);
delayMicroseconds(40);
pinMode(_pin, INPUT);
// read in timings
for ( i=0; i< MAXTIMINGS; i++) {
counter = 0;
// Busy-wait until the line changes state, counting ~1 us per loop.
while (digitalRead(_pin) == laststate) {
counter++;
delayMicroseconds(1);
if (counter == 255)
break;
}
laststate = digitalRead(_pin);
// counter saturating at 255 means the sensor stopped talking: bail out.
if (counter == 255)
break;
// ignore first 3 transitions
// NOTE(review): i >= 4 actually skips the first four timings (the
// sensor's ACK preamble); each data bit then ends on an even-numbered
// transition, whose high-pulse length encodes the bit value.
if ((i >= 4) && (i%2 == 0)) {
// shove each bit into the storage bytes
data[j/8] <<= 1;
// A high pulse longer than the _count threshold is a '1' bit.
if (counter > _count)
data[j/8] |= 1;
j++;
}
}
sei();
// check we read 40 bits and that the checksum matches
// (data[4] is the low byte of the sum of the four payload bytes)
if ((j >= 40) && (data[4] == ((data[0] + data[1] + data[2] + data[3]) & 0xFF)))
return true;
return false;
}
// CHANGE THESE THREE VALUES (if needed)
#define DHTPIN D4 // DHT/RHT03 data pin (Spark digital pin D4)
#define SOIL_ONE A0 // Analog pin 0
#define SOIL_TWO A1 // Analog pin 1
#define DHTTYPE DHT22 // the RHT03 speaks the DHT22 protocol
DHT dht(DHTPIN, DHTTYPE);
int f = 0; // failed? 1 when any DHT reading came back as the NAN sentinel
int soilOne = SOIL_ONE; // Analog input A0
int soilTwo = SOIL_TWO; // Analog input A1
double fltHumidity; // humidity (%)
double fltTemp_c; // temperature, Celsius
double fltTemp_f; // temperature, Fahrenheit
double ftlSoilOne; // soil moisture #1 ("ftl" is a typo for "flt", kept as-is since loop() uses this name)
double fltSoilTwo; // soil moisture #2
unsigned long lastTime = 0UL; // millis() of the last publish cycle
char publishString[64]; // scratch buffer for JSON payloads (reused per event)
// One-time initialization: configure the DHT data pin and open the
// serial port at 9600 baud for debug output.
void setup() {
dht.begin();
Serial.begin(9600);
}
void loop() {
unsigned long now = millis();
//Every 15 seconds publish uptime
if (now-lastTime>15000UL) {
lastTime = now;
// now is in milliseconds
unsigned nowSec = now/1000UL;
unsigned sec = nowSec%60;
unsigned min = (nowSec%3600)/60;
unsigned hours = (nowSec%86400)/3600;
f = 0;
fltHumidity = dht.readHumidity();
fltTemp_c = dht.readTemperature(false);
fltTemp_f = dht.readTemperature(true);
ftlSoilOne = analogRead(soilOne);
fltSoilTwo = analogRead(soilTwo);
Serial.print("Temperature (oC): ");
Serial.println(fltTemp_c, 2);
((fltHumidity==NAN) || (fltTemp_c==NAN) || (fltTemp_f==NAN) ) ? f=1 : f=0;
if(f != 1){
sprintf(publishString,"{\"Hours\": %u, \"Minutes\": %u, \"Seconds\": %u}",hours,min,sec);
Spark.publish("Uptime",publishString);
sprintf(publishString,"{\"temp_c\": %2.2f, \"temp_f\": %2.2f, \"humidity\": %2.2f}",fltTemp_c, fltTemp_f, fltHumidity);
Spark.publish("Environment",publishString);
sprintf(publishString,"{\"soilOne\": %2.2f, \"soilTwo\": %2.2f}",ftlSoilOne, fltSoilTwo);
Spark.publish("Soil",publishString);
RGB.control(true);
RGB.color(0, 255, 0);
delay(500);
RGB.control(false);
} else{
sprintf(publishString, "{\"error\":\"trouble retrieving data\"}");
}
}
}
Next up: HTML file. Place this on a web server or just open it in your browser - it binds an event listener to the Spark cloud and updates the DOM when it receives some data. You’ll want to update lines 29 and 30 with your own device ID and access token.
<!-- Companion page for the Spark RHT03 sketch: subscribes to the core's
     server-sent event stream and renders Uptime / Environment / Soil data. -->
<!DOCTYPE HTML>
<html>
<head>
<style type="text/css">
#uptime{ font-size:20px;}
#tstamp{ font-size:12px;}
#temp{ font-size:14px;}
</style>
</head>
<body>
<h1>Spark Publish Test</h1>
<!-- Placeholders filled in by the event handlers below -->
<span id="uptime"></span><br>
<span id="tstamp"></span>
<div id="temp">
<ul id="environment"></ul>
<ul id="soil"></ul>
</div>
<br><br>
<button onclick="start()">Connect</button>
<script type="text/javascript">
// Open an EventSource to the Spark cloud API and attach one listener per
// published event name. Requires your device ID and access token below.
function start() {
// CHANGE THESE TWO VALUES
var deviceID = "999999999999999999999999";
var accessToken = "9999999999999999999999999999999999999999";
document.getElementById("uptime").innerHTML = "Waiting for data...";
var eventSource = new EventSource("https://api.spark.io/v1/devices/" + deviceID + "/events/?access_token=" + accessToken);
eventSource.addEventListener('open', function(e) {
console.log("Opened!"); },false);
eventSource.addEventListener('error', function(e) {
console.log("Errored!"); },false);
// "Uptime" events: e.data is JSON whose .data field is itself a JSON string
// (the sketch's publishString payload), hence the double JSON.parse.
eventSource.addEventListener('Uptime', function(e) {
var rawData = JSON.parse(e.data);
console.log( rawData);
var parsedData = JSON.parse(rawData.data);
var tempSpan = document.getElementById("uptime");
var tsSpan = document.getElementById("tstamp");
console.log( rawData);
console.log( parsedData);
tempSpan.innerHTML = "Core: " + rawData.coreid + " Uptime: " + parsedData.Hours + ":" + parsedData.Minutes + ":" + parseInt(parsedData.Seconds).toPrecision(2) +
" (" + (parsedData.Seconds + parsedData.Minutes*60 + parsedData.Hours*3600) + " secs)";
tsSpan.innerHTML = "At timestamp " + rawData.published_at;
}, false);
// "Environment" events: temperature (C and F) plus relative humidity.
eventSource.addEventListener('Environment', function(e) {
var rawData = JSON.parse(e.data);
console.log( rawData);
var parsedData = JSON.parse(rawData.data);
var envList = document.getElementById("environment");
console.log( rawData);
console.log( parsedData);
envList.innerHTML = "<li>Temp C: " + parsedData.temp_c + "</li><li>Temp F: " + parsedData.temp_f
+ "</li><li>Humidity : " + parsedData.humidity + "%</li>";
}, false);
// "Soil" events: the two raw analog soil-moisture readings.
eventSource.addEventListener('Soil', function(e) {
var rawData = JSON.parse(e.data);
console.log( rawData);
var parsedData = JSON.parse(rawData.data);
var envList = document.getElementById("soil");
console.log( rawData);
console.log( parsedData);
envList.innerHTML = "<li>Soil 1: " + parsedData.soilOne + "</li><li>Soil 2: " + parsedData.soilTwo+'</li>';
}, false);
} // start()
</script>
</body>
</html>
However, this doesn’t really accomplish your original quest - this is only really a data read page and it doesn’t log anything. But I’ll let you in on a secret - I’ve been working on a Spark data logging website for the last few months and it’s almost ready for the community to dig into. The release of the Spark.publish()
event was an awesome thing, but I’m currently rewriting the site to work with these events.
On another note, I am also playing with Heroku and setting up a NodeJS drone that logs these events directly to a database. Once that data is stored somewhere, it’s only a matter of interpreting and formatting… pretty simple. I chose NodeJS because it’s basically server-side Javascript and has that event listener that is present in the HTML file.
Let us know if you have any issues!