Tuesday, April 1, 2014

RasPI + RGB LED = Color of Twitter

This project uses a Raspberry PI to scan all posted Tweets in real time for the mention of a color.  When a color is Tweeted, the Red, Green, and/or Blue segments of an RGB LED are turned on to display the Tweeted color.  Video demo below:

-----
The project is pretty cool and simple to duplicate.  You should be able to just copy/paste my Python script below into your favorite RasPI editor and go from there.

Note that the code expects the RGB LED to be connected to I/O Pins 11 (Red), 15 (Green) and 13 (Blue).  Also, be sure to add a current-limiting resistor in series with each of the three I/O pins, not on the RGB LED ground pin.  My RGB LED was spec'd for 330 Ohm resistors.  This picture should help identify Pins 11, 15, and 13:
-----
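If you want to sanity-check the wiring before running the full script, a minimal stand-alone check (my own addition, not part of the project script) like the one below blinks each color once.  It assumes the same BOARD pin numbers as above: 11 = Red, 15 = Green, 13 = Blue.

# Quick wiring check for the RGB LED (assumes BOARD pins 11=Red, 15=Green, 13=Blue)
import time
import RPi.GPIO as GPIO

PINS = {'Red': 11, 'Green': 15, 'Blue': 13}

GPIO.setmode(GPIO.BOARD)
for pin in PINS.values():
    GPIO.setup(pin, GPIO.OUT)

for name, pin in PINS.items():
    print name                    # each color should light for one second
    GPIO.output(pin, GPIO.HIGH)
    time.sleep(1)
    GPIO.output(pin, GPIO.LOW)

GPIO.cleanup()

If a color does not light, swap the suspect LED leg or check its resistor before moving on.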
You will also need to establish your own Twitter API tokens.  Don't worry; it's easy if you already have a Twitter account.  To get them, go to https://dev.twitter.com/.  Enter the API token values where the X's are in the source code below.
-----
#  Program to search Twitter to control a RGB LED
#  by WhiskeyTangoHotel.Com with special thanks to Sparkfun and twython
#  Tracks a tally count after each find
#  APRIL 2014

import time
import datetime # to allow timestamp math
import RPi.GPIO as GPIO
from twython import TwythonStreamer

#  PI I/O 11 = Red
#  PI I/O 13 = Blue
#  PI I/O 15 = Green
#  Red + Green = Orange
#  Red + Blue = Pink
#  None on = Black

# GPIO pin number of LED
Red = 11
Blue = 13
Green = 15

# Setup GPIO as output
GPIO.setmode(GPIO.BOARD)
GPIO.setup(Red, GPIO.OUT)
GPIO.output(Red, GPIO.LOW)

GPIO.setup(Green, GPIO.OUT)
GPIO.output(Green, GPIO.LOW)

GPIO.setup(Blue, GPIO.OUT)
GPIO.output(Blue, GPIO.LOW)

# Twitter application authentication

APP_KEY = 'xxxxxxxxxxxxxxxxxxxx'
APP_SECRET = 'xxxxxxxxxxxxxxxxxxxx'
OAUTH_TOKEN = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx'
OAUTH_TOKEN_SECRET = 'xxxxxxxxxxxxxxxxxxxx'

# Search terms placed in list TERM[]; a tweet containing any term in the list will match.  NOTE: matching is CASE SENSITIVE.
TERM = []
TERM.append('red') #TERM[0]
TERM.append('blue') #TERM[1]
TERM.append('green') #TERM[2]
TERM.append('orange') #TERM[3]
TERM.append('pink') #TERM[4]

LED_secs_on = 30  # when found; how long to burn the LED

localtime = time.asctime( time.localtime(time.time()) )
print localtime
t0 = datetime.datetime.now()  # for timestamp math
print 'Self testing RGB LED...'
#Test RGB LED
for i in range(0, 5):
    print 'RED...  GREEN...  BLUE...'
    GPIO.output(Red, GPIO.HIGH)
    time.sleep(0.3)
    GPIO.output(Red, GPIO.LOW)

    GPIO.output(Green, GPIO.HIGH)
    time.sleep(0.3)
    GPIO.output(Green, GPIO.LOW)

    GPIO.output(Blue, GPIO.HIGH)
    time.sleep(0.3)
    GPIO.output(Blue, GPIO.LOW)

print ' '
print "START searching for TERMS: "
print TERM[0]
print TERM[1]
print TERM[2]
print TERM[3]
print TERM[4]
print '................................'
print ' '

Tally_0 = 0
Tally_1 = 0
Tally_2 = 0
Tally_3 = 0
Tally_4 = 0

# Setup callbacks from Twython Streamer
class BlinkyLED(TwythonStreamer):
    def on_success(self, data):
        global Tally_0
        global Tally_1
        global Tally_2
        global Tally_3
        global Tally_4

        if 'text' in data:
            check_string = data['text'].encode('utf-8')

            if TERM[0] in check_string:
                print TERM[0] + ' found on ' + time.asctime( time.localtime(time.time()) )
                print ' '
                Tally_0 = Tally_0 + 1
                print data['text'].encode('utf-8')
                GPIO.output(Red, GPIO.HIGH)

            if TERM[1] in check_string:
                print TERM[1] + ' found on ' + time.asctime( time.localtime(time.time()) )
                print ' '
                Tally_1 = Tally_1 + 1
                print data['text'].encode('utf-8')
                GPIO.output(Blue, GPIO.HIGH)

            if TERM[2] in check_string:
                print TERM[2] + ' found on ' + time.asctime( time.localtime(time.time()) )
                print ' '
                Tally_2 = Tally_2 + 1
                print data['text'].encode('utf-8')
                GPIO.output(Green, GPIO.HIGH)

            if TERM[3] in check_string:
                print TERM[3] + ' found on ' + time.asctime( time.localtime(time.time()) )
                print ' '
                Tally_3 = Tally_3 + 1
                print data['text'].encode('utf-8')
                GPIO.output(Red, GPIO.HIGH)
                GPIO.output(Green, GPIO.HIGH)

            if TERM[4] in check_string:
                print TERM[4] + ' found on ' + time.asctime( time.localtime(time.time()) )
                print ' '
                Tally_4 = Tally_4 + 1
                print data['text'].encode('utf-8')
                GPIO.output(Red, GPIO.HIGH)
                GPIO.output(Blue, GPIO.HIGH)

            if TERM[0] in check_string or TERM[1] in check_string or TERM[2] in check_string or TERM[3] in check_string or TERM[4] in check_string:
                print ' '
                print 'SCORE:'
                print TERM[0] + ' = ' + str(Tally_0)
                print TERM[1] + ' = ' + str(Tally_1)
                print TERM[2] + ' = ' + str(Tally_2)
                print TERM[3] + ' = ' + str(Tally_3)
                print TERM[4] + ' = ' + str(Tally_4)
                print ' '
                print str(Tally_0 + Tally_1 + Tally_2 + Tally_3 + Tally_4) + ' total finds after ' + str(datetime.datetime.now() - t0)
                print '--------------------------------'
                print ' '
                time.sleep(LED_secs_on)     # keep LED on for LED_secs_on secs
                GPIO.output(Red, GPIO.LOW)  # turn off the LED
                GPIO.output(Green, GPIO.LOW)
                GPIO.output(Blue, GPIO.LOW)

# Create streamer
try:
        stream = BlinkyLED(APP_KEY, APP_SECRET, OAUTH_TOKEN, OAUTH_TOKEN_SECRET)
        stream.statuses.filter(track=TERM)
except KeyboardInterrupt:
        GPIO.cleanup()

-----
That's it.  Hope you give it a try!

     

Sunday, February 23, 2014

Graphing Twitter Mentions with the Raspberry PI

Objective:
Use the Raspberry PI to monitor Twitter for specific words or hashtags that are tweeted.  Graphically display the results on a publicly viewable webpage (http://open.sen.se/).
-----
If you are not interested in the build details and just want to see the result then take a look at this short video.  We had the RasPI set up to monitor for a "Tektronix" or "Agilent" mention in any and every message Twitter receives.

The video shows an iPad tweeting the message "Tektronix and Agilent. RasPI HashVote Test".  Since this tweet contains both search words the RasPI is monitoring for, each counter gets incremented and the graph is updated.

Tektronix and Agilent are well-known test and measurement companies.  In general, they both get tweeted about the same number of times each day.  However, take a look at Agilent's graph the day they released their (not so good) quarterly earnings.  The graph clearly shows the extra Twitter chatter on Agilent due to their earnings announcement.
-----
So how does it work?
You're going to need a Raspberry PI connected to the internet (duh?), a Twitter account, and an open.sen.se account.

Then you are going to create a Python script that runs on the Raspberry PI to search for Tweets and update the counters and graph on your open.sen.se account.  The Python code is listed below.  If you are going to track two terms (as in the example above) you will have a separate Python script running for each search term.  You are going to need API authorization/access tokens for your Python scripts.  Don't panic; that's easy.

- Twitter API Token: Go to https://dev.twitter.com/.  You will need to create API tokens for each Python script you have running.  In the example above I am tracking two words, "Tektronix" and "Agilent", so I need to set up two API tokens.  If you decide to change the search terms from "Tektronix" and "Agilent" to "Happy" and "Sad" you will not have to create new API tokens.  Just change the search terms in the RasPI Python scripts.

- open.sen.se API Token: Go to http://open.sen.se/ and create a "Channel" for each search term you want to track.  Again, the example above tracks two terms being tweeted so two channels are created.  After you create a "Channel" you will get a "FEED ID" for each channel.  You will also get an API token that is assigned to your account.  This API token is private to you and the same for each channel.  Then, play around with the "apps" at open.sen.se to create graphs, counters, gauges, and a ton of other cool things.  Their tutorials are good, so I won't explain how to do that here.

Now boot your Raspberry PI, open your favorite text editor, and copy/paste in the code below.  Replace the 'xxxxxxxx' with your custom API token and FEED ID information.  Again, you will need a Python script running for each Twitter term you are searching for.  There is probably a way to do this with one Python script, but I'm not that smart; a rough sketch of that idea is shown below.
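For what it's worth, here is a rough, untested sketch of what a single-script version might look like: one Twython streamer tracking several terms, each with its own tally and its own sen.se feed.  The feed IDs, keys, and dictionary layout are placeholders of my own, not part of the original build.

# Sketch only (untested): one streamer tracking several terms, each with its own sen.se feed
from twython import TwythonStreamer
import httplib
import json as simplejson

APP_KEY = 'xxxxxxxxxxxxxxxx'
APP_SECRET = 'xxxxxxxxxxxxxxxx'
OAUTH_TOKEN = 'xxxxxxxxxxxxxxxx'
OAUTH_TOKEN_SECRET = 'xxxxxxxxxxxxxxxx'
SENSE_API_KEY = "xxxxxxxxxxxxxxxx"

FEEDS = {'Tektronix': 11111, 'Agilent': 22222}   # search term -> sen.se FEED ID (placeholders)
counts = dict((term, 0) for term in FEEDS)       # running tally per term

def post_count(feed_id, value):
    try:    # keep running even if the sen.se post fails
        datalist = [{"feed_id": feed_id, "value": value}]
        headers = {"sense_key": SENSE_API_KEY, "content-type": "application/json"}
        conn = httplib.HTTPConnection("api.sen.se")
        conn.request("POST", "/events/", simplejson.dumps(datalist), headers)
        conn.getresponse()
        conn.close()
    except:
        pass

class MultiTermStream(TwythonStreamer):
    def on_success(self, data):
        if 'text' not in data:
            return
        text = data['text'].encode('utf-8').lower()
        for term, feed_id in FEEDS.items():
            if term.lower() in text:
                counts[term] = counts[term] + 1
                post_count(feed_id, counts[term])

stream = MultiTermStream(APP_KEY, APP_SECRET, OAUTH_TOKEN, OAUTH_TOKEN_SECRET)
stream.statuses.filter(track=','.join(FEEDS.keys()))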

Run the script and watch the data flow and the counters update.  During debug and test, I would suggest tracking a commonly tweeted word such as "retweet" or "ipad".  That will help with debugging.  If you track a term like "WhiskeyTangoHotel.com" you may be waiting a while to see a result.  ;)

Mostly, the system runs perfectly, but for reasons unknown to me the Python script will 'freeze' from time to time.  I have had it running for days and days at a time without issue, but every so often the freeze just happens.  If you know why, please leave a note in the comments section of the YouTube demo shown above.
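I never tracked down the cause of the freezes, but one possible workaround (not something this build used) is to run the tracker under a small supervisor that relaunches it if it exits, and also restarts it on a schedule in case it has silently hung.  The script name and restart interval below are placeholders; note the tally starts over from zero after each relaunch.

# Hypothetical watchdog sketch: relaunch the tracker if it exits, and restart it every 12 hours
import subprocess
import time

SCRIPT = ['python', 'twitter_tracker.py']   # placeholder name for the tracker script below
RESTART_EVERY = 12 * 60 * 60                # seconds between forced restarts

while True:
    child = subprocess.Popen(SCRIPT)
    started = time.time()
    while child.poll() is None:             # child is still running
        if time.time() - started > RESTART_EVERY:
            child.terminate()               # it may be frozen; force a restart
            break
        time.sleep(60)
    time.sleep(10)                          # short pause before relaunching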
-----
#  RasPI Python script to search Twitter for a string and post to open.sen.se
#  by WhiskeyTangoHotel.Com with special thanks to Sparkfun and twython
#  FEB 2014



import time
from twython import TwythonStreamer
import httplib
import json as simplejson

#
# Search term that you want to find and count
#
Search_Term = 'WhiskeyTangoHotel.Com'  # Not case sensitive. TweET = tweet.
global Search_Term_Counter 
Search_Term_Counter = 0   # counts the finds

localtime = time.asctime( time.localtime(time.time()) )
print localtime
print "START searching for: " + Search_Term

# Twitter application authentication
APP_KEY = 'xxxxxxxxxxxxxxxx'
APP_SECRET = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx'
OAUTH_TOKEN = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx'
OAUTH_TOKEN_SECRET = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx'

# open.se application authentication
SENSE_API_KEY = "xxxxxxxxxxxxxxxxxxxxx"
FEED_ID1 = 12345  #FeedID for Hashvote_Counter2 on open.se

# def function to send Search_Term_Counter to open.se for processing
def send_to_opensense(data):
#    print  >> fout, "\t=> Sending to OpenSense: %s" % data
        try:    # error trap to continue run if crash during open.se postings
                # prepare data     
                datalist = [{"feed_id" : FEED_ID1, "value" :data['F']},]  #:data string not important for counting, but leave it
                headers = {"sense_key": SENSE_API_KEY,"content-type": "application/json"}
                conn = httplib.HTTPConnection("api.sen.se")
                # format a POST request with JSON content
                conn.request("POST", "/events/", simplejson.dumps(datalist), headers)
                response = conn.getresponse()
                # you may get interesting information here in case it fails
                #   print >> fout, response.status, response.reason
                #   print >> fout, response.read()
                conn.close()
        except:
                pass

# def Hash_Counter called from 'class' below.  Add custom code here.
def Hash_Counter ():  
        global Search_Term_Counter
        Search_Term_Counter = Search_Term_Counter + 1
        localtime = time.asctime( time.localtime(time.time()) )
        print localtime
        print "Search Term " + Search_Term + " found " + str(Search_Term_Counter) + " times."
        data = {'F' : Search_Term_Counter}
        send_to_opensense(data)
        
# Setup callbacks from Twython Streamer
try:  # error trap to continue run if crash due to offsite TwythonStreamer
    class Search_Twitter(TwythonStreamer):
        def on_success(self, data):
            # Hash_Counter()   # for debug only
            if 'text' in data:
                Hash_Counter()  # found, so call the Hash_Counter def
                print data['text'].encode('utf-8')  # typically REM'd out unless debugging
                print "-----"   # separator to format the screen
                time.sleep(10)  # pause xx seconds just to keep from flooding sen.se with data
except:
    pass
   
# Create streamer to search Twitter for the Search_Term var
try:
        stream = Search_Twitter(APP_KEY, APP_SECRET, OAUTH_TOKEN, OAUTH_TOKEN_SECRET)
        stream.statuses.filter(track=Search_Term)
except:  #  KeyboardInterrupt:  # helps during debug to exit more gracefully on CNTRL C
        pass  
-----
Good luck and thanks for the visit!


Sunday, February 16, 2014

DS Nano v2 Oscilloscope Performance

The DS Nano v2 is a very low cost 1-channel oscilloscope that, for the ~$80US price, has served me pretty well for my hobby electronics.  The scope came with a nice soft case and two sets of "probes".  It has other features as well.

 ----
This page is not intended to put down the little DS Nano v2.  The scope behaves as advertised.  For example, I had a project that required a piezo sensor input.  Things were not working according to plan, and the DS Nano v2 was able to capture the low frequency signal from the piezo and show that a diode was needed to tame the signal for use as a microcontroller input.


Here is another example of the DS Nano v2 doing a great job showing variable pulse width from a simple PICAXE18 program:

----
That said, the DS Nano v2 does have its limits and I always wanted to compare it to a "real" oscilloscope.  I was able to get my hands on a Tektronix MSO2024 Oscilloscope and Tektronix AFG3252 Signal Generator.  The AFG3252 can produce waveforms up to 240MHz extremely accurately; well within range for our purposes as an input source.
-----
The table below compares some key specs of the DS Nano v2 and the MSO2024:
Again, for our test purposes the Tektronix MSO2024 Oscilloscope is way more than adequate to test the DS Nano v2.
-----
So what about the results?  Below are some screen shots of signals with increasing frequency.  The DS Nano v2 is the smaller screen in the foreground.  The Tektronix MSO2024 is the larger screen in the background.  Both oscilloscopes are connected to the same signal source in parallel with no concern for impedance matching (note the ringing).  You can immediately see that the DS Nano fails miserably at detecting the high frequency ringing.  But let's be fair; the DS Nano spec is 200kHz analog bandwidth, so this result is not a surprise.

One important thing to look at in the pics below is the voltage level cursor lines on the DS Nano v2.  You will notice that there is really no attenuation in the signal the DS Nano v2 displays as we test out to its 200kHz bandwidth; that's good.  However, the limit of the 1MS/s sample rate does start to show.  The 100kHz signal pic really shows how the DS Nano v2 sample rate affects the displayed output.
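As a back-of-the-envelope check on why the 100kHz trace looks so coarse, the little Python snippet below (using the published 1MS/s figure) counts how many sample points the DS Nano v2 has available to draw each cycle.

# Rough arithmetic: samples per cycle at the DS Nano v2's 1MS/s sample rate
sample_rate = 1e6                      # 1 MS/s
for freq in (10e3, 50e3, 100e3, 200e3):
    print "%3.0f kHz -> %5.1f samples per cycle" % (freq / 1e3, sample_rate / freq)
# At 100kHz there are only ~10 points per cycle, which is why the trace looks stair-stepped.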

All that said, if you are looking for a sub-$100US scope that will fit in your pocket, the DS Nano v2 is a handy choice.  If you use it within its specs it is a great value!  If you need more, call Tektronix!
----


----







----

Tuesday, December 10, 2013

Arduino Compass Bearing Following Robot

Objective:
Build a robot that automatically tracks a user provided compass heading.  Create a proof of concept indoor model and then scale up for outdoor use.


-----
If you are not interested in the build details and just want to see the result, take a look at the video below.  Basically, we are using an Arduino Nano and an LSM303DLHC micro-electromechanical (MEMS) compass sensor to read and track a compass bearing.

Rev 2.0: Uses the battery as the mounting chassis:


As shown in the videos, the robot continuously reads its current compass bearing and adjusts to track the desired user direction.  It does this by turning the wheel motors on or off.  To keep things simple, the two motors are powered at full speed (no PWM), and the two motors are never on at the same time.
-----
Components:
Modern technology keeps the BOM small and puts the magic in the software that is shown later.















Arduino Nano microcontroller for the brains.  About $10USD if you shop around.
-----












LSM303DLHC MEMS sensor provides the magnetic compass reading input for the Arduino.  The LSM303DLHC has tilt compensation and accelerometers on board that can be used to detect hitting obstacles.  About $7USD shipped from "eBay China".
-----













754410NE H-Bridge to drive the motors.  (The Arduino can't source enough current to drive the motors.)  As it stands now the project only powers the drive wheels to move forward; however, we selected the 754410NE because with just a few wires and lines of code it can drive both motors forward and reverse.  About $3USD.
-----
Get the above parts, a breadboard, two DC motors, wheels, a power source, etc. and hook it all up.  In the diagram below I tried to closely mimic the breadboard setup in the pics:

















-----
Scaling Up:
We have a few APEX PA74 op amps that are capable of driving VERY high current loads.  We are in the process of scaling the build for outdoor use with strong 12VDC motors.  Here is a quick demo vid of the platform in action:


-----
If you are still with us, here is the Arduino Nano code.  This is the simplest example.  Other options include using the accelerometers to detect obstacles and drawing patterns (triangle, square, out and back, etc.)

/*
 **************************************
 ***** www.WhiskeyTangoHotel.Com  *****
 **************************************
    Project Name: LM303DLHC Compass Robot
 
    Start Date:  Nov 2013
 
    Program Rev History and Notes:
      Special thanks to pololu.com!  See their GitHub site.
   
      Point robot in desired direction then press reset button.
      Put robot on ground.  After 5 sec delay the robot will track the desired bearing.
   
      This version moves the robot on the user desired bearing forever.
      Other versions detect objects (bump) and draw patterns (out and return, box, triangle)

 ***************************************
 */

// Define Variable Types and include.h
#include <Wire.h>
#include <LSM303.h>

int LeftWheel = 11;         //left motor on output D11
int RightWheel = 10;       //right motor on output D10

//**************************************

LSM303 compass;

int gotoheading;  //global var for the initial robot direction.  Or Press reset button...

void setup() {
   pinMode(LeftWheel, OUTPUT);      // sets the digital pin as output
   pinMode(RightWheel, OUTPUT);      // sets the digital pin as output
 
   digitalWrite(LeftWheel, LOW);
   digitalWrite(RightWheel, LOW);
 
   Serial.begin(9600);  // Used for debug only
 
   Wire.begin();
   compass.init();
   compass.enableDefault();

  // Calibration values. Use the pololu.com Calibrate example program to get the values for your compass.
  // Set calibration before the first read so the initial heading uses calibrated data.
  compass.m_min.x = -872; compass.m_min.y = -757; compass.m_min.z = -212;
  compass.m_max.x = +461; compass.m_max.y = +511; compass.m_max.z = +844;

  // Read the direction that you want the robot to go.  LSM303 header pins point in that direction.
  compass.read();
  gotoheading = compass.heading((LSM303::vector){0,-1,0});

  delay(5000);  // delay xx mSecs after power up before doing anything

}

void loop() {
  compass.read();
  int heading = compass.heading((LSM303::vector){0,-1,0});

 ///*  Print to the PC monitor.  Debug only.
  Serial.print(gotoheading);
  Serial.print(" Current is: ");
  Serial.print(heading);

//*/  //End Serial print to PC comment block
 
  if (heading > gotoheading)    //turn the bot left by turning on right motor
    {
      digitalWrite(LeftWheel, LOW);
      digitalWrite(RightWheel, HIGH);
      Serial.println("   OFF  --  ON");
    }
   
  if (heading <= gotoheading)  //turn the bot right by turning on left motor
    {
    digitalWrite(LeftWheel, HIGH);
    digitalWrite(RightWheel, LOW);
    Serial.println("   ON --  OFF");
    }

  delay(0);   //in mSecs

}  //void loop
 
-----

Friday, October 25, 2013

Exploding a Capacitor

Every electronics DIY/hobby site seems to have a cap exploding.  This is the WhiskeyTangoHotel obligatory example.  Actually, this is the first capacitor we have ever exploded (on purpose, that is).  The cap is spec'd at 15VDC.  We reverse-biased it with 40VDC.


Tuesday, August 6, 2013

Failure Tutorial: All hail the Mighty Bypass Cap

It's frustrating.  You design a great project, get it working perfectly on the breadboard with that nice beefy bench power supply only to discover it stops working in "the real world".  What the hell's going on?
-----

Here is a short video showing how everything just stops when the bypass cap is removed from my Etch-a-Sketch Temperature vs Time plotter.  Reconnect the bypass cap and, like magic, everything is back to normal.  Pretty amazing, huh?
-----
If you follow DIY projects on the web you see this issue frequently.  The fix often is simply to add a capacitor between power and ground, typically a small ceramic cap (0.1uF is a common choice) placed close to the chip's power pin.  This is called a bypass cap or decoupling cap.  They are most useful in projects with electrical noise or where larger loads like motors, solenoids, relays, etc. are involved.

As I said, this recently happened to me.  I had the great idea to use two stepper motors mounted to an Etch-a-Sketch to chart Temperature vs Time.  The project came off fine in the end, but not without a bump in the road.

The rig was stable with one stepper motor.  However, adding the second stepper motor made everything 'wacky'.   Sometimes the PICAXE 18M2 microcontroller would not even accept new code downloads.  All fixed with a by-pass cap.
-----
Below is a short video of the finished rig in action.  (Thanks Hack-a-Day for featuring it!)  Take a look at the build page if you are interested in duplicating the project.

-----
Thanks for the visit and "All hail the mighty bypass cap"!!!

Tuesday, July 16, 2013

Raspberry PI: Charting Ambient vs Outside Temperature

How to use a Raspberry PI to chart ambient temperature vs outside temperature.  Source code and schematics below.

What you need:
-----
What you get:

Reading the graph above is pretty straightforward.  It plots the temperature of the DS18B20 sensor connected to the Raspberry PI vs. the outside temperature that is provided by a local weather forecast feed.  Just for fun, we also display Min and Max temperatures (which can be reset).
-----
The graphing is provided by sen.se.  The sen.se site offers a lot of flexibility with "the internet of things".  sen.se is free.  Sign up and scan the tutorials.  The site is well laid out and the tutorials are very straight forward; you'll be an expert in no time.  Basically, you want to create a "channel" for your Raspberry PI by 'adding a device'.  sen.se will give you a 5 digit channel number for your RasPI and a very long passphrase that will be your personal identifier.  You will need both of these for the source code below.
-----
Next, let's connect the DS18B20 to the Raspberry PI.  The DS18B20 transmits its temperature reading over the 1-Wire bus.  Just follow the tutorial at Adafruit.  The connection is simple and looks like this:
-----
Load the Python script below into your Raspberry Pi and run it.  Be certain you enter your personal passphrase identifier and the device channel code that you got earlier from sen.se.  After you run the Python script head back over to sen.se.  You should see that sen.se has detected a 'heartbeat' from your Raspberry PI.  After that, it is just a matter of configuring one of the graphing apps on sen.se.  You can make your sen.se data public or private and there are many many tools to manipulate and display your data.
-----
Good luck!  Python script for the RasPI follows:

# WhiskeyTangoHotel.Com
# June 2013
# Program reads DS18B20 temp sensor and plots value to sen.se
# DS18B20 connections via AdaFruit tutorial
# With thanks to @Rob_Bishop

# This program's sen.se feed is customized for RasPI(2)

import httplib
import json as simplejson
from random import randint
import time
import os
import glob

# Pass os commands to load the 1-Wire kernel modules for the DS18B20
os.system('modprobe w1-gpio')  
os.system('modprobe w1-therm')

base_dir = '/sys/bus/w1/devices/'
device_folder = glob.glob(base_dir + '28*')[0]
device_file = device_folder + '/w1_slave'

run_number = 0

SENSE_API_KEY = "long sen.se passphase here. note that it is in quotes"
FEED_ID1 = 12345  # five digit sen.se channel code.  note it is NOT in quotes

def read_temp_raw():  #read the DS18B20 function
    f = open(device_file, 'r')
    lines = f.readlines()
    f.close()
    return lines

def read_temp(): #process the raw temp file output and convert to F
    lines = read_temp_raw()
    while lines[0].strip()[-3:] != 'YES':
        time.sleep(1)
        lines = read_temp_raw()
    equals_pos = lines[1].find('t=')
    if equals_pos != -1:
        temp_string = lines[1][equals_pos+2:]
        ambC = float(temp_string) / 1000.0
        ambF = ambC * 9.0 / 5.0 + 32.0
        return ambF

def send_to_opensense(data):
#    print  >> fout, "\t=> Sending to OpenSense: %s" % data
    try:
        # prepare data
        datalist = [{"feed_id" : FEED_ID1, "value" :data['F']},]
        headers = {"sense_key": SENSE_API_KEY,"content-type": "application/json"}
        conn = httplib.HTTPConnection("api.sen.se")
        # format a POST request with JSON content
        conn.request("POST", "/events/", simplejson.dumps(datalist), headers)
        response = conn.getresponse()
        # you may get interesting information here in case it fails
        #   print >> fout, response.status, response.reason
        #   print >> fout, response.read()
        conn.close()
    except:
        pass

while(True):
    try:
        run_number = run_number + 1
        ambF = read_temp()
        print "RasPI(2) Ambient Run:", run_number, "    ambF:", ambF
        data = { 'F' : ambF}
        send_to_opensense(data)
        time.sleep(300)
    except:
        pass
-----







Wednesday, May 29, 2013

PI in the Oven: Logging Raspberry PI Core Temperatures to Sen.se

Objective:  Create a method of logging Raspberry PI data to the sen.se website.  In this example I plot the core temperature of two Raspberry PIs, but the method can be adapted to log virtually any form of data that you wish to capture or generate with the PI.  The python code is below to get you running quickly.
-----

The graph above is generated by sending data from the PI to the sen.se API.  Sen.se is one cool place.  Their goal is to assist in internet connectivity of personal devices; "the internet of things".  They have widgets, tools, applications, channels and a few other things that I barely understand.  Play around with sen.se some and you will get the idea.  In my application, I have sen.se graphing the core temperature of two Raspberry PIs; a data point every 60 seconds.  Just for fun, I also keep track of the number of readings, calculate an average temperature, and display the temperature change since the last reading.  Sen.se allows you to keep this information private or display it to the public.  If you want to see what my PIs are up to in real time, check out my public SenseBoard.  While you are there you can also check in on the activity of our two house cats as they move through their Tweeting Cat Door.
----
So.... from the graph we see one PI is running about ~12F hotter than the other.  Why?  

Probably due to a few reasons:  
RasPI_1 is always running the "motion" webcam software and functioning as an OpenVPN server.  RasPI_1 is also in a fully enclosed case.  (Maybe I should take it out of that case....)

RasPI_2 is not in a case and is only running my Hand of PI project.  Hand of PI is a robot hand that you can control by sending twitter commands to it.  Click for build page.

Of course, both PIs are running the temperature logging script.
-----
If you are still with me, the python source code is below.  It has been running flawlessly for a while, so it should be solid.  Occasionally Sen.se will go down briefly for maintenance, but that is why I put in the error traps.  Good luck and tweet the Hand of PI to let us know you were here!!!
------
# whiskeytangohotel.com
# May 2013

# Python script to read RaspberryPI
# internal core temp, convert from C to F
# and log to sen.se for graphing.

# If you get errors on the import,
# make certain you have the required packages installed

import httplib
import json as simplejson
from random import randint
import time

# init some vars
run_number = 0
tempC = 0
tempF = 0


# Enter your private sen.se API KEY in quotes.  Enter the Feed ID# without quotes
SENSE_API_KEY = "x1xxxxxy2yyyyyyz3zzzz"  
FEED_ID1 = 12345

# Function to format for sen.se
# The try/except is there to trap errors if sen.se goes down
# or is slow.  This keeps the script running.
def send_to_opensense(data):
    try:
        # prepare data
        datalist = [{"feed_id" : FEED_ID1, "value" :data['F']},]
        headers = {"sense_key": SENSE_API_KEY,"content-type": "application/json"}
        conn = httplib.HTTPConnection("api.sen.se")
        # format a POST request with JSON content
        conn.request("POST", "/events/", simplejson.dumps(datalist), headers)
        response = conn.getresponse()
        conn.close()
    except:
        pass

while(True):

    # The try/except is there to trap errors if sen.se goes down
    # or is slow.  This keeps the script running
    try:
        # read the PI core temperature and store in tempC
        # then convert from C to F and send the data to sen.se
        tempC = int(open('/sys/class/thermal/thermal_zone0/temp').read()) / 1e3
        tempF = (tempC * 1.8) + 32
        run_number = run_number + 1
        print "Run:", run_number, "    tempC:", tempC, "    tempF:", tempF
        data = { 'F' : tempF}
        send_to_opensense(data)
        time.sleep(60)
    except:
        pass
-----




Saturday, May 11, 2013

Speech Synthesis on the Raspberry PI

Here is a quick and simple tip to add speech output to jazz up your Raspberry PI projects.  It is as easy as installing the "festival" Text to Speech (TTS) application and then calling it from the command line or a Python script.

To install "festival" you need to be at a terminal prompt on the Raspberry PI.  Then type:
$ sudo apt-get install festival festival-freebsoft-utils
You will get asked to confirm the install.  Type "Y".

To make the PI talk, just issue a command from the terminal prompt, such as:
$ echo  "Whiskey Tango Hotel dot com Where stupidity meets reality"| festival --tts
That's it.  Of course, you can also execute this from a Python script, etc.
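For example, a minimal Python sketch (assuming festival is installed as shown above) can shell out to festival and pipe it the text to speak:

# Minimal sketch: speak a phrase from Python by piping text into festival --tts
import subprocess

def say(text):
    # same as the command line example above, just driven from Python
    p = subprocess.Popen(['festival', '--tts'], stdin=subprocess.PIPE)
    p.communicate(text)

say("Whiskey Tango Hotel dot com")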

Here's a sample of the output.  Sounds pretty good to me.




Thursday, April 11, 2013

Hand of PI (Twitter controlled Robot Hand)

OBJECTIVE:  Use the Raspberry PI to monitor a Twitter feed and control a mechanical device.

If you are not interested in the details of the build and just want to see the result you can watch the vid below.  

-----
RESULT:  Success!!!  You can control the "Hand of PI" by sending a tweet to @OurCatDoor.  If your tweet includes any of the text below, the "Hand of PI" obeys your command.  Valid commands are (lowercase):
  • one (holds up one finger)
  • peace (shows the two finger peace sign)
  • three (three fingers up)
  • hookem (if you are a Texas Longhorn fan this one makes sense)
  • fist (the Hand of PI gets ready to fight)
  • open (ready for a 'high five')
  • finger (well...  this will be the most tweeted command)
Go ahead, try it!!!  Send a tweet command to @OurCatDoor to let us know you were here.
-----
Basically what you are seeing is the Raspberry PI running a Python script searching any tweet sent to @OurCatDoor.  In the video, an iPad sends a tweet to @OurCatDoor that has the command "finger" in it.  It takes a few seconds, but the Raspberry PI finds the tweet, parses it, and finds the "finger" command.  The Python script then sets the PI's GPIO ports High/Low.  The PI GPIO is connected to a PICAXE 18M2 (via an HC7404 buffer).  The PICAXE 18M2 reads the PI's GPIO to control five servo motors.  "Hand of PI" reacts with the appropriate gesture.  Watch closely and you can see the text on the screen update as the "finger" command is found and the "Hand of PI" gestures.  There's a lot going on here.  Confused?  This diagram should help (click to see full size):
Of course this isn't a full schematic, but it lays out all the I/O to align with the source code you see below.  Really, the interconnects and 5VDC to the servos, PI, PICAXE, and HC7404 are something anyone wanting to duplicate the project should easily understand given the block diagram, the source code, and the gesture table below.
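To make the three-bit encoding easier to follow, here is the gesture-to-GPIO mapping that both listings below implement, summarized as a small Python dictionary (pins are the PI's BOARD numbers 15, 13, 11):

# Gesture -> (GPIO15, GPIO13, GPIO11) levels, as used by both listings below
GESTURES = {
    'open':     (0, 0, 0),
    'one':      (0, 0, 1),
    'peace':    (0, 1, 0),
    'three':    (0, 1, 1),
    'hookem':   (1, 0, 0),
    'fist':     (1, 0, 1),
    'finger':   (1, 1, 0),
    'wavewave': (1, 1, 1),   # the wave gesture is commented out on the PICAXE side
}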
-----
Let's show a few more pics and action videos of the rig before we get into the source code:
 
-----
This video is a bit long but demonstrates all the gestures of the "Hand of PI".  The screen in the background shows output from the Python script.  The screen is not needed, but I included it in the video to show the tweets as they are captured.  Note the "Hand of PI" reacts when a new tweet command is found.
Everyone wants to see the "Hand of PI" flip the bird; that is the last gesture if you want to skip to the end...
----
If you are still with us, enjoy some source code for your reading pleasure.

First, the program that is running on the PICAXE 18M2.  Its job is to read the Raspberry PI's GPIO output and control the five servo motors on the "Hand of PI".

' PICAXE 18M2 for RaspPI integration to Twitter Controlled Hand Gesture Robot APRIL 2013
'"THE HAND OF PI"
' www.whiskeytangohotel.com
' NOTE: PICAXE Program Editor Rev newer than 5.3.6 causes servo jitter***
' Other than the minimum PICAXE 18M2 'keep alive' 22K R & 10K R
' no other R, C, L, etc. needed for the project.
' Everything on PICAXE powered by 4.5VDC
' The PICAXE drives the servos straight from the chip.
' See pinouts in comments

' 0 is Thumb (PICAXE pin 6)
' 1 is Pointer (PICAXE pin 7)
' 2 is Middle (PICAXE pin 8)
' 3 is Ring (PICAXE pin 9)
' 4 is Pink (PICAXE pin 10)
' Normally Open Button Switch is PICAXE pin 18 (pulled HIGH with 10K)
; this button will not be used for the PI integration

' PI GPIO 11 connected to c.0 (PICAXE pin 17)
' PI GPIO 13 connected to c.7 (PICAXE pin 16)
' PI GPIO 15 connected to c.6 (PICAXE pin 15)

symbol RaspPI11 = pinc.0
symbol RaspPI13 = pinc.7
symbol RaspPI15 = pinc.6

'Define Servo values to fully EXtend/Open finger
Symbol Ex_Thumb = 60
Symbol Ex_Pointer = 60
Symbol Ex_Middle = 245
Symbol Ex_Ring = 60
Symbol Ex_Pink = 60

'Define Servo values to fully CLose finger
Symbol CL_Thumb = 225
Symbol CL_Pointer = 240
Symbol CL_Middle = 50
Symbol CL_Ring = 240
Symbol CL_Pink = 240

'Init the servos
servo 0, Ex_Thumb
servo 1, Ex_Pointer
servo 2, Ex_Middle
servo 3, Ex_Ring
servo 4, Ex_Pink

pause 400

'Gesture subroutines (2^3 = 8 combinations are PI callable)
' Valid Tweet commands are: one, peace, three, hookem, fist, finger, wave

'Ensure Open_Hand position at program start
gosub Open_Hand
pause 500

main:  'This loops until hell freezes over

'Read the RasPI GPIO bus and  jump to gesture sub routine

If RaspPI15 = 0 and RaspPI13 = 0 and RaspPI11 = 0 then
gosub Open_Hand
end if

If RaspPI15 = 0 and RaspPI13 = 0 and RaspPI11 = 1 then
gosub One
end if

If RaspPI15 = 0 and RaspPI13 = 1 and RaspPI11 = 0 then
gosub Peace
end if

If RaspPI15 = 0 and RaspPI13 = 1 and RaspPI11 = 1 then
gosub Three
end if

If RaspPI15 = 1 and RaspPI13 = 0 and RaspPI11 = 0 then
gosub Hook_em
end if

If RaspPI15 = 1 and RaspPI13 = 0 and RaspPI11 = 1 then
gosub Fist
end if

If RaspPI15 = 1 and RaspPI13 = 1 and RaspPI11 = 0 then
gosub F_You
end if

'If RaspPI15 = 1 and RaspPI13 = 1 and RaspPI11 = 1 then
' gosub Wave  'wave is pretty hard on the servos, so we commented it out
'end if

pause 5
goto main

' Gesture Subroutines below:
Open_Hand:
servopos 0, Ex_Thumb
servopos 1, Ex_Pointer
servopos 2, Ex_Middle
servopos 3, Ex_Ring
servopos 4, Ex_Pink
return ' Open_Hand

Hook_em:
servopos 0, CL_Thumb
servopos 1, Ex_Pointer
servopos 2, CL_Middle
servopos 3, CL_Ring
servopos 4, Ex_Pink
return 'Hook_em

F_you:
servopos 0, CL_Thumb
servopos 1, CL_Pointer
servopos 2, Ex_Middle
servopos 3, CL_Ring
servopos 4, CL_Pink
return 'F_you

One:
servopos 0, CL_Thumb
servopos 1, Ex_Pointer
servopos 2, CL_Middle
servopos 3, CL_Ring
servopos 4, CL_Pink
return 'One

Peace:
servopos 0, CL_Thumb
servopos 1, Ex_Pointer
servopos 2, Ex_Middle
servopos 3, CL_Ring
servopos 4, CL_Pink
return 'Peace

Three:
servopos 0, CL_Thumb
servopos 1, Ex_Pointer
servopos 2, Ex_Middle
servopos 3, Ex_Ring
servopos 4, CL_Pink
return 'Three

Four:
servopos 0, CL_Thumb
servopos 1, Ex_Pointer
servopos 2, Ex_Middle
servopos 3, Ex_Ring
servopos 4, Ex_Pink
return 'Four

Fist:
servopos 0, CL_Thumb
servopos 1, CL_Pointer
servopos 2, CL_Middle
servopos 3, CL_Ring
servopos 4, CL_Pink
return 'Fist

Wave:  'waves the fingers
servopos 0, CL_Thumb
pause 70
servopos 1, CL_Pointer
pause 70
servopos 2, CL_Middle
pause 70
servopos 3, CL_Ring
pause 70
servopos 4, CL_Pink
pause 70

servopos 0, Ex_Thumb
pause 70
servopos 1, Ex_Pointer
pause 70
servopos 2, Ex_Middle
pause 70
servopos 3, Ex_Ring
pause 70
servopos 4, Ex_Pink
return 'Wave
----
Now for the Python script running on the Raspberry PI.  Its job is to search any tweet sent to @OurCatDoor, parse it for a "Hand of PI" command, and then set the PI's GPIO as input to the PICAXE 18M2.



# WhiskeyTangoHotel.com - APRIL 2013   (special thanks to @Rob_Bishop)
# Error traps entered due to json hitting web site that was down etc.
# For/next loop added to end of prog to blink an LED to show the program is running.

# Import the urllib library to read data from webpages
import urllib

# Import the simplejson library to  decode the data read from the webpage
import simplejson

# Import the time library for delay and elapsed time tracking
import time
CurrentTime = time.time()

# Import the Raspberry Pi GPIO libraries
import RPi.GPIO as GPIO

# Set-up the GPIO pins
# Clear the current set-up
GPIO.cleanup()

# Set up the GPIO library to use Raspberry Pi board pin numbers
GPIO.setmode(GPIO.BOARD)

# Set pin 11, 13, 15  on the GPIO header to be an output
GPIO.setup(11,GPIO.OUT)  #PICAXE leg 17 (c.0)
GPIO.setup(13,GPIO.OUT)  #PICAXE leg 16 (c.7)
GPIO.setup(15,GPIO.OUT)  #PICAXE leg 15 (c.6)
GPIO.setup(7,GPIO.OUT)   #Blinkie LED to let us know the prog is running

# Start with Open Hand
GPIO.output(11,GPIO.LOW)
GPIO.output(13,GPIO.LOW)
GPIO.output(15,GPIO.LOW)
Last_gesture = "open"
Error_hit = 0
print "Hand open.  Waiting for Tweet...","\n"

# Function to take Twitter handle (e.g. @Raspberry_Pi) as an argument and return the most recent tweet

# Define the function name and show the arguments
def Latest_Tweet_to_Twitter_Handle(twitter_handle):
    try:
        # Get the results of a search on Twitter for tweets containing the given handle
        Twitter_search_results = urllib.urlopen("http://search.twitter.com/search.json?q="+twitter_handle)

        # Decode the data that we got from the webpage to form a list of tweets
        result_list = simplejson.loads(Twitter_search_results.read())

        # The function returns the first result in the list
        return result_list["results"][0]["text"]
    except:
        pass

# Main body of the program - Get the latest tweet and check if it contains certain words
# Loop to run forever

#Twitter commands the hand understands are:
#open, one, peace, three, hookem, fist, finger, wavewave

while(True):
    try:
        # Time since program start in seconds
        DeltaTime = int(time.time() - CurrentTime)

        # Function gets the latest tweet mentioning the handle given in next line
        Tweet = Latest_Tweet_to_Twitter_Handle("@OurCatDoor")

        # START TEST(open): Check if tweet contains the word given in quotation marks
        if "open" in Tweet: # and Last_gesture != "open":
            Last_gesture = "open"
            # If it did contain the word then print out the tweet along with a message
            print DeltaTime,"seconds:",Tweet," - Gesture OPEN HAND","\n"
            # Set the PICAXE inputs
            GPIO.output(11,GPIO.LOW)
            GPIO.output(13,GPIO.LOW)
            GPIO.output(15,GPIO.LOW)
        #---END TEST(open)---

        # START TEST(one): Check if tweet contains the word given in quotation marks
        if "one" in Tweet: # and Last_gesture != "one":
            Last_gesture = "one"
            # If it did contain the word then print out the tweet along with a message
            print DeltaTime,"seconds:",Tweet," - Gesture ONE","\n"
            # Set the PICAXE inputs
            GPIO.output(11,GPIO.HIGH)
            GPIO.output(13,GPIO.LOW)
            GPIO.output(15,GPIO.LOW)
        #---END TEST(one)---

        # START TEST(peace): Check if tweet contains the word given in quotation marks
        if "peace" in Tweet: # and Last_gesture != "peace":
            Last_gesture = "peace"
            # If it did contain the word then print out the tweet along with a message
            print DeltaTime,"seconds:",Tweet," - Gesture PEACE","\n"
            # Set the PICAXE inputs
            GPIO.output(11,GPIO.LOW)
            GPIO.output(13,GPIO.HIGH)
            GPIO.output(15,GPIO.LOW)
        #---END TEST(peace)---

        # START TEST(three): Check if tweet contains the word given in quotation marks
        if "three" in Tweet: # and Last_gesture != "three":
            Last_gesture = "three"
            # If it did contain the word then print out the tweet along with a message
            print DeltaTime,"seconds:",Tweet," - Gesture THREE","\n"
            # Set the PICAXE inputs
            GPIO.output(11,GPIO.HIGH)
            GPIO.output(13,GPIO.HIGH)
            GPIO.output(15,GPIO.LOW)
        #---END TEST(three)---

        # START TEST(hookem): Check if tweet contains the word given in quotation marks
        if "hookem" in Tweet: # and Last_gesture != "hookem":
            Last_gesture = "hookem"
            # If it did contain the word then print out the tweet along with a message
            print DeltaTime,"seconds:",Tweet," - Gesture HOOK EM HORNS","\n"
            # Set the PICAXE inputs
            GPIO.output(11,GPIO.LOW)
            GPIO.output(13,GPIO.LOW)
            GPIO.output(15,GPIO.HIGH)
        #---END TEST(hookem)---

        # START TEST(fist): Check if tweet contains the word given in quotation marks
        if "fist" in Tweet: # and Last_gesture != "fist":
            Last_gesture = "fist"
            # If it did contain the word then print out the tweet along with a message
            print DeltaTime,"seconds:",Tweet," - Gesture FIST","\n"
            # Set the PICAXE inputs
            GPIO.output(11,GPIO.HIGH)
            GPIO.output(13,GPIO.LOW)
            GPIO.output(15,GPIO.HIGH)
        #---END TEST(fist)---

        # START TEST(finger): Check if tweet contains the word given in quotation marks
        if "finger" in Tweet: # and Last_gesture != "finger":
            Last_gesture = "finger"
            # If it did contain the word then print out the tweet along with a message
            print DeltaTime,"seconds:",Tweet," - Gesture FINGER F_YOU","\n"
            # Set the PICAXE inputs
            GPIO.output(11,GPIO.LOW)
            GPIO.output(13,GPIO.HIGH)
            GPIO.output(15,GPIO.HIGH)
        #---END TEST(finger)---

        # START TEST(wavewave): Check if tweet contains the word given in quotation marks
        if "wavewave" in Tweet: # and Last_gesture != "wave":
            Last_gesture = "wavewave"
            # If it did contain the word then print out the tweet along with a message
            print DeltaTime,"seconds:",Tweet," - Gesture WAVE","\n"
            # Set the PICAXE inputs
            GPIO.output(11,GPIO.HIGH)
            GPIO.output(13,GPIO.HIGH)
            GPIO.output(15,GPIO.HIGH)
        #---END TEST(wavewave)---

        for x in range(0, 10):
            # Wait for xx seconds before repeating
            # Blinkie LED to let us know the program is running
            GPIO.output(7,GPIO.HIGH)
            time.sleep(.1)
            GPIO.output(7,GPIO.LOW)
            time.sleep(1)
    except:
        pass


-----
If you are still awake, thanks for checking out the build.  Send a tweet to @OurCatDoor to let us know you were here.
-----