I had to do something similar in the past. I've combined what I think are the important parts. Once it's set up, you can run Fabric commands on a host, or a group of hosts, by passing a list for the `hosts` parameter. Hope this helps.
from fabric.api import *
import socket
# Cap how long socket operations (e.g. the SSH connect) may block, in seconds.
socket.setdefaulttimeout(5)
# Setup fabric details
# NOTE(review): these are module-level side effects — every importer gets this
# user/key; assumes the private key exists at this path on the control host.
env.user = 'fabric-user'
env.key_filename = '/root/.ssh/fabric_rsa'
def execute_remote(command):
    """Run *command* on the current Fabric host (serially) and return its result.

    Intended to be invoked via ``fabric.api.execute``, which supplies the
    host context.
    """
    # Indentation restored: the pasted snippet had the body at column 0,
    # which is a SyntaxError in Python.
    return run(command)
@parallel
def execute_remote_parallel(command):
    """Run *command* on the current Fabric host, executed in parallel across hosts.

    The ``@parallel`` decorator makes ``fabric.api.execute`` fan this task out
    to all hosts concurrently instead of one at a time.
    """
    # Indentation restored: the pasted snippet had the body at column 0.
    return run(command)
def run_fabric(cmd, hosts, in_parallel):
    """Execute *cmd* on *hosts*, either serially or in parallel.

    :param cmd: shell command string to run on each host.
    :param hosts: list of host strings to run against.
    :param in_parallel: truthy to run on all hosts concurrently.
    :returns: the dict returned by ``fabric.api.execute`` mapping each host
        to its task result.
    """
    # Select the task once instead of duplicating the execute(...) call.
    task = execute_remote_parallel if in_parallel else execute_remote
    return execute(task, command=cmd, hosts=hosts)
class FabricFunctions:
    """Convenience wrappers that build service-management commands and
    dispatch them through :func:`run_fabric`."""

    def _service_command(self, hosts, in_parallel, servicename, action):
        # Shared helper: the original start/stop methods duplicated the
        # command construction, differing only in the action verb.
        cmd = "sudo /sbin/service %s %s" % (servicename, action)
        return run_fabric(cmd, hosts, in_parallel)

    def stop_service(self, hosts, in_parallel, servicename):
        """Stop *servicename* on *hosts*; returns the per-host result dict."""
        # Now returns the result instead of silently discarding it.
        return self._service_command(hosts, in_parallel, servicename, "stop")

    def start_service(self, hosts, in_parallel, servicename):
        """Start *servicename* on *hosts*; returns the per-host result dict."""
        return self._service_command(hosts, in_parallel, servicename, "start")
#
# Example - Untested!
#
f = FabricFunctions()
# hosts should be a list (a bare string risks being iterated character by
# character by Fabric), so wrap the single instance IP in a list.
f.start_service(["ec2-instance-ip"], False, "httpd")
It should be pretty simple to add new methods to the `FabricFunctions` class to run pip commands.