I have the following code:

import pika
import os
import eventlet
from eventlet.green import threading

pika = eventlet.import_patched('pika')
eventlet.monkey_patch()

#More Code

def pika_client():
    global connection, channel
    params = pika.ConnectionParameters(heartbeat=500,
                                       blocked_connection_timeout=300)
    connection = pika.BlockingConnection(params)
    channel = connection.channel()
    return 1

if __name__ == '__main__':
    eventlet.spawn(pika_client)
    socketio.run(app, host='192.168.1.214')
However, the pika connection gets disconnected after 20-30 mins.
Any help will be highly appreciated.
Pika's BlockingConnection is not compatible with eventlet patching. There is a small chance that SelectConnection will work, but it has never been tested and is not supported.
NOTE: the RabbitMQ team monitors the rabbitmq-users mailing list and only sometimes answers questions on StackOverflow.
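For reference, a minimal sketch of the callback-driven SelectConnection pattern the answer mentions. The queue name, host, and callback names here are illustrative, and, as the answer notes, this path is untested and unsupported under eventlet.

import pika

def on_channel_open(channel):
    # Channel is ready; declare a placeholder queue and publish a test message.
    channel.queue_declare(queue='test_queue')
    channel.basic_publish(exchange='', routing_key='test_queue', body=b'ping')

def on_connection_open(connection):
    # Open a channel once the connection is established.
    connection.channel(on_open_callback=on_channel_open)

params = pika.ConnectionParameters(host='localhost', heartbeat=500,
                                   blocked_connection_timeout=300)
connection = pika.SelectConnection(params, on_open_callback=on_connection_open)
try:
    # SelectConnection drives its own I/O loop instead of making blocking calls.
    connection.ioloop.start()
except KeyboardInterrupt:
    connection.close()
    connection.ioloop.start()  # let the close handshake finish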
I am currently using the code below and it seems to be working. Could you please tell me if this will create problems?
import time

# socketio, app, and pika are set up as in the snippet above

def pika_client():
    global connection, channel
    params = pika.ConnectionParameters(heartbeat=600,
                                       blocked_connection_timeout=300)
    connection = pika.BlockingConnection(params)
    channel = connection.channel()
    print('Connection created')
    while True:
        time.sleep(650)
        try:
            connection.process_data_events()
        except pika.exceptions.StreamLostError:
            print("Will try to re-connect next.")
            params = pika.ConnectionParameters(heartbeat=600,
                                               blocked_connection_timeout=300)
            connection = pika.BlockingConnection(params)
            channel = connection.channel()
            print('Connection re-created')
        except KeyboardInterrupt:
            # Gracefully close the connection
            channel.close()
            connection.close()

try:
    pikaClient = socketio.start_background_task(pika_client)
    socketio.run(app, host='192.168.1.214')
except KeyboardInterrupt:
    pikaClient.join()
In order to push real-time database info to the client, I use Flask-SocketIO on the server side to push it over a websocket.
Here is a snippet of my view file:
from ..models import Host
from flask_socketio import emit, disconnect
from threading import Thread

thread = None

def background_thread(app=None):
    """
    Send host status information to the client.
    """
    with app.app_context():
        while True:
            # update host info interval
            socketio.sleep(app.config['HOST_UPDATE_INTERVAL'])
            # socketio.sleep(5)
            all_hosts = dict([(host.id, host.status) for host in Host.query.all()])
            socketio.emit('update_host', all_hosts, namespace='/hostinfo')

@main.route('/', methods=['GET', 'POST'])
def index():
    all_hosts = Host.query.all()
    return render_template('dashboard.html', hosts=all_hosts, async_mode=socketio.async_mode)

@socketio.on('connect', namespace='/hostinfo')
def on_connect():
    global thread
    if thread is None:
        app = current_app._get_current_object()
        thread = socketio.start_background_task(target=background_thread, app=app)
    emit('my_response', {'data': 'connected'})

@socketio.on('disconnect', namespace='/hostinfo')
def on_disconnect():
    print('Client disconnected...', request.sid)

@socketio.on('my_ping', namespace="/hostinfo")
def ping_pong():
    emit('my_pong')
However, when I update my database Host table, Host.query.all() still returns the old information. I don't know why.
Thanks a lot to @miguelgrinberg. The background thread keeps reusing an old session, so each iteration just gets the cached data. Adding db.session.remove() at the end of the while True loop makes each iteration start with a clean session.
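A minimal sketch of that fix, assuming db is the Flask-SQLAlchemy instance behind the Host model:

def background_thread(app=None):
    """Send host status information to the client."""
    with app.app_context():
        while True:
            socketio.sleep(app.config['HOST_UPDATE_INTERVAL'])
            all_hosts = {host.id: host.status for host in Host.query.all()}
            socketio.emit('update_host', all_hosts, namespace='/hostinfo')
            # Discard the scoped session so the next query starts fresh
            # instead of reusing the cached identity map.
            db.session.remove()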
I want to build a monitoring system using RabbitMQ and Tornado. I can run the producer, and my consumer can consume the data on the queue, but the data can't be shown on the website.
This is just my experiment before I use the sensor.
import pika
import tornado.ioloop
import tornado.web
import tornado.websocket
import logging
from threading import Thread

logging.basicConfig(level=logging.INFO)

clients = []
credentials = pika.credentials.PlainCredentials('ayub', 'ayub')
connection = pika.BlockingConnection(pika.ConnectionParameters('192.168.43.101',
                                                               5672,
                                                               '/',
                                                               credentials))
channel = connection.channel()

def threaded_rmq():
    channel.basic_consume('Queue',
                          on_message_callback=consumer_callback,
                          auto_ack=True,
                          exclusive=False,
                          consumer_tag=None,
                          arguments=None)
    channel.start_consuming()

def disconnect_rmq():
    channel.stop_consuming()
    connection.close()
    logging.info('Disconnected from broker')

def consumer_callback(ch, method, properties, body):
    for itm in clients:
        itm.write_message(body)

class SocketHandler(tornado.websocket.WebSocketHandler):
    def open(self):
        logging.info('websocket open')
        clients.remove(self)

    def close(self):
        logging.info('websocket closed')
        clients.remove(self)

class MainHandler(tornado.web.RequestHandler):
    def get(self):
        self.render("websocket.html")

application = tornado.web.Application([
    (r'/ws', SocketHandler),
    (r"/", MainHandler),
])

def startTornado():
    application.listen(8888)
    tornado.ioloop.IOLoop.instance().start()

def stopTornado():
    tornado.ioloop.IOLoop.instance().stop()

if __name__ == "__main__":
    logging.info('starting thread RMQ')
    threadRMQ = Thread(target=threaded_rmq)
    threadRMQ.start()

    logging.info('starting thread tornado')
    threadTornado = Thread(target=startTornado)
    threadTornado.start()

    try:
        raw_input("server ready")
    except SyntaxError:
        pass

    try:
        logging.info('disconnected')
        disconnect_rmq()
    except Exception as e:
        pass

    stopTornado()
but I got this error:
WARNING:tornado.access:404 GET /favicon.ico (192.168.43.10) 0.98ms
Please help me.
In your SocketHandler.open function you need to add the client, not remove it.
Also, consider using a set for clients instead of a list, because the remove operation will be faster:
clients = set()

...

class SocketHandler(tornado.websocket.WebSocketHandler):
    def open(self):
        logging.info('websocket open')
        clients.add(self)

    def close(self):
        logging.info('websocket closed')
        clients.remove(self)
The message you get regarding favicon.ico is actually a warning and it's harmless (the browser is requesting an icon to show for the web application but won't complain if none is available).
You might also run into threading issues, because Tornado and Pika are running in different threads, so you will have to synchronize them; you can use Tornado's IOLoop.add_callback method for that.
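A minimal sketch of that synchronization, assuming the Tornado IOLoop object is captured once in the main thread (the main_loop and broadcast names are placeholders): the pika consumer thread schedules the websocket writes on Tornado's loop instead of calling write_message directly.

import tornado.ioloop

# Captured in the main thread before the consumer thread starts.
main_loop = tornado.ioloop.IOLoop.instance()

def broadcast(body):
    # Runs on the Tornado IOLoop thread, so touching the handlers is safe here.
    for client in clients:
        client.write_message(body)

def consumer_callback(ch, method, properties, body):
    # Called on the pika consumer thread; hand the work over to Tornado.
    main_loop.add_callback(broadcast, body)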
I am trying to build a basic messaging system, but I have hit a major roadblock. I can't get the window to close without it becoming unresponsive and me having to kill it in the Task Manager. From what I've read online, it sounds like I need to call sys.exit(0) to exit all the threads and connections. I have been stuck on this problem for days, so I would really appreciate an answer and an explanation of why it doesn't work! The meat of the problem is in the close_window() function. It is runnable provided you have a basic server that accepts a connection. Thanks!
import wx
import socket
import threading
import sys

class oranges(wx.Frame):
    def __init__(self, parent, id):
        ## Unimportant stuff
        wx.Frame.__init__(self, parent, id, " Retro Message", size=(500, 500))
        self.frame = wx.Panel(self)
        self.input_box = wx.TextCtrl(self.frame, -1, pos=(15, 350), size=(455, 120),
                                     style=wx.NO_BORDER | wx.TE_MULTILINE)
        self.messaging_box = wx.TextCtrl(self.frame, -1, pos=(15, 15), size=(455, 285),
                                         style=wx.NO_BORDER | wx.TE_MULTILINE | wx.TE_READONLY)
        send_button = wx.Button(self.frame, label="Send", pos=(350, 315), size=(75, 40))
        self.Bind(wx.EVT_BUTTON, self.sender, send_button)
        self.Bind(wx.EVT_CLOSE, self.close_window)
        self.counter = 1
        self.socket_connect = socket.socket()
        self.setup()

    def sender(self, event):
        self.socket_connect.send(self.input_box.GetValue())
        self.input_box.Clear()
        self.Refresh()

    ## Important stuff
    def close_window(self, event):  # This is the function in question
        self.counter = 0
        self.socket_connect.shutdown(socket.SHUT_RDWR)
        sys.exit(0)

    def setup(self):
        self.ip_enter = wx.TextEntryDialog(None, "Enter in the IP:", "Setup", "192.168.1.1")
        if self.ip_enter.ShowModal() == wx.ID_OK:
            self.offical_ip = self.ip_enter.GetValue()
            try:
                self.socket_connect.connect((self.offical_ip, 5003))
                self.username = "false"  # Tells the server just to give the user an IP username
                self.Thread1 = threading.Thread(target=self.listening_connect)
                self.Thread1.start()
            except socket.error:
                self.error_connect()
        else:
            sys.exit(0)

    def listening_connect(self):
        self.socket_connect.send(self.username)
        while self.counter != 0:
            data = self.socket_connect.recv(1024)
            self.messaging_box.AppendText(data)
            self.Refresh()
            if not data:
                break
        self.socket_connect.close()

    def error_connect(self):
        pop_ups = wx.MessageDialog(None, "Failed to Connect to Server!", 'Error', wx.OK)
        pop_ups.ShowModal()
        self.setup()

if __name__ == "__main__":
    app = wx.App(False)
    window = oranges(parent=None, id=-1)
    window.Show()
    app.MainLoop()
Here is a basic server program that should work with it (I am unable to test it, but it is very similar to the real one):
import socket

HOST = '192.168.1.1'
PORT = 5003

s = socket.socket()
s.bind((HOST, PORT))
s.listen(1)
c, addr = s.accept()
while True:
    data = c.recv(1024)
    if not data:
        break
c.close()
You need to wait for the thread to end. Otherwise it's probably going to make the script hang. Why? Well, the thread is separate from the GUI thread, so it doesn't get killed just because you closed down your wxPython application. Here is what I would recommend:
def close_window(self, event):
    self.Thread1.join()
    self.Destroy()
This makes the script wait for the thread to finish before closing the application. If you want the frame to disappear immediately, call self.Hide() before the join. Another method would be to put some logic in your thread that lets you send it a message telling it the application is shutting down, so the thread knows to abort.
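A minimal sketch of that second approach for the oranges class above, assuming a threading.Event plus a socket timeout so the listening loop can notice the shutdown request (the stop_event name and the 1-second timeout are illustrative):

# In __init__: create the flag and give recv() a timeout so the loop can wake up.
#     self.stop_event = threading.Event()
#     self.socket_connect.settimeout(1.0)

def listening_connect(self):
    self.socket_connect.send(self.username)
    while not self.stop_event.is_set():
        try:
            data = self.socket_connect.recv(1024)
        except socket.timeout:
            continue  # no data yet; re-check the stop flag
        if not data:
            break
        self.messaging_box.AppendText(data)
    self.socket_connect.close()

def close_window(self, event):
    self.stop_event.set()   # tell the listener to stop
    self.Thread1.join()     # wait for it to exit its loop
    self.Destroy()          # then tear down the frame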
You should probably check out the following Stack answer:
Is there any way to kill a Thread in Python?
Background
The purpose of this project is to create an SMS-based kill switch for a program I have running locally. The plan is to create a web socket connection between the local program and an app hosted on Heroku. Using Twilio, receiving an SMS will trigger a POST request to this app. If it comes from a number on my whitelist, the application should send a command to the local program to shut down.
Problem
What can I do to find a reference to the namespace so that I can broadcast a message to all connected clients from a POST request?
Right now I am simply creating a new web socket client, connecting it, and sending the message, because I can't seem to figure out how to get access to the namespace object in a way that lets me call an emit or broadcast.
Server Code
from gevent import monkey
from flask import Flask, Response, render_template, request
from socketio import socketio_manage
from socketio.namespace import BaseNamespace
from socketio.mixins import BroadcastMixin
from time import time
import twilio.twiml
from socketIO_client import SocketIO  # only necessary because of the hack solution
import socketIO_client

monkey.patch_all()

application = Flask(__name__)
application.debug = True
application.config['PORT'] = 5000

# White list
callers = {
    "+15555555555": "John Smith"
}

# Part of 'hack' solution
stop_namespace = None
socketIO = None

# Part of 'hack' solution
def on_connect(*args):
    global stop_namespace
    stop_namespace = socketIO.define(StopNamespace, '/chat')

# Part of 'hack' solution
class StopNamespace(socketIO_client.BaseNamespace):
    def on_connect(self):
        self.emit("join", 'server@email.com')
        print '[Connected]'

class ChatNamespace(BaseNamespace, BroadcastMixin):
    stats = {
        "people": []
    }

    def initialize(self):
        self.logger = application.logger
        self.log("Socketio session started")

    def log(self, message):
        self.logger.info("[{0}] {1}".format(self.socket.sessid, message))

    def report_stats(self):
        self.broadcast_event("stats", self.stats)

    def recv_connect(self):
        self.log("New connection")

    def recv_disconnect(self):
        self.log("Client disconnected")
        if self.session.has_key("email"):
            email = self.session['email']
            self.broadcast_event_not_me("debug", "%s left" % email)
            self.stats["people"] = filter(lambda e: e != email, self.stats["people"])
            self.report_stats()

    def on_join(self, email):
        self.log("%s joined chat" % email)
        self.session['email'] = email
        if not email in self.stats["people"]:
            self.stats["people"].append(email)
        self.report_stats()
        return True, email

    def on_message(self, message):
        message_data = {
            "sender": self.session["email"],
            "content": message,
            "sent": time() * 1000  # ms
        }
        self.broadcast_event_not_me("message", {"sender": self.session["email"], "content": message})
        return True, message_data

@application.route('/stop', methods=['GET', 'POST'])
def stop():
    '''Right here SHOULD simply be Namespace.broadcast("stop") or something.'''
    global socketIO
    if socketIO == None or not socketIO.connected:
        socketIO = SocketIO('http://0.0.0.0:5000')
        socketIO.on('connect', on_connect)

    global stop_namespace
    if stop_namespace == None:
        stop_namespace = socketIO.define(StopNamespace, '/chat')

    stop_namespace.emit("join", 'server@bayhill.com')
    stop_namespace.emit('message', 'STOP')
    return "Stop being processed."

@application.route('/', methods=['GET'])
def landing():
    return "This is Stop App"

@application.route('/socket.io/<path:remaining>')
def socketio(remaining):
    try:
        socketio_manage(request.environ, {'/chat': ChatNamespace}, request)
    except:
        application.logger.error("Exception while handling socketio connection",
                                 exc_info=True)
    return Response()
I borrowed code heavily from the chatzilla project, which is admittedly pretty different because I am not really working with a browser.
Perhaps socketio was a bad choice for web sockets and I should have used Tornado, but this seemed like it would work well, and this setup helped me easily separate the REST and web socket pieces.
I just use Flask-SocketIO for that.
from gevent import monkey
monkey.patch_all()

from flask import Flask
from flask.ext.socketio import SocketIO

app = Flask(__name__)
socketio = SocketIO(app)

@app.route('/trigger')
def trigger():
    socketio.emit('response',
                  {'data': 'someone triggered me'},
                  namespace='/global')
    return 'message sent via websocket'

if __name__ == '__main__':
    socketio.run(app)
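For completeness, a sketch of what the local program's side could look like with the socketIO_client library the question already uses (the host, port, and shutdown action are placeholders):

from socketIO_client import SocketIO, BaseNamespace

class GlobalNamespace(BaseNamespace):
    def on_response(self, data):
        # Fired whenever the Heroku app calls socketio.emit('response', ...).
        print('server says: {0}'.format(data))
        # A real kill switch would inspect the payload and shut the program down here.

# Connect to the Flask-SocketIO server and attach the '/global' namespace.
socketIO = SocketIO('localhost', 5000)
global_ns = socketIO.define(GlobalNamespace, '/global')
socketIO.wait()  # block and keep listening for events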
I currently have a site set up using Django. I have added gevent-socketio to add a chat function. I need to scale it, as there are quite a few users already on the site, and I can't find a way to do so.
I tried https://github.com/abourget/gevent-socketio/tree/master/examples/django_chat/chat
I am using Gunicorn and the socketio.sgunicorn.GeventSocketIOWorker worker class, so at first I thought of increasing the worker count. Unfortunately this seems to fail intermittently. I have started rewriting it to use Redis, based on a few sources I found, and now have one worker on each server behind a load balancer. However, this seems to have the same problem. I am wondering if there is some issue in the gevent-socketio code itself which does not allow it to scale.
Here is how I have started; this is just the submit-message code.
def redis_client():
    """Get a redis client."""
    return Redis(settings.REDIS_HOST, settings.REDIS_PORT, settings.REDIS_DB)

class PubSub(object):
    """
    Very simple Pub/Sub pattern wrapper
    using simplified Redis Pub/Sub functionality.

    Usage (publisher)::

        import redis
        r = redis.Redis()
        q = PubSub(r, "channel")
        q.publish("test data")

    Usage (listener)::

        import redis
        r = redis.Redis()
        q = PubSub(r, "channel")
        def handler(data):
            print "Data received: %r" % data
        q.subscribe(handler)
    """
    def __init__(self, redis, channel="default"):
        self.redis = redis
        self.channel = channel

    def publish(self, data):
        self.redis.publish(self.channel, simplejson.dumps(data))

    def subscribe(self, handler):
        redis = self.redis.pubsub()
        redis.subscribe(self.channel)
        for data_raw in redis.listen():
            if data_raw['type'] != "message":
                continue
            data = simplejson.loads(data_raw["data"])
            handler(data)


from socketio.namespace import BaseNamespace
from socketio.sdjango import namespace
from supremo.utils import redis_client, PubSub
from gevent import Greenlet

@namespace('/chat')
class ChatNamespace(BaseNamespace):

    nicknames = []
    r = redis_client()
    q = PubSub(r, "channel")

    def initialize(self):
        # Setup redis listener
        def handler(data):
            self.emit('receive_message', data)
        greenlet = Greenlet.spawn(self.q.subscribe, handler)

    def on_submit_message(self, msg):
        self.q.publish(msg)
I used parts of the code from https://github.com/fcurella/django-push-demo and gevent-socketio 0.3.5rc1 instead of rc2, and it is working now with multiple workers and load balancing.