Nested DirectConnection Signals are causing a segfault

This is a follow-up to my unanswered question here. The code, exactly as it is below, crashes with a segfault (copy/paste it into your system and run it). When I remove type=QtCore.Qt.DirectConnection from one or both of the connect() calls (thereby using QtCore.Qt.AutoConnection instead), everything runs the way it should: a widget appears, showing five progress bars filling up and then emptying in an endless loop.
from PySide import QtCore, QtGui
import time

class Worker(QtCore.QThread):
    sig_worker_update_progress = QtCore.Signal(int, int)

    def __init__(self, thread_id, *args, **kwargs):
        super(Worker, self).__init__(*args, **kwargs)
        self.thread_id = thread_id
        self.stop_requested = False

    def slot_interrupt(self):
        self.stop_requested = True

    def run(self):
        progress = 0
        while True:
            self.sig_worker_update_progress.emit(self.thread_id, progress % 100)
            progress += 1
            if self.stop_requested:
                break
            else:
                time.sleep(0.1)

class Controller(QtCore.QObject):
    sig_controller_update_progress = QtCore.Signal(int, int)

    def __init__(self, num_workers, *args, **kwargs):
        super(Controller, self).__init__(*args, **kwargs)
        self.workers = []
        for i in range(num_workers):
            self.workers.append(Worker(i))
            self.workers[i].sig_worker_update_progress.connect(
                self.slot_worker_update_progress,
                type=QtCore.Qt.DirectConnection)
        for worker in self.workers:
            worker.start()

    def slot_worker_update_progress(self, thread_id, progress):
        # Do
        # Stuff
        self.sig_controller_update_progress.emit(thread_id, progress)

class Monitor(QtGui.QWidget):
    def __init__(self, num_workers, *args, **kwargs):
        super(Monitor, self).__init__(*args, **kwargs)
        main_layout = QtGui.QVBoxLayout()
        self.setLayout(main_layout)
        self.progress_bars = []
        for _ in range(num_workers):
            progress_bar = QtGui.QProgressBar()
            main_layout.addWidget(progress_bar)
            self.progress_bars.append(progress_bar)
        self.controller = Controller(num_workers)
        self.controller.sig_controller_update_progress.connect(
            self.slot_controller_update_progress,
            type=QtCore.Qt.DirectConnection)

    def slot_controller_update_progress(self, thread_id, progress):
        self.progress_bars[thread_id].setValue(progress)

if __name__ == "__main__":
    app = QtGui.QApplication([])
    monitor = Monitor(5)
    monitor.show()
    app.exec_()
Why does using two nested DirectConnection signals cause a segfault? If Qt does not want you to do that, why isn't a more informative error given?
I am using PySide v1.2.2 which wraps the Qt 4.8 framework.

I found a satisfying explanation here. Apparently, emitting a signal over a DirectConnection is equivalent to a direct function call: the slot runs in the thread that emits the signal. So when both connections are direct, the GUI ends up being updated from a Worker thread after all, and, as mentioned on my other question, threads other than the main one are not allowed to touch the GUI. The issue is NOT with nesting DirectConnections per se.
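For completeness, a minimal sketch of the fix in Monitor.__init__ (my own suggestion, not part of the linked explanation): leave the last hop into the widget at the default AutoConnection, or make it explicitly queued, so slot_controller_update_progress runs on the GUI thread even though the Controller slot still runs on the Worker thread.

# Inside Monitor.__init__, replacing the direct connection:
self.controller.sig_controller_update_progress.connect(
    self.slot_controller_update_progress)  # AutoConnection: queued across threads

# or, explicitly:
self.controller.sig_controller_update_progress.connect(
    self.slot_controller_update_progress,
    type=QtCore.Qt.QueuedConnection)

With AutoConnection, Qt checks at emit time whether the emitting thread differs from the receiver's thread and, if so, queues the call onto the receiver's event loop, which is exactly what a GUI update needs.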

Related

close django db connections after ThreadPoolExecutor shutdown

I want to use ThreadPoolExecutor in Django and reuse the db connection within each thread, to avoid creating a db connection for every sub_task. But the db connections are not closed after the ThreadPoolExecutor is shut down. I know I can close the connection at the end of sub_task, but with that approach a connection is created for each task and never reused. ThreadPoolExecutor has an initializer parameter, but there is nothing like an on_destroy hook that is called when a thread is destroyed.
main_task runs under Celery in my setup.
def sub_task():
    # some db operations
    ...

def main_task(max_workers):
    with ThreadPoolExecutor(max_workers=max_workers) as executor:
        for i in range(10):
            executor.submit(sub_task)
I wrote a custom ThreadPoolExecutor that keeps a list of the threads' db connections (the thread initializer function records a pointer to each connection handler) and closes all of them when the executor shuts down. Note that if you want to pass your own initializer function as well, be careful to pass it as a keyword argument, not a positional one:
import traceback
from concurrent.futures import ThreadPoolExecutor

from django import db

class DBSafeThreadPoolExecutor(ThreadPoolExecutor):
    def generate_initializer(self, initializer):
        def new_initializer(*args, **kwargs):
            self, *args = args
            try:
                if initializer != None:
                    initializer(*args, **kwargs)
            finally:
                self.on_thread_init()
        return new_initializer

    def on_thread_init(self):
        for curr_conn in db.connections.all():
            curr_conn.connection = None
            self.threads_db_conns.append(curr_conn)

    def on_executor_shutdown(self):
        for curr_conn in self.threads_db_conns:
            try:
                curr_conn.inc_thread_sharing()
                curr_conn.close()
            except Exception:
                print(f'error while closing connection {curr_conn.alias}')
                traceback.print_exc()

    def __init__(self, *args, **kwargs):
        kwargs['initializer'] = self.generate_initializer(kwargs.get('initializer'))
        kwargs['initargs'] = (self,) + (kwargs.get('initargs') or ())
        self.threads_db_conns = []
        super().__init__(*args, **kwargs)

    def shutdown(self, *args, **kwargs):
        super().shutdown(*args, **kwargs)
        self.on_executor_shutdown()
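For reference, here is a rough usage sketch under the question's setup (main_task and sub_task are the question's names; the worker count is arbitrary): simply swap in the custom executor class and the threads' connections are closed on shutdown.

def main_task(max_workers=4):
    # Same structure as the question's main_task, but the custom executor
    # tracks each worker thread's db connections and closes them when the
    # pool shuts down.
    with DBSafeThreadPoolExecutor(max_workers=max_workers) as executor:
        for _ in range(10):
            executor.submit(sub_task)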

How to prevent recursion in django post_save signal?

I tried this answer https://stackoverflow.com/a/28369908/9902571 in my model and did the following:
from functools import wraps

def prevent_recursion(func):
    @wraps(func)
    def no_recursion(sender, instance=None, **kwargs):
        if not instance:
            return
        if hasattr(instance, '_dirty'):
            return
        func(sender, instance=instance, **kwargs)
        try:
            instance._dirty = True
            instance.save()
        finally:
            del instance._dirty
    return no_recursion
My model:
class Journal(models.Model):
    user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE, null=True, blank=True)
    company = models.ForeignKey(company, on_delete=models.CASCADE, null=True, blank=True, related_name='Companyname')
    date = models.DateField(default=datetime.date.today)
    voucher_id = models.PositiveIntegerField(blank=True, null=True)
    by = models.ForeignKey(ledger1, on_delete=models.CASCADE, related_name='Debitledgers')
    to = models.ForeignKey(ledger1, on_delete=models.CASCADE, related_name='Creditledgers')
    debit = models.DecimalField(max_digits=10, decimal_places=2, null=True)
    credit = models.DecimalField(max_digits=10, decimal_places=2, null=True)
My signal:
@receiver(post_save, sender=journal)
@prevent_recursion
def pl_journal(sender, instance, created, **kwargs):
    if created:
        if instance.by.group1_Name.group_Name == 'Indirect Expense':
            Journal.objects.update_or_create(
                user=instance.user, company=instance.company, date=instance.date,
                voucher_id=instance.id, voucher_type="Journal", by=instance.by,
                to=ledger1.objects.filter(user=instance.user, company=instance.company,
                                          name__icontains='Profit & Loss A/c').first(),
                debit=instance.debit, credit=instance.credit)
But the RecursionError "maximum recursion depth exceeded while calling a Python object" still occurs when creating a Journal object.
I want to handle the post_save signal for the same model (Journal); when the condition in the handler matches, it creates another Journal, so the handler gets called recursively.
Can anyone tell me what is wrong in my code?
Thank you
It is a little late, but it may help others too. Instead of this, you can add dispatch_uid="identifier" when connecting the signal and it will work fine.
post_save.connect(function_name, sender=Model, dispatch_uid="identifier")
or
@receiver(post_save, sender=Model, dispatch_uid="identifier")
doc: https://docs.djangoproject.com/en/4.1/topics/signals/#preventing-duplicate-signals
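Applied to the question's receiver, that would look roughly like this (a sketch; it assumes the sender really is the Journal model, and the dispatch_uid string is arbitrary, it only has to be unique per receiver):

# Registering the handler with a dispatch_uid so it can never be
# connected more than once, as suggested above.
@receiver(post_save, sender=Journal, dispatch_uid="pl_journal_post_save")
@prevent_recursion
def pl_journal(sender, instance, created, **kwargs):
    ...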

DRY principle in signals

Django 1.11.1
Below are two signal handlers. They are about saving history.
The only differences are the signal (post_save vs post_delete) and the operation ("+" or "-").
The code is mostly duplicated. Could you help me understand how I can adhere to the DRY principle here?
@receiver(post_save, sender=FramePlace)
def save_add_place(sender, **kwargs):
    current_request = CrequestMiddleware.get_request()
    author = current_request.user
    instance = kwargs.get('instance')
    frame = instance.frame
    place = instance.place
    FramePlaceHistory.objects.create(author=author,
                                     operation="+",
                                     frame=frame,
                                     place=place)

@receiver(post_delete, sender=FramePlace)
def save_delete_place(sender, **kwargs):
    current_request = CrequestMiddleware.get_request()
    author = current_request.user
    instance = kwargs.get('instance')
    frame = instance.frame
    place = instance.place
    FramePlaceHistory.objects.create(author=author,
                                     operation="-",
                                     frame=frame,
                                     place=place)
Just put your code in a function; that keeps the code simple (KISS) and you don't repeat yourself:
@receiver(post_save, sender=FramePlace)
def save_add_place(sender, **kwargs):
    create_frame("+", kwargs)

@receiver(post_delete, sender=FramePlace)
def save_delete_place(sender, **kwargs):
    create_frame("-", kwargs)

def create_frame(operation, kwargs):
    current_request = CrequestMiddleware.get_request()
    author = current_request.user
    instance = kwargs.get('instance')
    frame = instance.frame
    place = instance.place
    FramePlaceHistory.objects.create(
        author=author,
        operation=operation,
        frame=frame,
        place=place
    )
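A slightly more compact variant of the same idea, sketched here only as an alternative (it assumes the same FramePlace, FramePlaceHistory and CrequestMiddleware objects as above), is a single handler parameterised at connect time with functools.partial:

from functools import partial
from django.db.models.signals import post_save, post_delete

def save_place_history(operation, sender, instance, **kwargs):
    # One handler for both signals; the operation is bound when connecting.
    current_request = CrequestMiddleware.get_request()
    FramePlaceHistory.objects.create(author=current_request.user,
                                     operation=operation,
                                     frame=instance.frame,
                                     place=instance.place)

# weak=False is required because the partial objects have no other reference
# and would otherwise be garbage-collected, silently dropping the receivers.
post_save.connect(partial(save_place_history, "+"), sender=FramePlace, weak=False)
post_delete.connect(partial(save_place_history, "-"), sender=FramePlace, weak=False)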

Python freezes when PyQT4 mixes with BaseHTTPServer

I have an error in my Python code; I'm new to Python and also new to PyQt. What I'm trying to do is build a basic HTTP proxy server with a GUI. I've done it in the console, but when I tried to add a GUI I started getting an error.
Here is my code, any help is appreciated.
import BaseHTTPServer, SocketServer, sys
from PyQt4 import QtCore, QtGui
from Ui_MiniGui import Ui_MainWindow

class ThreadingHTTPServer(SocketServer.ThreadingMixIn, BaseHTTPServer.HTTPServer):
    pass

class webServer(QtCore.QThread):
    log = QtCore.pyqtSignal(object)

    def __init__(self, parent=None):
        QtCore.QThread.__init__(self, parent)

    def run(self):
        self.log.emit("Listening On Port 1805")
        Handler = BaseHTTPServer.BaseHTTPRequestHandler

        def do_METHOD(self):
            method = self.command
            # I have trouble here: how can I emit the signal back to the log?
            # self.log.emit(method) does not work, Python does not crash
            # webServer.log.emit(method) does not work, Python does not crash
            # the one below does not work and Python crashes immediately
            webServer().log.emit(method)

        Handler.do_GET = do_METHOD
        self.httpd = ThreadingHTTPServer(("", 1805), Handler)
        self.httpd.serve_forever()

class Form(QtGui.QMainWindow):
    def __init__(self, parent=None):
        QtGui.QWidget.__init__(self, parent)
        self.ui = Ui_MainWindow()
        self.ui.setupUi(self)
        self.server = webServer()
        self.server.log.connect(self.write_to_textEdit)
        self.server.start()

    def write_to_textEdit(self, data):
        print data
        self.ui.textEdit.setText(data)

if __name__ == "__main__":
    app = QtGui.QApplication(sys.argv)
    myapp = Form()
    myapp.show()
    sys.exit(app.exec_())
In this line
webServer().log.emit(method)
you create a new webServer instance and emit its log signal. As this is a new object, its signal hasn't been connected to anything, so emitting it does nothing.
To emit the signal on the right object, you could do the following:
def run(self):
    ...
    server = self
    def do_METHOD(self):
        ...
        server.log.emit(self)
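Filled in against the question's run() method, that pattern would look roughly like this (a sketch, assuming you still want to emit the request method string as the original do_METHOD does):

def run(self):
    self.log.emit("Listening On Port 1805")
    Handler = BaseHTTPServer.BaseHTTPRequestHandler
    server = self  # capture the QThread so the nested handler can reach its signal

    def do_METHOD(handler_self):
        method = handler_self.command
        # Emit on the webServer instance that Form actually connected to; the
        # cross-thread emit is delivered to the GUI thread as a queued call.
        server.log.emit(method)

    Handler.do_GET = do_METHOD
    self.httpd = ThreadingHTTPServer(("", 1805), Handler)
    self.httpd.serve_forever()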

django signals received by only one function

I have a model that sends signal:
class WMTransaction(models.Model):
    def save(self, *args, **kwargs):
        if self.status == 'completed':
            self.completed = datetime.datetime.now()
            try:
                old = WMTransaction.objects.get(pk=self.pk)
                if old.status == 'processing':
                    print 'sending signal'
                    payment_done.send(self)
            except:
                pass
        super(WMTransaction, self).save(*args, **kwargs)
Also I have receivers in 2 modules:
@receiver(payment_done, dispatch_uid="make_this_signal_unique", weak=False)
def subscribe(sender, **kwargs):
    print 'subscribing'
    # processing
And:
@receiver(payment_done, dispatch_uid="this_signal_is_also_unique", weak=False)
def buy(sender, **kwargs):
    print 'buying'
    # processing
The problem is that the subscribe function is called, but buy isn't... Both modules are in installed apps, and other functions from these modules work correctly. What's the problem with the signals?
Has the module defining buy (call it module_B) actually been imported, so that its definition gets executed? Check payment_done.receivers before the payment_done.send line.
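One common cause, sketched here as an assumption about the setup (the app and module names below are hypothetical): the module defining buy is never imported at startup, so its @receiver decorator never runs. Importing it from the app's AppConfig.ready() fixes that.

# shop/apps.py -- 'shop' and 'signals' are placeholder names, adjust to your project
from django.apps import AppConfig

class ShopConfig(AppConfig):
    name = 'shop'

    def ready(self):
        # Importing the module is what executes the @receiver decorators
        # and registers buy with payment_done.
        from . import signals  # noqa: F401

As a quick check, printing payment_done.receivers right before payment_done.send(self) should list two entries once both handlers are registered.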