Python user input in child process - python-2.7

I'm trying to create a child process that can take input through raw_input() or input(), but it fails with an end-of-file error, EOFError: EOF when asking for input, as soon as it tries to read.
I'm doing this to experiment with multiprocessing in Python, and I remember this working easily in C. Is there a workaround that doesn't involve pipes or queues from the main process to its child? I'd really like the child to handle user input itself.
from multiprocessing import Process

def child():
    print 'test'
    message = raw_input() #this is where this process fails
    print message

def main():
    p = Process(target = child)
    p.start()
    p.join()

if __name__ == '__main__':
    main()
I wrote some test code that hopefully shows what I'm trying to achieve.

My answer is taken from here: Is there any way to pass 'stdin' as an argument to another process in python?
I have modified your example and it seems to work. multiprocessing replaces the child's stdin (it reopens it on os.devnull), so the trick is to duplicate the parent's stdin file descriptor before starting the child and hand the duplicate in as an argument:
from multiprocessing.process import Process
import sys
import os

def child(newstdin):
    sys.stdin = newstdin
    print 'test'
    message = raw_input() #this is where this process doesn't fail anymore
    print message

def main():
    newstdin = os.fdopen(os.dup(sys.stdin.fileno()))
    p = Process(target = child, args=(newstdin,))
    p.start()
    p.join()

if __name__ == '__main__':
    main()

Related

multiprocessing pool hangs in jupyter notebook

I have a very simple script which is the following:
import multiprocessing as multi

def call_other_thing_with_multi():
    P = multi.Pool(3)
    P.map(other_thing, range(0,5))
    P.join()

def other_thing(arg):
    print(arg)
    return arg**2.

call_other_thing_with_multi()
When I call this, my code hangs forever. This is on Windows with Python 2.7.
Thanks for any guidance!
As per the documentation, you need to call close() before join():
import multiprocessing as multi

def call_other_thing_with_multi():
    P = multi.Pool(3)
    P.map(other_thing, range(0,5))
    P.close() # <-- calling close before P.join()
    P.join()
    print('END')

def other_thing(arg):
    print(arg)
    return arg**2.

call_other_thing_with_multi()
Prints:
0
1
2
3
4
END
EDIT: Better still, use a context manager so you can't forget to call close():
def call_other_thing_with_multi():
    with multi.Pool(3) as P:
        P.map(other_thing, range(0,5))
    print('END')
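Note that multiprocessing.Pool only supports the context-manager protocol from Python 3.3 onwards; on the question's Python 2.7 a similar effect can be had with contextlib.closing. A minimal sketch of that variant (my own adaptation, not part of the original answer, reusing other_thing from above):

import contextlib
import multiprocessing as multi

def call_other_thing_with_multi():
    # closing() guarantees P.close() runs when the block exits
    with contextlib.closing(multi.Pool(3)) as P:
        P.map(other_thing, range(0, 5))
    P.join()  # wait for the workers to finish after close()
    print('END')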

Why, after pyinstaller builds the .exe, is all the subprocess stdout printed to the QTextBrowser at once?

PyCharm runs my code correctly and prints the subprocess stdout line by line to the Qt widget (a QTextBrowser), but after packaging it into an .exe with pyinstaller, all of the stdout is printed at once, only after the subprocess has finished, which is not the expected result.
I tried using flush() and stdout.close() in the subprocess, but it behaves the same.
from Queue import Queue, Empty
from threading import Thread

class NonBlockingStreamReader:

    def __init__(self, stream):
        self._s = stream
        self._q = Queue()

        def _populateQueue(stream, queue):
            while True:
                line = stream.readline()
                if line:
                    queue.put(line)
                #else:
                    #raise UnexpectedEndOfStream

        self._t = Thread(target = _populateQueue, args = (self._s, self._q))
        self._t.daemon = True
        self._t.start() #start collecting lines from the stream

    def readline(self, timeout=None):
        try:
            return self._q.get(block=timeout is not None, timeout=timeout)
        except Empty:
            return None
......
form = uic.loadUiType("data/GUI/GUI.ui")[0]

class Form(QtGui.QDialog, form):

    def __init__(self, parent=None):
        QtGui.QDialog.__init__(self, parent)
        self.setupUi(self)
        os.chdir("../../")
        self.LogAnalyzeButton.clicked.connect(self.LogAnalyzePre)
        ......

    def LogAnalyzePre(self):
        self.Console.append("Analyzing log, please wait . . . . . . ." + "\n" )
        arguments = 'python log.py %s'%(path)
        self.proc = Popen(arguments, stdin=PIPE, stdout=PIPE, stderr=PIPE, shell=True)
        nbsr = NonBlockingStreamReader(self.proc.stdout)
        while self.proc.poll() is None:
            line = nbsr.readline(0.1)
            print line
            if line:
                self.Console.insertPlainText(unicode(line, "utf-8"))
                self.Console.moveCursor(QtGui.QTextCursor.End)
            QtGui.QApplication.processEvents()
When I run the .exe, the debug cmd window shows that line is always None, and only once the subprocess has exited are the queued stdout lines printed all at once.
It turned out to be my own mistake: I hadn't copied the log.py that had the flush() calls added into the same folder as the .exe. Flushing in the subprocess does solve this stdout output issue.
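For reference, since log.py itself is not shown in the question, here is a minimal sketch of the kind of flushing meant above (the loop body is an assumption; the flush() call is the point):

# log.py (illustrative sketch -- the real script's contents are not shown in the question)
import sys
import time

for i in range(5):
    print 'progress step %d' % i
    sys.stdout.flush()  # push each line through the pipe immediately instead of buffering
    time.sleep(1)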

Redirecting stderr when using multiprocessing.Pool

I want to spawn multiple processes using multiprocessing.Pool (Python 2.7.13) and redirect the stdout / stderr of each process to a file. The problem is that it works for stdout, but not for stderr. Here's an example with a single process.
import sys
import multiprocessing as mp

def foo():
    sys.stdout = open('a.out','w')
    sys.stderr = open('a.err', 'w')
    print("this must go to a.out.")
    raise Exception('this must go to a.err.')
    return True

def run():
    pool = mp.Pool(4)
    _retvals = []
    _retvals.append( pool.apply_async(foo) )
    retvals = [r.get(timeout=10.) for r in _retvals]

if __name__ == '__main__':
    run()
Running python stderr.py in a terminal (on a MacBook) produces a.out with the correct message ("this must go to a.out"). But it produces an empty a.err, and the error message appears in the terminal window instead.
If I don't use multiprocessing.Pool and run it directly in the main thread, it produces the correct messages in both files. That means replacing run() with the following snippet:
def run():
    foo()
When using Pools, unhandled exceptions are handled by the main process. You should either redirect stderr in main(), or wrap your functions like this:
from __future__ import print_function
import sys
import traceback

def foo():
    sys.stdout = open('x.out', 'a')
    sys.stderr = open('x.err', 'a')
    try:
        print("this goes to x.out.")
        print("this goes to x.err.", file=sys.stderr)
        raise ValueError('this must go to a.err.')
    except:
        traceback.print_exc()
        raise # optional
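The first alternative, redirecting stderr in the parent, is not spelled out in the answer; a minimal sketch of what it could look like, assuming foo() itself no longer touches sys.stderr (file name and timeout taken from the question, the rest is my assumption):

import sys
import traceback
import multiprocessing as mp

def run():
    sys.stderr = open('a.err', 'w')   # the exception re-raised by get() is reported here
    pool = mp.Pool(4)
    result = pool.apply_async(foo)
    try:
        result.get(timeout=10.)
    except Exception:
        traceback.print_exc()         # print_exc() writes to the (redirected) sys.stderr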

RealTime output from a subprogram to a pyQT4 Widget

I'm trying to redirect the stdout of a subprogram to a QTextBrowser widget. (Python 2.7, Windows 7, PyQt4)
This is the subprogram; it will be in an executable file:
#test.py
import time
print ("ABC")
time.sleep(1)
print ("DEF")
time.sleep(1)
print ("GHI")
time.sleep(1)
print ("JKL")
My PyQt4 program:
from PyQt4 import QtGui
import subprocess, time, os, sys
from subprocess import Popen, PIPE

class GUI (QtGui.QWidget):

    def __init__(self):
        ...
        self.initUI()

    def initUI(self):
        ...
        self.edit = QtGui.QTextBrowser()
        grid.addWidget (self.edit, 7, 0, 5, 7)

    def run(self):
        p = Popen (['C:\\...\\test.exe'], stdout=PIPE, stderr=subprocess.STDOUT)
        while True:
            line = p.stdout.readline()
            if not line:
                break
            self.append_edit('>>>' + line)

    def append_edit(self, string):
        self.edit.append(string)

def main():
    app = QtGui.QApplication(sys.argv)
    upgrade = GUI()
    sys.exit (app.exec_())
The program I have above waits until the subprogram has finished running and then prints everything into the widget. What I want is for the program to output ABC, then DEF, and so on, with one second in between, into the widget WHILE the subprogram is running.
Edit: I can have the subprocess output to cmd just fine; however, when I print it into the QTextBrowser, it does not work.
I have seen many questions about this issue, but none of them seems to answer my question.
Thanks in advance.
Edit: I'm still new to Python. I think the snippet below may be part of the problem; it was fixed by adding QtGui.QApplication.processEvents():
def initUI(self):
    ...
    running = QtGui.QPushButton('Run')
    running.clicked.connect(self.run)

def run(self):
    time.sleep(1)
    self.append_edit('Before')
    time.sleep(2)
    self.append_edit('After')
So when I hit Run in my program, instead of printing Before first and then waiting one second, it waits about 3 seconds in total and then prints both Before and After. What causes this?
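As a sketch of where the processEvents() call mentioned above could go (my placement, not spelled out in the question): calling it after each append lets the widget repaint before the next blocking sleep:

def run(self):
    time.sleep(1)
    self.append_edit('Before')
    QtGui.QApplication.processEvents()  # let the text browser repaint before blocking again
    time.sleep(2)
    self.append_edit('After')
    QtGui.QApplication.processEvents()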
Edit: Even with QtGui.QApplication.processEvents() in my code, I'm still having the same issue: it runs the whole subprogram before showing any output in the QTextBrowser.
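One likely contributing factor (my assumption; this excerpt contains no accepted answer): as in the pyinstaller question above, the child buffers its stdout when it is attached to a pipe instead of a console, so readline() in the GUI only receives data once the buffer fills or the child exits. A sketch of test.py with explicit flushing:

#test.py -- sketch with explicit flushing so each line reaches the pipe immediately
import sys
import time

for text in ("ABC", "DEF", "GHI", "JKL"):
    print (text)
    sys.stdout.flush()
    time.sleep(1)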

Django multiprocessing and empty queue after put

I'm trying to make something like a "task manager" using a thread in Django that waits for jobs.
import multiprocessing
from Queue import Queue, Empty

def task_maker(queue_obj):
    while True:
        try:
            print queue_obj.qsize() # << always prints 0
            _data = queue_obj.get(timeout=10)
            if _data:
                _data['function'](*_data['args'], **_data['kwargs'])
        except Empty:
            pass
        except Exception as e:
            print e

tasks = Queue()
stream = multiprocessing.Process(target=task_maker, args=(tasks,))
stream.start()

def add_task(func=lambda: None, args=(), kwargs={}):
    try:
        tasks.put({
            'function': func,
            'args': args,
            'kwargs': kwargs
        })
        print tasks.qsize() # prints a normal size 1,2,3,4...
    except Exception as e:
        print e
I'm using add_task in views.py when a user makes a request.
Why is the queue in "stream" always empty? What am I doing wrong?
There are two issues with the current code: 1) with multiprocessing (but not threading), the qsize() function is unreliable, so I suggest not using it, as it is confusing; 2) you can't directly modify an object that's been taken from a queue.
Consider two processes sending data back and forth. One won't know whether the other has modified some data, because each process's data is private. To communicate, send data explicitly, with Queue.put() or using a Pipe.
The general way a producer/consumer system works is this: 1) jobs are stuffed into a queue; 2) a worker blocks, waiting for work, and when a job appears it puts the result on a different queue; 3) a manager or 'beancounter' process consumes the output from the second queue and prints it or otherwise processes it.
Have fun!
#!/usr/bin/env python

import logging, multiprocessing, sys

def myproc(arg):
    return arg*2

def worker(inqueue, outqueue):
    logger = multiprocessing.get_logger()
    logger.info('start')
    while True:
        job = inqueue.get()
        logger.info('got %s', job)
        outqueue.put( myproc(job) )

def beancounter(inqueue):
    while True:
        print 'done:', inqueue.get()

def main():
    logger = multiprocessing.log_to_stderr(
        level=logging.INFO,
    )
    logger.info('setup')

    data_queue = multiprocessing.Queue()
    out_queue = multiprocessing.Queue()
    for num in range(5):
        data_queue.put(num)

    worker_p = multiprocessing.Process(
        target=worker, args=(data_queue, out_queue),
        name='worker',
    )
    worker_p.start()

    bean_p = multiprocessing.Process(
        target=beancounter, args=(out_queue,),
        name='beancounter',
    )
    bean_p.start()

    worker_p.join()
    bean_p.join()
    logger.info('done')

if __name__=='__main__':
    main()
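One practical note on the example (my addition, not part of the original answer): both loops run while True, so the two join() calls at the end never return on their own. A common refinement is to push a sentinel value through the queues to shut the processes down, roughly like this:

STOP = 'STOP'  # arbitrary sentinel chosen for this sketch

def worker(inqueue, outqueue):
    while True:
        job = inqueue.get()
        if job == STOP:
            outqueue.put(STOP)      # pass the shutdown signal downstream
            break
        outqueue.put(myproc(job))

def beancounter(inqueue):
    while True:
        item = inqueue.get()
        if item == STOP:
            break
        print 'done:', item

# in main(), after queueing the real jobs:
#     data_queue.put(STOP)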
I've got it. I do not know why, but when I tried "threading", it worked!
import logging
from Queue import Queue, Empty
import threading

MailLogger = logging.getLogger('mail')

class TaskMaker(threading.Thread):

    def __init__(self, que):
        threading.Thread.__init__(self)
        self.queue = que

    def run(self):
        while True:
            try:
                print "start", self.queue.qsize()
                _data = self.queue.get()
                if _data:
                    print "make"
                    _data['function'](*_data['args'], **_data['kwargs'])
            except Empty:
                pass
            except Exception as e:
                print e
                MailLogger.error(e)

tasks = Queue()
stream = TaskMaker(tasks)
stream.start()

def add_task(func=lambda: None, args=(), kwargs={}):
    global tasks
    try:
        tasks.put_nowait({
            'function': func,
            'args': args,
            'kwargs': kwargs
        })
    except Exception as e:
        print e
        MailLogger.error(e)
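A likely explanation for the difference (my reading, not stated in the thread): Queue.Queue is an in-memory, thread-only queue. When it is handed to a multiprocessing.Process, the child ends up with its own copy, so items put() in the Django process afterwards never reach the worker; with threading both sides share the very same object, and with separate processes a multiprocessing.Queue (as in the producer/consumer example above) would be needed. A small fork-based demonstration (Unix only, since a Queue.Queue cannot be pickled for Windows-style process spawning):

import time
import multiprocessing
from Queue import Queue

def drain(q):
    time.sleep(1)
    print 'child sees qsize:', q.qsize()   # stays 0: the child only has its own copy

q = Queue()
p = multiprocessing.Process(target=drain, args=(q,))
p.start()
q.put('job')                               # lands only in the parent's copy
print 'parent sees qsize:', q.qsize()      # prints 1
p.join()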