XMLRPC (client/proxy) use behind a firewall - Python 2.7 - python-2.7

I'm new to XMLRPC but I need to use it (xmlrpclib in Python 2.7) to communicate with a server (www.neos-server.org) which accepts xml files. I'm behind a firewall that severely restricts outgoing and incoming traffic, but I'm able to browse the web mostly unimpeded using a webproxy.
Test 1 and 2 (below) work, but Test 3 results in an HTTP 502 error (cantconnect)
import urllib2
import xmlrpclib
import httplib
class ProxyTransport(xmlrpclib.Transport):
    """xmlrpclib Transport that routes every request through an HTTP proxy
    by using urllib2 instead of a direct httplib connection."""

    def request(self, host, handler, request_body, verbose):
        """POST `request_body` to http://host+handler via the proxy and
        return the parsed XML-RPC response."""
        self.verbose = verbose
        url = 'http://' + host + handler
        if self.verbose:
            # BUG FIX: the original built this string but never printed it,
            # so verbose mode produced no output at all.
            print("ProxyTransport URL: [%s]" % url)
        request = urllib2.Request(url)
        request.add_data(request_body)
        # XML-RPC is always a POST with an XML payload.
        request.add_header("User-Agent", self.user_agent)
        request.add_header("Content-Type", "text/xml")
        # NOTE(review): proxy address is hard-coded; consider reading it from
        # the http_proxy environment variable instead.
        proxy_handler = urllib2.ProxyHandler({"http": "MYPROXY:8080"})
        opener = urllib2.build_opener(proxy_handler)
        f = opener.open(request)
        return self.parse_response(f)
# TEST 1 - HTML fetching
def test1():
    """Sanity check: fetch plain HTML directly (no proxy handler installed)."""
    page = urllib2.urlopen("http://www.google.com").read()  # note no proxy setup here
    print(page)
# TEST 2 - XMLRPC sample server fetching
def test2():
p = ProxyTransport()
test_url = "http://betty.userland.com"
#test_server = xmlrpclib.Server(test_url) # gives <ProtocolError for betty.userland.com/RPC2: 403 WebTrap>
test_server = xmlrpclib.Server(test_url, transport=p)
test_api = "examples.getStateName(9)"
print "API: %s" % test_api
r = eval("test_server.%s" % test_api)
print "Result: ", r
# TEST 3 - XMLRPC server (NEOS)
def test3():
# Setup proxy and server
p = ProxyTransport()
NEOS_HOST = "www.neos-server.org"
NEOS_PORT = 3332
neos = xmlrpclib.Server("http://%s:%d" % (NEOS_HOST, NEOS_PORT), transport = p)
# Talk
print "Ping Neos..."
neos.ping()
# Run the three connectivity tests in increasing order of complexity.
# Guarded so importing this module does not fire network requests.
if __name__ == "__main__":
    test1()
    test2()
    test3()
I've tried a couple different solutions (https://gist.github.com/nathforge/980961, https://mail.python.org/pipermail/python-list/2006-July/367049.html) but they don't seem to work. I assume I need to be able to ping before I can send/receive xml files and results. What should I try next?

Related

Pymongo on raspberry pi 3 wait too much to raise an error

I have an issue with pymongo on a Raspberry Pi 3. When I run the script normally (with internet and database connectivity, so there is no problem writing to the database), everything works. To test error handling, I disconnect the Wi-Fi or Ethernet from the Raspberry so that an error is raised and I can handle it. But when I disconnect the internet, the script hangs on pymongo's "insert_one" command, as if waiting for the connection to come back, and the error is only raised several minutes later — around 25 minutes. That is not acceptable for me: I need to get the error immediately so I can save the readings to a CSV file instead.
from pymongo import MongoClient
from pymongo import errors
from pymongo import client_options
from pymongo import settings
url = 'emaweather.hopto.org'
port = 27017
try:
    # BUG FIX: setting client_options.ClientOptions.server_selection_timeout
    # has no effect on a client; the timeout must be passed to MongoClient
    # as serverSelectionTimeoutMS (milliseconds).
    client = MongoClient(url, port, serverSelectionTimeoutMS=1000)
    # MongoClient connects lazily, so force a round-trip now -- this is what
    # actually raises ConnectionFailure quickly when the network is down,
    # instead of blocking for many minutes inside insert_one().
    client.admin.command('ping')
    db = client['weather-mongo']
    print('Cliente: ', client)
    print('DB: ', db)
except errors.ConnectionFailure as e:
    print('Error: ', e)
def main():
    """Poll the BME280 sensor forever, inserting one reading into MongoDB
    every five seconds; PyMongo errors are printed and the loop continues."""
    while True:
        try:
            __readCSV()
            # ISO-8601 timestamp in Paraguay local time.
            stamp = pytz.utc.localize(datetime.datetime.utcnow()) \
                        .astimezone(pytz.timezone("America/Asuncion")) \
                        .isoformat()
            print (stamp)
            temperature, pressure, humidity = readBME280All()
            print (temperature)
            hum = round(humidity, 2)
            print (hum)
            press = round(pressure, 2)
            print (press)
            db.data.insert_one({
                "date": stamp,
                "temp": temperature,
                "hum": hum,
                "press": press,
            })
            print('Writed on MongoDB')
            time.sleep(5)
        except errors.PyMongoError as error:
            print (error)
# Script entry point.
if __name__ == "__main__":
    main()

Python 2.7 posting, and getting result from web site

I appreciate the help in advance. I am trying to write a python script that posts an IP address to a site referenced below, and get the results printed out in the terminal or file, and then read the file immediately after.
Here is my script:
#!/usr/bin/env python
import requests
# Address to check, read interactively (Python 2 raw_input).
IP = raw_input("Enter IP address here: ")
# Blacklist-lookup service endpoint.
Alert_URL = 'http://www.blacklistalert.org'
def submit_form():
"""Submit a form"""
payload = IP
# make a get request
resp = requests.get(Alert_URL)
print "Response to GET request: %s" % resp.content
# send POST request
resp = requests.post(Alert_URL, payload)
print "Headers from a POST request response: %s" % resp.headers
# print "HTML Response: %s" %resp.read()
if __name__ == '__main__':
submit_form()
The site has section to input IP addresses on the web page, and inspecting the site I found lines to input as follows:
<form method=POST onsubmit="document.forms[0].submit.disabled='true';">
IP or Domain <input onclick="this.value='';" name=q value=11.11.154.23>
I would like to post an IP address that I want to check to the site using the input section above somehow. For instance using raw_input to post into the 'value=' section, and get the result.
Thanks for the help.
You need to parse the PHPSESSID and post:
import requests
from bs4 import BeautifulSoup

# Post the IP in form field "q" together with the page's PHPSESSID token.
ip = raw_input("Enter IP address here: ")
data = {"q": ip} # ip goes here
url = "http://www.blacklistalert.org/"
with requests.Session() as s:
    # get the page first to parse
    # NOTE(review): no parser argument -- BeautifulSoup will pick whichever
    # parser happens to be installed; pass "html.parser" for deterministic
    # behavior across machines.
    soup = BeautifulSoup(s.get(url).content)
    # extract and add the PHPSESSID
    # NOTE(review): select_one returns None if the hidden input is absent,
    # which would make the ["value"] subscript raise TypeError.
    PHPSESSID = soup.select_one("input[name=PHPSESSID]")["value"]
    data["PHPSESSID"] = PHPSESSID
    # finally post
    res = s.post(url, data=data)
print(res)
print(res.content)

fetch website with python include j.s css

i'm trying to fetch a whole website include the JavaScript and css file while using python.
The script get a "GET" request and send back the website (local proxy).
here is my code :
class myHandler(BaseHTTPRequestHandler):
    """Tiny local proxy: forward every GET to www.ynet.co.il."""

    # Handler for the GET requests
    def do_GET(self):
        opener = urllib.FancyURLopener({})
        # BUG FIX: forward the *requested* path instead of always fetching
        # the front page. The browser asks this proxy for each CSS/JS URL,
        # and the original returned the homepage HTML for all of them --
        # which is why only the bare HTML ever rendered.
        f = opener.open("http://www.ynet.co.il" + self.path)
        # BUG FIX: a BaseHTTPRequestHandler must emit a status line and
        # terminate the headers before writing the body.
        self.send_response(200)
        self.end_headers()
        self.wfile.write(f.read())
        return
try:
    # Create a web server and define the handler to manage the
    # incoming request
    # NOTE(review): PORT_NUMBER must be defined earlier in the file.
    server = HTTPServer(('', PORT_NUMBER), myHandler)
    print 'Started httpserver on port ', PORT_NUMBER
    # Wait forever for incoming http requests
    server.serve_forever()
except KeyboardInterrupt:
    # Ctrl-C: release the listening socket before exiting.
    print '^C received, shutting down the web server'
    server.socket.close()
The result of this code is that only the HTML is delivered to the client.
Thanks a lot for the help; I have been trying to solve this for a few days with no results.

how to run two process in parallel using multiprocessing module in python

My requirement is to capture logs for a particular http request sent to server from project server log file. So have written two function and trying to execute them parallel using multiprocessing module. But only one is getting executed. not sure what is going wrong.
My two functions - run_remote_command - using paramiko module for executing the tail command on remote server(linux box) and redirecting the output to a file. And send_request - using request module to make POST request from local system (windows laptop) to the server.
Code:
import multiprocessing as mp
import paramiko
import datetime
import requests
def run_remote_command():
basename = "sampletrace"
suffixname = datetime.datetime.now().strftime("%y%m%d_%H%M%S")
filename = "_".join([basename, suffixname])
print filename
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
try:
ssh.connect(hostname='x.x.x.x',username='xxxx',password='xxxx')
except Exception as e:
print "SSH Connecting to Host failed"
print e
ssh.close()
print ssh
tail = "tail -1cf /var/opt/logs/myprojectlogFile.txt >"
cmdStr = tail + " " + filename
result = ''
try:
stdin, stdout, stderr = ssh.exec_command(cmdStr)
print "error:" +str( stderr.readlines())
print stdout
#logger.info("return output : response=%s" %(self.resp_result))
except Exception as e:
print 'Run remote command failed cmd'
print e
ssh.close()
def send_request():
request_session = requests.Session()
headers = {"Content-Type": "application/x-www-form-urlencoded"}
data = "some data "
URL = "http://X.X.X.X:xxxx/request"
request_session.headers.update(headers)
resp = request_session.post(URL, data=data)
print resp.status_code
print resp.request.headers
print resp.text
def runInParallel(*fns):
    """Run each callable in its own process and wait for all to finish."""
    workers = [mp.Process(target=fn) for fn in fns]
    for worker in workers:
        worker.start()
    for worker in workers:
        worker.join()
# Entry point: capture the remote log and fire the HTTP request concurrently.
if __name__ == '__main__':
    runInParallel(run_remote_command, send_request)
Output: only the send_request function gets executed. Even when I check the process list on the server, no tail process is being created.
200
Edited the code per the #Ilja comment

Uploading video to YouTube and adding it to playlist using YouTube Data API v3 in Python

I wrote a script to upload a video to YouTube using the YouTube Data API v3 in Python, with the help of the example given in the sample code.
I also wrote another script to add an uploaded video to a playlist, using the same YouTube Data API v3, which can be seen here.
After that I wrote a single script that both uploads a video and adds it to a playlist. In it I took care of authentication and scopes, but I am still getting a permission error. Here is my new script:
#!/usr/bin/python
import httplib
import httplib2
import os
import random
import sys
import time
from apiclient.discovery import build
from apiclient.errors import HttpError
from apiclient.http import MediaFileUpload
from oauth2client.file import Storage
from oauth2client.client import flow_from_clientsecrets
from oauth2client.tools import run
# Explicitly tell the underlying HTTP transport library not to retry, since
# we are handling retry logic ourselves.
httplib2.RETRIES = 1
# Maximum number of times to retry before giving up.
MAX_RETRIES = 10
# Always retry when these exceptions are raised.
RETRIABLE_EXCEPTIONS = (httplib2.HttpLib2Error, IOError, httplib.NotConnected,
                        httplib.IncompleteRead, httplib.ImproperConnectionState,
                        httplib.CannotSendRequest, httplib.CannotSendHeader,
                        httplib.ResponseNotReady, httplib.BadStatusLine)
# Always retry when an apiclient.errors.HttpError with one of these status
# codes is raised.
RETRIABLE_STATUS_CODES = [500, 502, 503, 504]
# OAuth client secrets file downloaded from the Google APIs console.
CLIENT_SECRETS_FILE = "client_secrets.json"
# A limited OAuth 2 access scope that allows for uploading files, but not other
# types of account access.
# NOTE(review): this scope alone is NOT sufficient for playlistItems.insert;
# that call also needs "https://www.googleapis.com/auth/youtube".
YOUTUBE_UPLOAD_SCOPE = "https://www.googleapis.com/auth/youtube.upload"
YOUTUBE_API_SERVICE_NAME = "youtube"
YOUTUBE_API_VERSION = "v3"
# Helpful message to display if the CLIENT_SECRETS_FILE is missing.
MISSING_CLIENT_SECRETS_MESSAGE = """
WARNING: Please configure OAuth 2.0
To make this sample run you will need to populate the client_secrets.json file
found at:
%s
with information from the APIs Console
https://code.google.com/apis/console#access
For more information about the client_secrets.json file format, please visit:
https://developers.google.com/api-client-library/python/guide/aaa_client_secrets
""" % os.path.abspath(os.path.join(os.path.dirname(__file__),
                                   CLIENT_SECRETS_FILE))
def get_authenticated_service():
    """Run the OAuth2 flow (credentials cached in <script>-oauth2.json) and
    return an authorized YouTube Data API v3 client."""
    # BUG FIX: the upload-only scope is not enough for playlistItems.insert,
    # which is exactly the permission error seen when this script both
    # uploads and adds to a playlist. Request the broader scope as well;
    # flow_from_clientsecrets accepts a list of scopes.
    flow = flow_from_clientsecrets(
        CLIENT_SECRETS_FILE,
        scope=[YOUTUBE_UPLOAD_SCOPE,
               "https://www.googleapis.com/auth/youtube"],
        message=MISSING_CLIENT_SECRETS_MESSAGE)
    storage = Storage("%s-oauth2.json" % sys.argv[0])
    credentials = storage.get()
    if credentials is None or credentials.invalid:
        # First run (or revoked token): open the browser consent flow.
        # NOTE(review): if an old token cached with only the upload scope
        # exists, delete the -oauth2.json file so consent is re-requested.
        credentials = run(flow, storage)
    return build(YOUTUBE_API_SERVICE_NAME, YOUTUBE_API_VERSION,
                 http=credentials.authorize(httplib2.Http()))
def initialize_upload(title,description,keywords,privacyStatus,file):
    """Upload `file` to YouTube with the given metadata, then append the new
    video to a hard-coded playlist.

    NOTE(review): the computed video id `vid` is never returned, so the
    caller that prints the result will always see None.
    """
    youtube = get_authenticated_service()
    tags = None
    if keywords:
        # Comma-separated keyword string -> list of tag strings.
        tags = keywords.split(",")
    insert_request = youtube.videos().insert(
        part="snippet,status",
        body=dict(
            snippet=dict(
                title=title,
                description=description,
                tags=tags,
                categoryId='26'  # category 26 = "Howto & Style"
            ),
            status=dict(
                privacyStatus=privacyStatus
            )
        ),
        # chunksize=-1 means that the entire file will be uploaded in a single
        # HTTP request. (If the upload fails, it will still be retried where it
        # left off.) This is usually a best practice, but if you're using Python
        # older than 2.6 or if you're running on App Engine, you should set the
        # chunksize to something like 1024 * 1024 (1 megabyte).
        media_body=MediaFileUpload(file, chunksize=-1, resumable=True)
    )
    # Blocks until the resumable upload completes; returns the video id.
    vid=resumable_upload(insert_request)
    #Here I added lines to add video to playlist
    #add_video_to_playlist(youtube,vid,"PL2JW1S4IMwYubm06iDKfDsmWVB-J8funQ")
    #youtube = get_authenticated_service()
    # Requires the broader "youtube" OAuth scope, not just youtube.upload.
    add_video_request=youtube.playlistItems().insert(
        part="snippet",
        body={
            'snippet': {
                'playlistId': "PL2JW1S4IMwYubm06iDKfDsmWVB-J8funQ",
                'resourceId': {
                    'kind': 'youtube#video',
                    'videoId': vid
                }
                #'position': 0
            }
        }
    ).execute()
def resumable_upload(insert_request):
response = None
error = None
retry = 0
vid=None
while response is None:
try:
print "Uploading file..."
status, response = insert_request.next_chunk()
if 'id' in response:
print "'%s' (video id: %s) was successfully uploaded." % (
title, response['id'])
vid=response['id']
else:
exit("The upload failed with an unexpected response: %s" % response)
except HttpError, e:
if e.resp.status in RETRIABLE_STATUS_CODES:
error = "A retriable HTTP error %d occurred:\n%s" % (e.resp.status,
e.content)
else:
raise
except RETRIABLE_EXCEPTIONS, e:
error = "A retriable error occurred: %s" % e
if error is not None:
print error
retry += 1
if retry > MAX_RETRIES:
exit("No longer attempting to retry.")
max_sleep = 2 ** retry
sleep_seconds = random.random() * max_sleep
print "Sleeping %f seconds and then retrying..." % sleep_seconds
time.sleep(sleep_seconds)
return vid
if __name__ == '__main__':
    # Hard-coded demo inputs; replace with argparse for real use.
    title="sample title"
    description="sample description"
    keywords="keyword1,keyword2,keyword3"
    privacyStatus="public"
    file="myfile.mp4"
    # NOTE(review): initialize_upload has no return statement, so vid is
    # always None here.
    vid=initialize_upload(title,description,keywords,privacyStatus,file)
    print 'video ID is :',vid
I am not able to figure out what is wrong: I am getting a permission error, even though both scripts work fine independently.
Could anyone help me figure out where I am wrong, or how to upload a video and add it to a playlist in one script?
I found the answer: the two independent scripts actually use different scopes.
The scope for uploading is "https://www.googleapis.com/auth/youtube.upload".
The scope for adding to a playlist is "https://www.googleapis.com/auth/youtube".
Since the scopes are different, I had to handle authentication for each separately.