Query between two numbers - django

Django Version 1.9.5
What I'm essentially after is this query:
SELECT *
FROM "ipaddress_setup"
WHERE 167837954 BETWEEN "start_ipaddress" AND "end_ipaddress"
In the query there may be additional WHERE conditions; here is an example of what I've got so far:
from django.core.paginator import Paginator
from database.models import IpaddressSetup
from django.db.models import Q
import ipaddress
class ServiceSearch:
    def __init__(self, request, get):
        self.request = request
        self.get = get

    def search(self):
        args = ()
        context = {}

        if 'name' in self.get and self.get['name'] is not None:
            context['name__icontains'] = self.get['name']

        if 'pool' in self.get and self.get['pool'] is not None:
            try:
                ip = ipaddress.ip_address(self.get['pool'])
                args = (Q(start_ipaddress__gte=int(ip)) | Q(end_ipaddress__lte=int(ip)),)
            except ValueError:
                pass

        if 'ipaddress_type' in self.get and self.get['ipaddress_type'] is not None:
            context['ipaddress_type__exact'] = self.get['ipaddress_type']

        if 'assigned' in self.get and self.get['assigned'] is not None:
            context['assigned__exact'] = self.get['assigned']

        if 'status' in self.get and self.get['status'] is not None:
            context['status__exact'] = self.get['status']

        result = IpaddressSetup.objects.all().filter(*args, **context).order_by('name')
        return Paginator(result, self.request.user.max_search)
This is used in a search feature for finding IP addresses in an allotted pool. I store the IPs as integers for the starting and ending addresses, in two columns named start_ipaddress and end_ipaddress.
But as you can see, I'm also allowing searches on the pool name, type of IP (IPv4/IPv6), assigned (public/reserved), and status (enabled/disabled).
The only problem I am having right now is getting the BETWEEN query working on start_ipaddress / end_ipaddress.
I've tried using GTE/LTE queries, but then it returns other IP pools that may also fall within the searched params; I'm after a more concrete way of finding IPs between the bounds of a single pool.
With my search params, my hope is to return only 1 record instead of the 3 my current query matches.
I'd be happy to supply any other details.

In the line
args = (Q(start_ipaddress__gte=int(ip)) | Q(end_ipaddress__lte=int(ip)),)
you are ORing the subqueries:
start_ip >= ip OR end_ip <= ip
That will match almost every pool. AND them instead, and flip the comparisons so the address has to fall inside the range:
args = (Q(start_ipaddress__lte=int(ip)) & Q(end_ipaddress__gte=int(ip)),)

This means OR
args = (Q(start_ipaddress__gte=int(ip)) | Q(end_ipaddress__lte=int(ip)),)
but you want AND
... WHERE "start_ipaddress" <= 167837954 AND "end_ipaddress" >= 167837954
or in Django's ORM:
context['start_ipaddress__lte'] = int(ip)
context['end_ipaddress__gte'] = int(ip)
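Applied to the search() method from the question, the pool branch then becomes a pair of context entries instead of Q objects; a minimal sketch, reusing the question's self.get handling:
if 'pool' in self.get and self.get['pool'] is not None:
    try:
        ip = ipaddress.ip_address(self.get['pool'])
        # BETWEEN: the pool must start at or below the address
        # and end at or above it
        context['start_ipaddress__lte'] = int(ip)
        context['end_ipaddress__gte'] = int(ip)
    except ValueError:
        pass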


Getting a 'ValueError: too many values to unpack' from a method that lists only one required arg. I'm not sure of the best way to unpack it

I am calling a method from a predefined class (L2Interface) from the acitoolkit module that lists only one required argument. The method returns two strings, 'encap-type' and 'encap-id'. I am floundering over the best way to unpack these values. Here is my script; the method in question is: vlans = aci.L2Interface.parse_encap(encap)
import sys
import acitoolkit.acitoolkit as aci
import requests
import re

def __init__(self, name, encap_type, encap_id, encap_mode=None):
    self.name = None
    self.encap_type = VLAN
    self.encap_id = None

def main():
    """
    Main Show Endpoints Routine
    :return: None
    """
    # Take login credentials from the command line if provided
    # Otherwise, take them from your environment variables file ~/.profile
    description = ('Simple application that logs on to the APIC'
                   ' and displays all of the Endpoints.')
    creds = aci.Credentials('apic', description)
    args = creds.get()

    # Login to APIC
    session = aci.Session(args.url, args.login, args.password, verify_ssl=False)
    resp = session.login()
    if not resp.ok:
        print('%% Could not login to APIC')
        sys.exit(0)

    # Get encap per interface
    # and store the data as tuples in a List
    data = []
    encap = 'vlan-[0-9].*'
    #vxtype = 'vxlan\-[0-9|a-z].*'
    vlans = aci.L2Interface.parse_encap(encap)
    for vlan in vlans:
        data.append((vlan.attributes['encap_type'],
                     vlan.attributes['encap_id']))

    # Display the data downloaded
    col_widths = [19, 17, 15, 15, 15]
    template = ''
    for idx, width in enumerate(col_widths):
        template += '{%s:%s} ' % (idx, width)
    print(template.format("ENCAP_TYPE", "ENCAP_ID"))

    fmt_string = []
    for i in range(0, len(col_widths)):
        fmt_string.append('-' * (col_widths[i] - 2))
    print(template.format(*fmt_string))

    for rec in data:
        print(template.format(*rec))

if __name__ == '__main__':
    try:
        main()
    except KeyboardInterrupt:
        pass
I am trying to connect to an APIC, grab L2 interfaces with encapsulation (encap) assigned and return them in a list.
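For what it's worth, if parse_encap really does return exactly two strings, plain tuple unpacking is the usual pattern. A minimal sketch, assuming the return value is a two-item sequence like ('vlan', '513'):
# Hypothetical illustration: assumes parse_encap(encap) returns exactly
# two items. A "ValueError: too many values to unpack" means the call
# returned more items than there are names on the left-hand side.
encap_type, encap_id = aci.L2Interface.parse_encap(encap)
print(encap_type, encap_id)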

Best way to download all historic Ethereum ERC721 transfers

I'd like to download all the transfer events of tokens under a given contract address.
I know etherscan provides an API endpoint for this, however it is limited to the latest 10,000 transfers (even if paginating). https://docs.etherscan.io/api-endpoints/accounts#get-a-list-of-erc721-token-transfer-events-by-address
Is there a third party who can provide this data, or is my only option to get it directly from a node (Infura, Geth, etc.)?
Thanks!
Limited to 10k transfers per contract? I know opensea events api can filter by contract address + token id and you can do before and after timestamps. But I don't know how far back they go.
This should work. You need to insert your API key (API_KEY) and the wallet address you want to explore in the main function.
The output will be a CSV file named data.csv
from requests import get
import pandas as pd

pd.options.display.max_columns = 60  ## 0 by default
pd.options.display.width = 10000     ## 80 by default
pd.options.display.max_rows = 3000

API_KEY = ""

'''
https://api.etherscan.io/api
   ?module=account
   &action=balance
   &address=0xde0b295669a9fd93d5f28d9ec85e40f4cb697bae
   &tag=latest
   &apikey=YourApiKeyToken
'''

BASE_URL = "https://api.etherscan.io/api"
ETH_VALUE = 10 ** 18

def make_api_url(module, action, address, **kwargs):
    url = f"{BASE_URL}?module={module}&action={action}&address={address}&apikey={API_KEY}"
    for key, value in kwargs.items():
        url += f"&{key}={value}"
    return url

class Collector:
    def __init__(self, start_block):
        self.start_block = start_block

    def get_erc721_transactions(self, address):
        '''
        https://api.etherscan.io/api
           ?module=account
           &action=tokennfttx
           &contractaddress=0x06012c8cf97bead5deae237070f9587f8e7a266d
           &address=0x6975be450864c02b4613023c2152ee0743572325
           &page=1
           &offset=100
           &startblock=0
           &endblock=27025780
           &sort=asc
           &apikey=YourApiKeyToken
        '''
        get_transaction_url = make_api_url("account",
                                           "tokennfttx",
                                           address,
                                           startblock=self.start_block,
                                           endblock=999999999999999999,
                                           sort='asc')
        response = get(get_transaction_url)
        data = response.json()
        temp_df = pd.json_normalize(data['result'], sep="_")
        temp_df['gasCost'] = temp_df.gasUsed.astype(float) * temp_df.gasPrice.astype(float)
        print(temp_df.tail())
        print(self.start_block)
        temp_df['type'] = 'erc721'
        return temp_df

    def aggregate_data(self, address):
        data = pd.DataFrame()
        self.start_block = 0
        while True:
            df = self.get_erc721_transactions(address)
            if df.shape[0] == 0:
                print('There are no erc721 transactions')
                break
            # the API caps each response, so page by restarting from the
            # last block seen; stop once no progress is made
            if self.start_block == df.blockNumber.iloc[-1]:
                break
            data = pd.concat([data, df])
            self.start_block = df.blockNumber.iloc[-1]
        data.head()
        data.to_csv("data.csv")

if __name__ == '__main__':
    '''Insert the wallet address you want to check'''
    address = "0x4c8CFE078a5B989CeA4B330197246ceD82764c63"
    Collector(0).aggregate_data(address)
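If you'd rather pull the data straight from a node (Infura, Geth, etc.), the raw event logs are enough, since standard ERC-721 contracts emit Transfer(address,address,uint256) with all three parameters indexed. A minimal sketch with web3.py (v6 naming); the RPC URL, contract address (BAYC here), and block range are placeholder assumptions you would swap for your own:
from web3 import Web3

# Placeholder endpoint -- substitute your own Infura/Geth RPC URL
w3 = Web3(Web3.HTTPProvider("https://mainnet.infura.io/v3/YOUR_PROJECT_ID"))

# keccak256("Transfer(address,address,uint256)"); ERC-721 indexes all three
# parameters, so from/to/tokenId all arrive as log topics
TRANSFER_TOPIC = "0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef"
CONTRACT = Web3.to_checksum_address("0xBC4CA0EdA7647A8aB7C2061c2E118A18a936f13D")

transfers = []
# placeholder block range; scan in chunks, as providers cap the range per call
for start in range(12_287_000, 12_297_000, 2_000):
    logs = w3.eth.get_logs({
        "fromBlock": start,
        "toBlock": start + 1_999,
        "address": CONTRACT,
        "topics": [TRANSFER_TOPIC],
    })
    for log in logs:
        # addresses are the last 40 hex chars of the 32-byte topics
        transfers.append({
            "from": "0x" + log["topics"][1].hex()[-40:],
            "to": "0x" + log["topics"][2].hex()[-40:],
            "token_id": int(log["topics"][3].hex(), 16),
            "block": log["blockNumber"],
        })

print(len(transfers), "transfers")
There is no 10k cap this way, but you trade it for having to manage chunked block ranges and provider rate limits yourself.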

Django redirect and modify GET parameters

I am implementing magic tokens and would like clean URLs. As a consequence, I would like to remove the token from the URL upon a successful user authentication. This is my attempt:
def authenticate_via_token(get_response):
    def middleware(request):
        if request.session.get('authenticated', None):
            pass
        else:
            token = request.GET.get('token', None)
            if token:
                mt = MagicToken.fetch_by_token(token)
                if mt:
                    request.session['authenticated'] = mt.email
                    if not request.GET._mutable:
                        request.GET._mutable = True
                    request.GET['token'] = None
                    request.GET._mutable = False
                else:
                    print("invalid token")
        response = get_response(request)
        return response
    return middleware
I.e., I would like to send /products/product-detail/3?token=piyMlVMrmYblRwHcgwPEee --> /products/product-detail/3
It's possible that there may be additional GET parameters and I would like to keep them. Any input would be appreciated!
This is the solution I ended up going for:
from django.urls import resolve, reverse
import urllib.parse

def drop_get_param(request, param):
    'helpful for redirecting while dropping a specific parameter'
    resolution = resolve(request.path_info)  # simulate resolving the request
    new_params = request.GET.copy()  # copy the parameters
    del new_params[param]  # drop the specified parameter
    url = reverse(resolution.url_name, kwargs=resolution.kwargs)  # create a base url
    if new_params:  # append the remaining parameters
        url += '?' + urllib.parse.urlencode(new_params)
    return url
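The middleware from the question can then use this helper to issue the redirect once the token has been consumed. A rough sketch, reusing MagicToken and the closure from above, with Django's standard HttpResponseRedirect:
from django.http import HttpResponseRedirect

def authenticate_via_token(get_response):
    def middleware(request):
        if not request.session.get('authenticated', None):
            token = request.GET.get('token', None)
            if token:
                mt = MagicToken.fetch_by_token(token)
                if mt:
                    request.session['authenticated'] = mt.email
                    # redirect to the same view, minus the token parameter;
                    # any other GET parameters are preserved by the helper
                    return HttpResponseRedirect(drop_get_param(request, 'token'))
        return get_response(request)
    return middleware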

How to work with a very large "allowed_domains" attribute in scrapy?

The following is my scrapy code:
def get_host_regex(self, spider):
    """Override this method to implement a different offsite policy"""
    allowed_domains = getattr(spider, 'allowed_domains', None)
    if not allowed_domains:
        return re.compile('')  # allow all by default
    regex = r'^(.*\.)?(%s)$' % '|'.join(re.escape(d) for d in allowed_domains if d is not None)
    return re.compile(regex)

def spider_opened(self, spider):
    self.host_regex = self.get_host_regex(spider)
    self.domains_seen = set()
Because allowed_domains is very big, compiling the regex raises an exception at this line:
regex = r'^(.*\.)?(%s)$' % '|'.join(re.escape(d) for d in allowed_domains if d is not None)
How do I solve this problem?
You can build your own OffsiteMiddleware variation, with a different implementation checking requests to domains not in the spider's allowed_domains.
For example, add this in a middlewares.py file:
from scrapy.spidermiddlewares.offsite import OffsiteMiddleware
from scrapy.utils.httpobj import urlparse_cached

class SimpleOffsiteMiddleware(OffsiteMiddleware):

    def spider_opened(self, spider):
        # don't build a regex, just use the list as-is
        self.allowed_hosts = getattr(spider, 'allowed_domains', [])
        self.domains_seen = set()

    def should_follow(self, request, spider):
        if self.allowed_hosts:
            host = urlparse_cached(request).hostname or ''
            # does 'www.example.com' end with 'example.com'?
            # test this for all allowed domains
            return any(host.endswith(h) for h in self.allowed_hosts)
        else:
            return True
and change your settings to disable the default OffsiteMiddleware, and add yours:
SPIDER_MIDDLEWARES = {
    'scrapy.spidermiddlewares.offsite.OffsiteMiddleware': None,
    'myproject.middlewares.SimpleOffsiteMiddleware': 500,
}
Warning: this middleware is not tested. It is a very naive implementation, and definitely not efficient (it tests the hostname against each of, say, 50,000 possible domains for each and every request).
You could use another backend to store the list and test a hostname value, like sqlite for example.
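Another in-memory option that avoids the linear scan entirely: keep the allowed domains in a set and test only the dot-suffixes of the hostname, which costs O(number of labels) per request rather than O(number of domains). A sketch along the same lines as the class above:
from scrapy.spidermiddlewares.offsite import OffsiteMiddleware
from scrapy.utils.httpobj import urlparse_cached

class SetOffsiteMiddleware(OffsiteMiddleware):

    def spider_opened(self, spider):
        # set membership checks are O(1), whatever the list size
        self.allowed_hosts = set(getattr(spider, 'allowed_domains', []) or [])
        self.domains_seen = set()

    def should_follow(self, request, spider):
        if not self.allowed_hosts:
            return True
        host = urlparse_cached(request).hostname or ''
        # test 'www.example.com', then 'example.com', then 'com'
        parts = host.split('.')
        return any('.'.join(parts[i:]) in self.allowed_hosts
                   for i in range(len(parts)))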

GeoDJango: retrieve last inserted primary key from LayerMapping

I am building an application with GeoDjango and I have the following problem:
I need to read track data from a GPX file and store it in a model's MultiLineStringField field.
This should happen in the admin interface, where the user uploads a GPX file.
I am trying to achieve the following: the data grabbed from the file should be assigned to the MultiLineStringField, while the other fields should get their values from the form.
My model is:
class GPXTrack(models.Model):
    nome = models.CharField("Nome", blank=False, max_length=255)
    slug = models.SlugField("Slug", blank=True)
    # sport natura arte/cultura
    tipo = models.CharField("Tipologia", blank=False, max_length=2, choices=TIPOLOGIA_CHOICES)
    descrizione = models.TextField("Descrizione", blank=True)
    gpx_file = models.FileField(upload_to='uploads/gpx/')
    track = models.MultiLineStringField(blank=True)
    objects = models.GeoManager()
    published = models.BooleanField("Pubblicato")
    rel_files = generic.GenericRelation(MyFiles)
    #publish_on = models.DateTimeField("Pubblicare il", auto_now_add = True)
    created = models.DateTimeField("Created", auto_now_add=True)
    updated = models.DateTimeField("Updated", auto_now=True)

    class Meta:
        #verbose_name = "struttura'"
        #verbose_name_plural = "strutture"
        ordering = ['-created']

    def __str__(self):
        return str(self.nome)

    def __unicode__(self):
        return '%s' % (self.nome)

    def put(self):
        self.slug = sluggy(self.nome)
        key = super(Foresta, self).put()
        # do something after save
        return key
In the admin.py file I have overridden the save_model method as follows:
from django.contrib.gis import admin
from trails.models import GPXPoint, GPXTrack
from django.contrib.contenttypes import generic
from django.contrib.gis.gdal import DataSource
#from gpx_mapping import GPXMapping
from django.contrib.gis.utils import LayerMapping
from django.template import RequestContext
import tempfile
import os
import pprint

class GPXTrackAdmin(admin.OSMGeoAdmin):
    list_filter = ('tipo', 'published')
    search_fields = ['nome']
    list_display = ('nome', 'tipo', 'published', 'gpx_file')
    inlines = [TrackImagesInline, TrackFilesInline]
    prepopulated_fields = {"slug": ("nome",)}

    def save_model(self, request, obj, form, change):
        """When creating a new object, set the creator field.
        """
        if 'gpx_file' in request.FILES:
            # Get
            gpxFile = request.FILES['gpx_file']
            # Save
            targetPath = tempfile.mkstemp()[1]
            destination = open(targetPath, 'wt')
            for chunk in gpxFile.chunks():
                destination.write(chunk)
            destination.close()

            # define fields of interest for LayerMapping
            track_point_mapping = {'timestamp': 'time',
                                   'point': 'POINT',
                                   }
            track_mapping = {'track': 'MULTILINESTRING'}

            gpx_file = DataSource(targetPath)
            mytrack = LayerMapping(GPXTrack, gpx_file, track_mapping, layer='tracks')
            mytrack.save()

            # remove the temp file saved
            os.remove(targetPath)

            orig = GPXTrack.objects.get(pk=mytrack.pk)
            # assign the parsed values from LayerMapping to the appropriate Field
            obj.track = orig.track
            obj.save()
As far as I know:
LayerMapping cannot be used to update a field, only to save a new one;
I cannot access a specific field of the LayerMapping object (i.e., in the code above, mytrack.track) and assign its value to a model field (i.e., obj.track) in the save_model method;
I cannot retrieve the primary key of the last saved LayerMapping object (i.e., in the code above, mytrack.pk) in order to update it with the values passed in the form for the fields not mapped in LayerMapping.mapping.
What can I do then?
I sorted it out by subclassing LayerMapping and adding a get_values() method: it is a copy of LayerMapping.save() that, instead of saving the retrieved data, returns it for any further use or manipulation.
I am using Django 1.5.
import os
import sys

from django.contrib.gis.gdal import OGRGeometry
from django.contrib.gis.utils import LayerMapping, LayerMapError
from django.core.exceptions import ObjectDoesNotExist
from django.db import transaction

class MyMapping(LayerMapping):
    def get_values(self, verbose=False, fid_range=False, step=False,
                   progress=False, silent=False, stream=sys.stdout, strict=False):
        """
        Returns the contents from the OGR DataSource Layer
        according to the mapping dictionary given at initialization.

        Keyword Parameters:
         verbose:
           If set, information will be printed subsequent to each model save
           executed on the database.

         fid_range:
           May be set with a slice or tuple of (begin, end) feature ID's to map
           from the data source. In other words, this keyword enables the user
           to selectively import a subset range of features in the geographic
           data source.

         step:
           If set with an integer, transactions will occur at every step
           interval. For example, if step=1000, a commit would occur after
           the 1,000th feature, the 2,000th feature etc.

         progress:
           When this keyword is set, status information will be printed giving
           the number of features processed and successfully saved. By default,
           progress information will be printed every 1000 features processed,
           however, this default may be overridden by setting this keyword with
           an integer for the desired interval.

         stream:
           Status information will be written to this file handle. Defaults to
           using `sys.stdout`, but any object with a `write` method is supported.

         silent:
           By default, non-fatal error notifications are printed to stdout, but
           this keyword may be set to disable these notifications.

         strict:
           Execution of the model mapping will cease upon the first error
           encountered. The default behavior is to attempt to continue.
        """
        # Getting the default Feature ID range.
        default_range = self.check_fid_range(fid_range)

        # Setting the progress interval, if requested.
        if progress:
            if progress is True or not isinstance(progress, int):
                progress_interval = 1000
            else:
                progress_interval = progress

        # Defining the 'real' save method, utilizing the transaction
        # decorator created during initialization.
        #self.transaction_decorator
        def _get_values(feat_range=default_range, num_feat=0, num_saved=0):
            if feat_range:
                layer_iter = self.layer[feat_range]
            else:
                layer_iter = self.layer

            for feat in layer_iter:
                num_feat += 1
                # Getting the keyword arguments
                try:
                    kwargs = self.feature_kwargs(feat)
                except LayerMapError as msg:
                    # Something borked the validation
                    if strict:
                        raise
                    elif not silent:
                        stream.write('Ignoring Feature ID %s because: %s\n' % (feat.fid, msg))
                else:
                    # Constructing the model using the keyword args
                    is_update = False
                    if self.unique:
                        # If we want unique models on a particular field, handle the
                        # geometry appropriately.
                        try:
                            # Getting the keyword arguments and retrieving
                            # the unique model.
                            u_kwargs = self.unique_kwargs(kwargs)
                            m = self.model.objects.using(self.using).get(**u_kwargs)
                            is_update = True

                            # Getting the geometry (in OGR form), creating
                            # one from the kwargs WKT, adding in additional
                            # geometries, and update the attribute with the
                            # just-updated geometry WKT.
                            geom = getattr(m, self.geom_field).ogr
                            new = OGRGeometry(kwargs[self.geom_field])
                            for g in new:
                                geom.add(g)
                            setattr(m, self.geom_field, geom.wkt)
                        except ObjectDoesNotExist:
                            # No unique model exists yet, create.
                            m = self.model(**kwargs)
                    else:
                        m = self.model(**kwargs)

                    try:
                        # Instead of saving, keep the keyword arguments of the
                        # feature so they can be returned to the caller.
                        pippo = kwargs
                        num_saved += 1
                        if verbose:
                            stream.write('%s: %s\n' % (is_update and 'Updated' or 'Saved', m))
                    except SystemExit:
                        raise
                    except Exception as msg:
                        if self.transaction_mode == 'autocommit':
                            # Rolling back the transaction so that other model saves
                            # will work.
                            transaction.rollback_unless_managed()
                        if strict:
                            # Bailing out if the `strict` keyword is set.
                            if not silent:
                                stream.write('Failed to save the feature (id: %s) into the model with the keyword arguments:\n' % feat.fid)
                                stream.write('%s\n' % kwargs)
                            raise
                        elif not silent:
                            stream.write('Failed to save %s:\n %s\nContinuing\n' % (kwargs, msg))

                # Printing progress information, if requested.
                if progress and num_feat % progress_interval == 0:
                    stream.write('Processed %d features, saved %d ...\n' % (num_feat, num_saved))

            # Only used for status output purposes -- incremental runs use the
            # values returned here.
            return pippo

        nfeat = self.layer.num_feat
        if step and isinstance(step, int) and step < nfeat:
            # Incremental processing is requested at the given interval (step)
            if default_range:
                raise LayerMapError('The `step` keyword may not be used in conjunction with the `fid_range` keyword.')
            beg, num_feat, num_saved = (0, 0, 0)
            indices = range(step, nfeat, step)
            n_i = len(indices)

            for i, end in enumerate(indices):
                # Constructing the slice to use for this step; the last slice is
                # special (e.g, [100:] instead of [90:100]).
                if i + 1 == n_i:
                    step_slice = slice(beg, None)
                else:
                    step_slice = slice(beg, end)
                try:
                    pippo = _get_values(step_slice, num_feat, num_saved)
                    beg = end
                except:
                    stream.write('%s\nFailed to save slice: %s\n' % ('=-' * 20, step_slice))
                    raise
            # Return the values gathered from the last slice.
            return pippo
        else:
            # Otherwise, just calling the previously defined _get_values() function.
            return _get_values()
In a custom save or save_model method you can then use:
track_mapping = {'nome': 'name',
                 'track': 'MULTILINESTRING'}
targetPath = "/my/gpx/file/path.gpx"
gpx_file = DataSource(targetPath)
mytrack = MyMapping(GPXTrack, gpx_file, track_mapping, layer='tracks')
pippo = mytrack.get_values()  # keyword arguments of the last parsed feature
obj.track = pippo['track']