django-storages EndpointConnectionError - django

Sorry for the noise but I think I am missing something and I can't find my solution. When running my collectstatic, I get the following error:
botocore.exceptions.EndpointConnectionError: Could not connect to the endpoint URL: "http://localhost:1212/test/static/gis/css/ol3.css"
Here is the following setup:
docker-compose.yaml
. . .
s3server:
image: scality/s3server:latest
restart: unless-stopped
ports:
- "1212:8000"
volumes:
- s3data:/usr/src/app/localData
- s3metadata:/usr/src/app/localMetadata
environment:
SCALITY_ACCESS_KEY_ID: newAccessKey
SCALITY_SECRET_ACCESS_KEY: newSecretKey
SSL: "FALSE"
settings.py
# AWS settings
AWS_ACCESS_KEY_ID = env.str('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = env.str('AWS_SECRET_ACCESS_KEY')
AWS_S3_REGION_NAME = env.str('AWS_S3_REGION_NAME')
AWS_STORAGE_BUCKET_NAME = env.str('AWS_STORAGE_BUCKET_NAME')
AWS_S3_ENDPOINT_URL = env.str('AWS_S3_ENDPOINT_URL')
AWS_DEFAULT_ACL = None
AWS_S3_OBJECT_PARAMETERS = {
'CacheControl': 'max-age=86400',
}
AWS_QUERYSTRING_AUTH = False
# s3 static settings
AWS_STATIC_LOCATION = 'static'
STATIC_URL = f'{AWS_S3_ENDPOINT_URL}/{AWS_STATIC_LOCATION}/'
STATICFILES_STORAGE = 'backend.storages.StaticStorage'
# s3 media settings
AWS_MEDIA_LOCATION = 'media'
MEDIA_URL = f'{AWS_S3_ENDPOINT_URL}/{AWS_MEDIA_LOCATION}/'
DEFAULT_FILE_STORAGE = 'backend.storages.PublicMediaStorage'
dev.env
AWS_STORAGE_BUCKET_NAME=test
AWS_ACCESS_KEY_ID=newAccessKey
AWS_SECRET_ACCESS_KEY=newSecretKey
AWS_S3_REGION_NAME=us-east-1
AWS_S3_ENDPOINT_URL=http://localhost:1212
backend/storages.py
class StaticStorage(S3Boto3Storage):
location = settings.AWS_STATIC_LOCATION
default_acl = "public-read"
class PublicMediaStorage(S3Boto3Storage):
location = settings.AWS_MEDIA_LOCATION
default_acl = 'public-read'
file_overwrite = False
I really don't understand why as the following script works just fine:
script.py
import logging
import boto3
from botocore.exceptions import ClientError
s3_client = boto3.client(
's3',
aws_access_key_id="newAccessKey",
aws_secret_access_key="newSecretKey",
endpoint_url='http://localhost:1212',
region_name="us-east-1",
)
def create_bucket(bucket_name):
try:
s3_client.create_bucket(
Bucket=bucket_name,
CreateBucketConfiguration={'LocationConstraint': "us-east-1"},
)
except ClientError as e:
logging.error(e)
return False
return True
if __name__ == "__main__":
create_bucket("test", region="us-east-1")
response = s3_client.list_buckets()
# Output the bucket names
print('Existing buckets:')
for bucket in response['Buckets']:
print(f' {bucket["Name"]}')
response = s3_client.upload_file(
"backend/tests/test_image.jpg",
"test",
"static/test_image",
)
s3_client.download_file('test', 'static/test_image', 'toto.jpg')

Well, inside a container, localhost is obviously not the other services. Change AWS_S3_ENDPOINT_URL=http://localhost:1212 to AWS_S3_ENDPOINT_URL=http://s3server:8000 and expose port 8000 from s3server in the compose file. The last step to make it work is to add "s3server": "us-east-1" in the config.json mounted in the Scality server.

Related

Django AWS S3 object storage boto3 media upload error

I used Django with Docker and an AWS S3 bucket for my project. I configured my settings file for the bucket and it is working, but I get an error while uploading media files — "expected string or bytes-like object" — and the Docker log shows the error at `if not VALID_BUCKET.search(bucket) and not VALID_S3_ARN.search(bucket)`. I used Django forms and a function-based view.
models.py
def user_directory_path(instance, filename):
tenant = connection.get_tenant()
return 'profile_photos/{0}/{1}'.format(tenant, filename)
class UserProfilePhoto(models.Model):
user = models.OneToOneField(User, on_delete=models.CASCADE)
profilephoto = models.ImageField(blank=True,default="profile_photos/profilephoto.png",upload_to=user_directory_path )
views.py
def userprofile(request,id):
get_object_or_404(User,id = id)
if request.user.userprofile.status == 3 or str(request.user.id) == str(id):
now_today = datetime.now(pytz.timezone('Europe/Istanbul'))
announcements=Announcements.objects.filter(announce_type="announcement")
current_page="Kullanıcı Profili"
user=User.objects.filter(id=id).first()
user_doc_create=InsuranceFile.objects.filter(file_creator=user.username)
user_doc_create_last_month=InsuranceFile.objects.filter(file_creator=user.username, created_at__gte=now()-relativedelta(months=1)).count()
ratio_of_doc = ratio_utils(user_doc_create_last_month,user_doc_create.count())
user_doc_update=InsuranceFile.objects.filter(file_updater=user.id)
user_doc_update_last_month=InsuranceFile.objects.filter(file_updater=user.id, updated_at__gte=now()-relativedelta(months=1)).count()
ratio_of_doc_update = ratio_utils(user_doc_update_last_month,user_doc_update.count())
path_check=str("/account/userprofile/"+ id)
profilephoto=UserProfilePhoto.objects.filter(user=request.user).first()
previous_profilephoto=profilephoto.profilephoto
form_user=CreateUserForm(request.POST or None , instance=request.user)
form_userprofile=UserProfileForm(request.POST or None , instance=request.user.userprofile)
form_userphoto=UserProfilePhotoForm(request.POST or None,request.FILES, instance=request.user.userprofilephoto,)
is_confirmed=False
if TOTPDevice.objects.filter(user_id=id).first():
totp=TOTPDevice.objects.filter(user_id=id).first()
is_confirmed=totp.confirmed
if request.method == 'POST':
if form_userphoto.is_valid() and form_userprofile.is_valid() and form_user.is_valid():
with transaction.atomic():
form_userprofile.save()
if str(request.FILES) != "<MultiValueDict: {}>":
upload_profile_photo(request,form_userphoto,user,previous_profilephoto)
messages.success(request,"Profil başarılı bir şekilde güncellendi.")
return HttpResponseRedirect(request.META.get('HTTP_REFERER', '/'))
return render(request,'userprofile.html',{"now_today":now_today,"ratio_of_doc_update":ratio_of_doc_update,"user_doc_update_last_month":user_doc_update_last_month,"user_doc_update":user_doc_update,"announcements":announcements,"current_page":current_page,"user_doc_create_last_month":user_doc_create_last_month,"ratio_of_doc":ratio_of_doc,"user_doc_create":user_doc_create,"path_check":path_check,"profilephoto":profilephoto,"is_confirmed":is_confirmed,"user":user,"form_userprofile":form_userprofile,"form_userphoto":form_userphoto,"form_user":form_user})
messages.warning(request,"Bu işlemi yapmaya yetkiniz bulunmamaktadır.")
return HttpResponseRedirect(request.META.get('HTTP_REFERER', '/'))
upload_profile_photo function
import boto3
def upload_profile_photo(request,form_userphoto,user,previous_profilephoto):
s3 = boto3.client('s3',
aws_access_key_id="AKIAW7UXTA7VBPUVLPGW",
aws_secret_access_key= "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")
if previous_profilephoto != "profile_photos/profilephoto.png":
s3.delete_object(Bucket='dj-crm-tenant', Key= f'media/{previous_profilephoto}')
form_userphoto.save()
settings.py
AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
AWS_STORAGE_BUCKET_NAME = os.environ.get('AWS_STORAGE_BUCKET_NAME')
AWS_S3_CUSTOM_DOMAIN = 'dj-crm-tenant.s3.amazonaws.com'
AWS_S3_OBJECT_PARAMETERS = {'CacheControl': 'max-age=86400'}
AWS_DEFAULT_ACL = 'public-read'
AWS_LOCATION = 'static'
STATICFILES_DIRS = [
os.path.join(BASE_DIR, 'static'),
]
STATIC_URL = 'https://%s/%s/' % (AWS_S3_CUSTOM_DOMAIN, AWS_LOCATION)
STATICFILES_STORAGE = 'storages.backends.s3boto3.S3Boto3Storage'
DEFAULT_FILE_STORAGE = 'dj_crm_tenant.storages.MediaStore'
storages.py
from storages.backends.s3boto3 import S3Boto3Storage
class MediaStore(S3Boto3Storage):
location = 'media'
file_overwrite = False
When I try to change my user profile photo, I get an error.
When I tried to upload a file to my S3 bucket, I got the previously mentioned error.
I am trying to upload media files to my S3 bucket using Django forms and boto3.
I followed https://github.com/veryacademy/YT-Django-Media-Static-AWS-S3 tutorial
The problem seems to be with your .env file. You cannot connect to your bucket because your key and ID are not being read by the program.
You can also install the dotenv package for managing your env files.

django storages AWS S3 SigVer4: SignatureDoesNotMatch

My configuration (very basic):
settings.py
AWS_S3_REGION_NAME = 'eu-west-3'
AWS_S3_FILE_OVERWRITE = False
# S3_USE_SIGV4 = True # if used, nothing changes
# AWS_S3_SIGNATURE_VERSION = "s3v4" # if used, nothing changes
AWS_ACCESS_KEY_ID = "xxx"
AWS_SECRET_ACCESS_KEY = "xxx"
AWS_STORAGE_BUCKET_NAME = 'xxx'
# AWS_S3_CUSTOM_DOMAIN = f'{AWS_STORAGE_BUCKET_NAME}.s3.amazonaws.com' # if used, no pre-signed urls
AWS_DEFAULT_ACL = 'private'
AWS_S3_OBJECT_PARAMETERS = {'CacheControl': 'max-age=86400'}
AWS_LOCATION = 'xxx'
DEFAULT_FILE_STORAGE = 'storages.backends.s3boto3.S3Boto3Storage'
INSTALLED_APPS = [
...,
'storages'
]
models.py
class ProcessStep(models.Model):
icon = models.FileField(upload_to="photos/process_icons/")
What I get:
Pre-signed url is generated (both in icon.url and automatically on admin page)
Pre-signed url response status code = 403 (Forbidden)
If opened, SignatureDoesNotMatch error. With text: The request signature we calculated does not match the signature you provided. Check your key and signing method.
Tried:
changing access keys (both root and IAM)
changing bucket region
creating separate storage object for icon field (same error SignatureDoesNotMatch)
changing django-storages package version (currently using the latest 1.11.1)
Opinion:
boto3 client generate_presigned_url returns url with invalid signature
Questions:
What should I do?
Why do I get the error?
Patience is a virtue!
One might have to wait up to a day for everything to start working.

How to connect Django to S3 file storage from Yandex Cloud?

There are s3 from yandex cloud
https://cloud.yandex.com/docs/storage/tools/?utm_source=console&utm_medium=empty-page&utm_campaign=storage
How can I configure Django to use it?
I) Install the boto3 and django-storages libs.
II) Add yandex_s3_storage.py file with the code below:
from storages.backends.s3boto3 import S3Boto3Storage
from sites.crm.settings import YANDEX_CLIENT_DOCS_BUCKET_NAME
class ClientDocsStorage(S3Boto3Storage):
bucket_name = YANDEX_CLIENT_DOCS_BUCKET_NAME
file_overwrite = False
III) Add the code below to settings.py:
INSTALLED_APPS = [
...
'storages',
...
]
...
# ----Yandex s3----
DEFAULT_FILE_STORAGE = 'yandex_s3_storage.ClientDocsStorage' # path to file we created before
YANDEX_CLIENT_DOCS_BUCKET_NAME = 'client-docs'
AWS_ACCESS_KEY_ID = env('AWS_ACCESS_KEY')
AWS_SECRET_ACCESS_KEY = env('AWS_SECRET_ACCESS_KEY')
AWS_S3_ENDPOINT_URL = 'https://storage.yandexcloud.net'
AWS_S3_REGION_NAME = 'storage'
IV) Add a file field to your model:
from sites.yandex_s3_storage import ClientDocsStorage
class ClientDocs(models.Model):
...
upload = models.FileField(storage=ClientDocsStorage())
...

how to save files to s3 buckets

I am trying to create a barcode image file that will save to a path in my aws s3 bucket. I don't know how to link to it. My media and static files are already on aws and are working perfectly but I don't know how to set the path for this barcodemaker function to save to the aws s3 bucket.
Thank you for your patience and guidance.
my barcode function in my view
def barcodemaker():
barcodemodel = apps.get_model('barcoder', 'barcodeModel')
employee = apps.get_model('employees', 'Employee')
data = employee.objects.filter(id=1)
try:
data2 = barcodemodel.objects.latest('id')
except:
data2 = 1002390000
naa = str(data2)
naa = int(naa[-10:])
for i in data:
id_name= str(i.id)
naa += random.randint(500, 900)
mocode = 'M-'+ id_name + '-'+ str(naa)
b = barcodemodel(barcode_num=str(mocode))
b.save()
path = (>>>PATH to aws<<<,'static','media','barcodes', mocode+'.png')
with open(path, 'wb') as f:
Code128(mocode, writer=ImageWriter()).write(f)
barcode_context = {
'mocode':mocode, 'f':f
}
return barcode_context
My Static file settings in my settings file
STATIC_URL = '/static/'
STATICFILES_DIRS = [
os.path.join(BASE_DIR,"static")
]
STATIC_ROOT = os.path.join(BASE_DIR, 'staticfiles')
MEDIA_URL = '/profile_image/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'static/images')
#S3 BUCKETS CONFIG
# S3 logins Data
AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
AWS_STORAGE_BUCKET_NAME = os.environ.get('AWS_STORAGE_BUCKET_NAME')
AWS_S3_FILE_OVERWRITE = False
AWS_DEFAULT_ACL = None
DEFAULT_FILE_STORAGE = 'storages.backends.s3boto3.S3Boto3Storage'
STATICFILES_STORAGE = 'storages.backends.s3boto3.S3Boto3Storage'
From the documentation:
import logging
import boto3
from botocore.exceptions import ClientError
def upload_file(file_name, bucket, object_name=None):
"""Upload a file to an S3 bucket
:param file_name: File to upload
:param bucket: Bucket to upload to
:param object_name: S3 object name. If not specified then file_name is used
:return: True if file was uploaded, else False
"""
# If S3 object_name was not specified, use file_name
if object_name is None:
object_name = file_name
# Upload the file
s3_client = boto3.client('s3')
try:
response = s3_client.upload_file(file_name, bucket, object_name)
except ClientError as e:
logging.error(e)
return False
return True
then (here f is in binary mode):
s3.upload_fileobj(f, "BUCKET_NAME", "OBJECT_NAME")
The code will use your environment variables for the access keys. Alternatively, you can use this syntax:
client = boto3.client(
's3',
aws_access_key_id=ACCESS_KEY,
aws_secret_access_key=SECRET_KEY,
aws_session_token=SESSION_TOKEN,
)

Django Static Files on S3: S3ResponseError: 301 Moved Permanently

I'm trying to host my Django Static and Media files on Amazon S3 and I've been following every guide out there, but I still end up getting S3ResponseError: 301 Moved Permanently errors on deployment of my Elastic Beanstalk Application when it tries to run collectstatic.
My S3 is working and I can access other files on it. I also have it set to a custom domain so you can access the same file in the following ways:
http://s3.condopilot.com.s3-eu-west-1.amazonaws.com/thumbs/big/3fca62e2150e8abec3f693a6eae8d2f79bb227fb.jpg
https://s3-eu-west-1.amazonaws.com/s3.condopilot.com/thumbs/big/3fca62e2150e8abec3f693a6eae8d2f79bb227fb.jpg
http://s3.condopilot.com/thumbs/big/3fca62e2150e8abec3f693a6eae8d2f79bb227fb.jpg
It is the third option that I want to use, but I've tried the other ones aswell. Both with and without https:// in the settings below.
My settings file look like this
#settings.py file
AWS_ACCESS_KEY_ID = 'XXX'
AWS_SECRET_ACCESS_KEY = 'XXX'
AWS_HEADERS = {
'Expires': 'Thu, 31 Dec 2099 20:00:00 GMT',
'Cache-Control': 'max-age=94608000',
}
AWS_STORAGE_BUCKET_NAME = 's3.condopilot.com'
# I have also tried setting AWS_S3_CUSTOM_DOMAIN to the following:
# - "s3-eu-west-1.amazonaws.com/%s/" % AWS_STORAGE_BUCKET_NAME
# - "s3-eu-west-1.amazonaws.com/%s" % AWS_STORAGE_BUCKET_NAME
# - "s3.condopilot.com"
AWS_S3_CUSTOM_DOMAIN = "%s.s3-eu-west-1.amazonaws.com" % AWS_STORAGE_BUCKET_NAME
AWS_S3_CALLING_FORMAT = 'boto.s3.connection.OrdinaryCallingFormat'
AWS_S3_SECURE_URLS = False # Tried both True and False
AWS_S3_URL_PROTOCOL = 'http' # Tried with and without
STATICFILES_LOCATION = 'static'
STATIC_URL = "http://%s/%s/" % (AWS_S3_CUSTOM_DOMAIN, STATICFILES_LOCATION)
STATICFILES_STORAGE = 'custom_storages.StaticStorage'
MEDIAFILES_LOCATION = 'media'
MEDIA_URL = "http://%s/%s/" % (AWS_S3_CUSTOM_DOMAIN, MEDIAFILES_LOCATION)
DEFAULT_FILE_STORAGE = 'custom_storages.MediaStorage'
The reason I have AWS_S3_CALLING_FORMAT = 'boto.s3.connection.OrdinaryCallingFormat' is because without it I get the following error:
ssl.CertificateError: hostname 's3.condopilot.com.s3.amazonaws.com' doesn't match either of '*.s3.amazonaws.com', 's3.amazonaws.com'. All advice I find online regarding that error says that OrdinaryCallingFormat should be used when bucket name contains dots, example s3.condopilot.com.
My custom storages looks like this
#custom_storages.py
from django.conf import settings
from storages.backends.s3boto import S3BotoStorage
class StaticStorage(S3BotoStorage):
location = settings.STATICFILES_LOCATION
class MediaStorage(S3BotoStorage):
location = settings.MEDIAFILES_LOCATION
And yes, my S3 bucket is set up in eu-west-1.
I think you do not need to put the S3 region in the URL. Also, if you are using the old django-storage package, replace it with django-storages-redux. You don't need the custom_storages.py file.
Keep things simple. This is enough:
from django.utils import six
DEFAULT_FILE_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
AWS_ACCESS_KEY_ID = 'XXXXXXXXXXXXXXXX'
AWS_SECRET_ACCESS_KEY = 'XxXxXxXxXxXxXxXxXxXxXxXxXxXxxXxX'
AWS_STORAGE_BUCKET_NAME = 'bucket-name'
AWS_AUTO_CREATE_BUCKET = False
AWS_QUERYSTRING_AUTH = False
AWS_EXPIRY = 60 * 60 * 24 * 7
AWS_HEADERS = {
'Cache-Control': six.b('max-age=%d, s-maxage=%d, must-revalidate' % (
AWS_EXPIRY, AWS_EXPIRY))
}
MEDIA_URL = 'https://%s.s3.amazonaws.com/' % AWS_STORAGE_BUCKET_NAME
STATICFILES_STORAGE = DEFAULT_FILE_STORAGE
STATIC_URL = MEDIA_URL