I am trying to create a barcode image file and save it to a path in my AWS S3 bucket, but I don't know how to point the code at the bucket. My media and static files are already on AWS and working perfectly; I just don't know how to set the path this barcodemaker function should save to.
Thank you for your patience and guidance.
My barcode function in my view:
def barcodemaker():
    barcodemodel = apps.get_model('barcoder', 'barcodeModel')
    employee = apps.get_model('employees', 'Employee')
    data = employee.objects.filter(id=1)
    try:
        data2 = barcodemodel.objects.latest('id')
    except barcodemodel.DoesNotExist:
        data2 = 1002390000
    naa = str(data2)
    naa = int(naa[-10:])
    for i in data:
        id_name = str(i.id)
    naa += random.randint(500, 900)
    mocode = 'M-' + id_name + '-' + str(naa)
    b = barcodemodel(barcode_num=str(mocode))
    b.save()
    path = os.path.join(>>>PATH to aws<<<, 'static', 'media', 'barcodes', mocode + '.png')
    with open(path, 'wb') as f:
        Code128(mocode, writer=ImageWriter()).write(f)
    barcode_context = {
        'mocode': mocode, 'f': f
    }
    return barcode_context
My static file settings in my settings file:
STATIC_URL = '/static/'
STATICFILES_DIRS = [
    os.path.join(BASE_DIR, "static")
]
STATIC_ROOT = os.path.join(BASE_DIR, 'staticfiles')
MEDIA_URL = '/profile_image/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'static/images')
#S3 BUCKETS CONFIG
# S3 logins Data
AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
AWS_STORAGE_BUCKET_NAME = os.environ.get('AWS_STORAGE_BUCKET_NAME')
AWS_S3_FILE_OVERWRITE = False
AWS_DEFAULT_ACL = None
DEFAULT_FILE_STORAGE = 'storages.backends.s3boto3.S3Boto3Storage'
STATICFILES_STORAGE = 'storages.backends.s3boto3.S3Boto3Storage'
From the documentation:
import logging
import boto3
from botocore.exceptions import ClientError

def upload_file(file_name, bucket, object_name=None):
    """Upload a file to an S3 bucket

    :param file_name: File to upload
    :param bucket: Bucket to upload to
    :param object_name: S3 object name. If not specified then file_name is used
    :return: True if file was uploaded, else False
    """
    # If S3 object_name was not specified, use file_name
    if object_name is None:
        object_name = file_name

    # Upload the file
    s3_client = boto3.client('s3')
    try:
        response = s3_client.upload_file(file_name, bucket, object_name)
    except ClientError as e:
        logging.error(e)
        return False
    return True
then (here f is in binary mode):
s3.upload_fileobj(f, "BUCKET_NAME", "OBJECT_NAME")
The code will use your environment variables for the access keys. Alternatively, you can use this syntax:
client = boto3.client(
    's3',
    aws_access_key_id=ACCESS_KEY,
    aws_secret_access_key=SECRET_KEY,
    aws_session_token=SESSION_TOKEN,
)
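Applied to the barcodemaker function above, here is a minimal sketch that renders the barcode into an in-memory buffer and hands it to upload_fileobj, so no local path is needed. The helper name and bucket argument are illustrative, and the static/media/barcodes/ key prefix just mirrors the path in the question:

import io
import boto3
from barcode import Code128
from barcode.writer import ImageWriter

def save_barcode_to_s3(mocode, bucket_name):
    # Render the barcode PNG into an in-memory buffer instead of a local file
    buf = io.BytesIO()
    Code128(mocode, writer=ImageWriter()).write(buf)
    buf.seek(0)  # rewind so boto3 reads from the start

    # Credentials are picked up from the environment, as in the settings above
    s3 = boto3.client('s3')
    key = 'static/media/barcodes/' + mocode + '.png'
    s3.upload_fileobj(buf, bucket_name, key)
    return key

Alternatively, since DEFAULT_FILE_STORAGE already points at S3Boto3Storage, saving the buffer through Django's default_storage.save() would route the file to the bucket without calling boto3 directly.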
Related
I used Django, Docker, and an AWS S3 bucket for my project. I configured my settings file for my bucket and it is working, but I get an error while uploading media files, "expected string or bytes-like object", and a Docker log error: if not VALID_BUCKET.search(bucket) and not VALID_S3_ARN.search(bucket). I used Django forms and a function-based view.
models.py
def user_directory_path(instance, filename):
    tenant = connection.get_tenant()
    return 'profile_photos/{0}/{1}'.format(tenant, filename)

class UserProfilePhoto(models.Model):
    user = models.OneToOneField(User, on_delete=models.CASCADE)
    profilephoto = models.ImageField(blank=True, default="profile_photos/profilephoto.png", upload_to=user_directory_path)
views.py
def userprofile(request, id):
    get_object_or_404(User, id=id)
    if request.user.userprofile.status == 3 or str(request.user.id) == str(id):
        now_today = datetime.now(pytz.timezone('Europe/Istanbul'))
        announcements = Announcements.objects.filter(announce_type="announcement")
        current_page = "Kullanıcı Profili"
        user = User.objects.filter(id=id).first()
        user_doc_create = InsuranceFile.objects.filter(file_creator=user.username)
        user_doc_create_last_month = InsuranceFile.objects.filter(file_creator=user.username, created_at__gte=now()-relativedelta(months=1)).count()
        ratio_of_doc = ratio_utils(user_doc_create_last_month, user_doc_create.count())
        user_doc_update = InsuranceFile.objects.filter(file_updater=user.id)
        user_doc_update_last_month = InsuranceFile.objects.filter(file_updater=user.id, updated_at__gte=now()-relativedelta(months=1)).count()
        ratio_of_doc_update = ratio_utils(user_doc_update_last_month, user_doc_update.count())
        path_check = str("/account/userprofile/" + id)
        profilephoto = UserProfilePhoto.objects.filter(user=request.user).first()
        previous_profilephoto = profilephoto.profilephoto
        form_user = CreateUserForm(request.POST or None, instance=request.user)
        form_userprofile = UserProfileForm(request.POST or None, instance=request.user.userprofile)
        form_userphoto = UserProfilePhotoForm(request.POST or None, request.FILES, instance=request.user.userprofilephoto)
        is_confirmed = False
        if TOTPDevice.objects.filter(user_id=id).first():
            totp = TOTPDevice.objects.filter(user_id=id).first()
            is_confirmed = totp.confirmed
        if request.method == 'POST':
            if form_userphoto.is_valid() and form_userprofile.is_valid() and form_user.is_valid():
                with transaction.atomic():
                    form_userprofile.save()
                    if str(request.FILES) != "<MultiValueDict: {}>":
                        upload_profile_photo(request, form_userphoto, user, previous_profilephoto)
                messages.success(request, "Profil başarılı bir şekilde güncellendi.")
                return HttpResponseRedirect(request.META.get('HTTP_REFERER', '/'))
        return render(request, 'userprofile.html', {"now_today": now_today, "ratio_of_doc_update": ratio_of_doc_update, "user_doc_update_last_month": user_doc_update_last_month, "user_doc_update": user_doc_update, "announcements": announcements, "current_page": current_page, "user_doc_create_last_month": user_doc_create_last_month, "ratio_of_doc": ratio_of_doc, "user_doc_create": user_doc_create, "path_check": path_check, "profilephoto": profilephoto, "is_confirmed": is_confirmed, "user": user, "form_userprofile": form_userprofile, "form_userphoto": form_userphoto, "form_user": form_user})
    messages.warning(request, "Bu işlemi yapmaya yetkiniz bulunmamaktadır.")
    return HttpResponseRedirect(request.META.get('HTTP_REFERER', '/'))
The upload_profile_photo function:
import boto3

def upload_profile_photo(request, form_userphoto, user, previous_profilephoto):
    s3 = boto3.client('s3',
                      aws_access_key_id="AKIAW7UXTA7VBPUVLPGW",
                      aws_secret_access_key="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")
    if previous_profilephoto != "profile_photos/profilephoto.png":
        s3.delete_object(Bucket='dj-crm-tenant', Key=f'media/{previous_profilephoto}')
    form_userphoto.save()
settings.py
AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
AWS_STORAGE_BUCKET_NAME = os.environ.get('AWS_STORAGE_BUCKET_NAME')
AWS_S3_CUSTOM_DOMAIN = 'dj-crm-tenant.s3.amazonaws.com'
AWS_S3_OBJECT_PARAMETERS = {'CacheControl': 'max-age=86400'}
AWS_DEFAULT_ACL = 'public-read'
AWS_LOCATION = 'static'
STATICFILES_DIRS = [
    os.path.join(BASE_DIR, 'static'),
]
STATIC_URL = 'https://%s/%s/' % (AWS_S3_CUSTOM_DOMAIN, AWS_LOCATION)
STATICFILES_STORAGE = 'storages.backends.s3boto3.S3Boto3Storage'
DEFAULT_FILE_STORAGE = 'dj_crm_tenant.storages.MediaStore'
storages.py
from storages.backends.s3boto3 import S3Boto3Storage

class MediaStore(S3Boto3Storage):
    location = 'media'
    file_overwrite = False
When I try to change my user profile photo, I get an error.
When I tried to upload a file to my S3 bucket, I got the previously mentioned error.
I am trying to upload media files to my S3 bucket using Django forms and boto3.
I followed the https://github.com/veryacademy/YT-Django-Media-Static-AWS-S3 tutorial.
The problem seems to be with your .env file. You cannot connect to your bucket because your key and ID are not being read by the program.
You can also install the python-dotenv package for managing your env files.
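For reference, a minimal sketch of wiring this up with python-dotenv (assuming the .env file sits next to manage.py; the variable names match the settings in the question):

# settings.py
import os
from pathlib import Path
from dotenv import load_dotenv  # pip install python-dotenv

BASE_DIR = Path(__file__).resolve().parent.parent

# Read key/value pairs from .env into os.environ before they are used below
load_dotenv(BASE_DIR / '.env')

AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
AWS_STORAGE_BUCKET_NAME = os.environ.get('AWS_STORAGE_BUCKET_NAME')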
I am working on a REST API (using Django REST Framework). I am trying to upload a video by sending a POST request to the endpoint I made.
Issue
The video does upload to the S3 bucket, but the upload progress shows 100% within a couple of seconds, however large the file I upload is.
Why is this happening, and how can I solve it?
PS: Previously I was uploading to local storage, and the upload progress worked fine.
I am using React.
First of all, make sure you've installed these libraries: boto3==1.14.53, botocore==1.17.53, s3transfer==0.3.3, django-storages==1.10.
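For example:
pip install boto3==1.14.53 botocore==1.17.53 s3transfer==0.3.3 django-storages==1.10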
settings.py :
INSTALLED_APPS = [
    'storages',
]
AWS_ACCESS_KEY_ID = 'your-key-id'
AWS_SECRET_ACCESS_KEY = 'your-secret-key'
AWS_STORAGE_BUCKET_NAME = 'your-bucket-name'
AWS_S3_CUSTOM_DOMAIN = '%s.s3.amazonaws.com' % AWS_STORAGE_BUCKET_NAME
AWS_S3_OBJECT_PARAMETERS = {
    'CacheControl': 'max-age=86400',
}
DEFAULT_FILE_STORAGE = 'your_project_name.storage_backends.MediaStorage'  # a dotted Python path, so no hyphens
MEDIA_URL = "https://%s/" % AWS_S3_CUSTOM_DOMAIN
#File upload setting
BASE_URL = 'http://example.com'
FILE_UPLOAD_PERMISSIONS = 0o640
DATA_UPLOAD_MAX_MEMORY_SIZE = 500024288000
Then create a storage_backends.py file inside your project folder, where the settings.py file is located.
storage_backends.py:
import os
from tempfile import SpooledTemporaryFile

from storages.backends.s3boto3 import S3Boto3Storage

class MediaStorage(S3Boto3Storage):
    bucket_name = 'your-bucket-name'
    file_overwrite = False

    def _save(self, name, content):
        """
        We create a clone of the content file, because when it is passed to
        boto3 it wrongly closes the file upon upload, whereas the storage
        backend expects it to still be open
        """
        # Seek our content back to the start
        content.seek(0, os.SEEK_SET)

        # Create a temporary file that spills to disk after a specified
        # size. This file will be automatically deleted when closed by
        # boto3, or after exiting the `with` statement if boto3 is fixed
        with SpooledTemporaryFile() as content_autoclose:
            # Write our original content into our copy that will be closed by boto3
            content_autoclose.write(content.read())
            # Rewind the copy so boto3 uploads it from the beginning
            content_autoclose.seek(0, os.SEEK_SET)
            # Upload the object, which will auto-close the
            # content_autoclose instance
            return super(MediaStorage, self)._save(name, content_autoclose)
Sorry for the noise, but I think I am missing something and I can't find the solution. When running collectstatic, I get the following error:
botocore.exceptions.EndpointConnectionError: Could not connect to the endpoint URL: "http://localhost:1212/test/static/gis/css/ol3.css"
Here is the setup:
docker-compose.yaml
. . .
  s3server:
    image: scality/s3server:latest
    restart: unless-stopped
    ports:
      - "1212:8000"
    volumes:
      - s3data:/usr/src/app/localData
      - s3metadata:/usr/src/app/localMetadata
    environment:
      SCALITY_ACCESS_KEY_ID: newAccessKey
      SCALITY_SECRET_ACCESS_KEY: newSecretKey
      SSL: "FALSE"
settings.py
# AWS settings
AWS_ACCESS_KEY_ID = env.str('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = env.str('AWS_SECRET_ACCESS_KEY')
AWS_S3_REGION_NAME = env.str('AWS_S3_REGION_NAME')
AWS_STORAGE_BUCKET_NAME = env.str('AWS_STORAGE_BUCKET_NAME')
AWS_S3_ENDPOINT_URL = env.str('AWS_S3_ENDPOINT_URL')
AWS_DEFAULT_ACL = None
AWS_S3_OBJECT_PARAMETERS = {
    'CacheControl': 'max-age=86400',
}
AWS_QUERYSTRING_AUTH = False
# s3 static settings
AWS_STATIC_LOCATION = 'static'
STATIC_URL = f'{AWS_S3_ENDPOINT_URL}/{AWS_STATIC_LOCATION}/'
STATICFILES_STORAGE = 'backend.storages.StaticStorage'
# s3 media settings
AWS_MEDIA_LOCATION = 'media'
MEDIA_URL = f'{AWS_S3_ENDPOINT_URL}/{AWS_MEDIA_LOCATION}/'
DEFAULT_FILE_STORAGE = 'backend.storages.PublicMediaStorage'
dev.env
AWS_STORAGE_BUCKET_NAME=test
AWS_ACCESS_KEY_ID=newAccessKey
AWS_SECRET_ACCESS_KEY=newSecretKey
AWS_S3_REGION_NAME=us-east-1
AWS_S3_ENDPOINT_URL=http://localhost:1212
backend/storages.py
from django.conf import settings
from storages.backends.s3boto3 import S3Boto3Storage

class StaticStorage(S3Boto3Storage):
    location = settings.AWS_STATIC_LOCATION
    default_acl = "public-read"

class PublicMediaStorage(S3Boto3Storage):
    location = settings.AWS_MEDIA_LOCATION
    default_acl = 'public-read'
    file_overwrite = False
I really don't understand why, as the following script works just fine:
script.py
import logging

import boto3
from botocore.exceptions import ClientError

s3_client = boto3.client(
    's3',
    aws_access_key_id="newAccessKey",
    aws_secret_access_key="newSecretKey",
    endpoint_url='http://localhost:1212',
    region_name="us-east-1",
)

def create_bucket(bucket_name, region="us-east-1"):
    try:
        s3_client.create_bucket(
            Bucket=bucket_name,
            CreateBucketConfiguration={'LocationConstraint': region},
        )
    except ClientError as e:
        logging.error(e)
        return False
    return True

if __name__ == "__main__":
    create_bucket("test", region="us-east-1")

    response = s3_client.list_buckets()
    # Output the bucket names
    print('Existing buckets:')
    for bucket in response['Buckets']:
        print(f'  {bucket["Name"]}')

    response = s3_client.upload_file(
        "backend/tests/test_image.jpg",
        "test",
        "static/test_image",
    )
    s3_client.download_file('test', 'static/test_image', 'toto.jpg')
Well, inside a container, localhost is obviously not the other services. Change AWS_S3_ENDPOINT_URL=http://localhost:1212 to AWS_S3_ENDPOINT_URL=http://s3server:8000 and expose port 8000 from s3server in the compose file. The last step to make it work is to add "s3server": "us-east-1" in the config.json mounted in the Scality server.
I am using django-storages and boto3 for media and static files with AWS S3. I need to get the object key of an S3 object so that I can generate a URL for it.
client = boto3.client('s3')
bucket_name = 'django-bucket'
key = ???
u = client.generate_presigned_url(
    'get_object',
    Params={
        'Bucket': bucket_name,
        'Key': key,
        'ResponseContentType': 'image/jpeg',
        'ResponseContentDisposition': 'attachment; filename="your-filename.jpeg"',
    },
    ExpiresIn=1000,
)
These are in my settings:
STATICFILES_LOCATION = 'static'
MEDIAFILES_LOCATION = 'media'
STATICFILES_STORAGE = 'myproject.custom_storages.StaticStorage'
DEFAULT_FILE_STORAGE = 'myproject.custom_storages.MediaStorage'
AWS_ACCESS_KEY_ID = "my_access_key_id"
AWS_SECRET_ACCESS_KEY = "my_secret_access_key"
AWS_STORAGE_BUCKET_NAME = "django-bucket"
AWS_QUERYSTRING_AUTH = False
AWS_S3_CUSTOM_DOMAIN = AWS_STORAGE_BUCKET_NAME + ".s3.amazonaws.com"
# static media settings
STATIC_URL = "https://" + AWS_STORAGE_BUCKET_NAME + ".s3.amazonaws.com/"
MEDIA_URL = STATIC_URL + "media/"
ADMIN_MEDIA_PREFIX = STATIC_URL + "admin/"
I can get the file path of the image file:
ui = UserImage.objects.get(user=user_id, image=image_id)
url = ui.image.url
# 'https://django-bucket.s3.amazonaws.com/media/user_image/1497598249_49.jpeg'
But I don't know how to get the s3 object key so that I can generate a url for that object.
It would seem the prefix can be constructed from the file field's storage 'location' value and the file's 'name' (which is a path from the 'location' to the file; it includes what most of us think of as the file 'name').
Here's a quick demo of a function that should get the job done:
import os

def get_file_key(file_field):
    return os.path.join(file_field.storage.location, file_field.name)
Use as follows:
prefix = get_file_key(my_model_instance.relevant_file_field)
Notes:
You'll probably want to implement error catching and sanity checking in and around the function as appropriate for your needs. For example, the function will raise an AttributeError if file_field is None. And in most scenarios you almost certainly wouldn't want to end up with just the location if field.name returned an empty string (whether because of a bug or because that was what got saved), or with '/' if the location wasn't set.
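Plugged into the presigned-URL call from the question, a minimal sketch (UserImage, the bucket name, and the expiry come from the question; error handling is omitted):

import boto3

client = boto3.client('s3')

ui = UserImage.objects.get(user=user_id, image=image_id)
key = get_file_key(ui.image)  # e.g. 'media/user_image/1497598249_49.jpeg'

url = client.generate_presigned_url(
    'get_object',
    Params={'Bucket': 'django-bucket', 'Key': key},
    ExpiresIn=1000,
)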
You can get the S3 object key with:
ui = UserImage.objects.get(user=user_id, image=image_id)
bucket_key = ui.image.file.obj.key
# 'media/user_image/1497598249_49.jpeg'
I followed every Q&A suggestion found on SO and in different blogs. Everything works OK on my dev machine, and nothing works on Heroku.
Here are my settings:
DEFAULT_FILE_STORAGE = 'arena.utils.MediaRootS3BotoStorage' # media files
# storage
AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
AWS_STORAGE_BUCKET_NAME = os.environ.get('AWS_STORAGE_BUCKET_NAME')
AWS_PRELOAD_METADATA = True # necessary to fix manage.py collectstatic command to only upload changed files instead of all files
S3_URL = 'https://%s.s3.amazonaws.com' % AWS_STORAGE_BUCKET_NAME
MEDIA_URL = S3_URL + '/media/'
STATIC_URL = S3_URL + '/static/'
ADMIN_MEDIA_PREFIX = STATIC_URL + 'admin/'
COMPRESS_URL = STATIC_URL
COMPRESS_OFFLINE = True
COMPRESS_STORAGE = 'utils.CachedS3BotoStorage'
STATICFILES_STORAGE = COMPRESS_STORAGE
When I run collectstatic/compress everything is OK: I see the files being collected to S3 and put in the proper places, and I see the manifest file.
Loading any page with a compressor tag shows the error OfflineGenerationError: You have offline compression enabled but key "d2a53169c44dec41ce3ee7da19b2b9d4" is missing from offline manifest. Running python manage.py compress again solves nothing. When I check the manifest file, the key it looks for indeed doesn't exist.
What is going wrong here?
Questions I already checked:
How to configure django-compressor and django-staticfiles with Amazon's S3?
Django Compressor with S3 URL Heroku
Configuring django-compressor with remote storage (django-storage - amazon s3)
On my side I have a very similar config, and I've been using compressor successfully for more than 2 years.
settings.py
COMPRESS_STORAGE = 'MyAwesomeApp.app.CachedS3BotoStorage.CachedS3BotoStorage'
AWS_ACCESS_KEY_ID = '#######'
AWS_SECRET_ACCESS_KEY = '########################+#########+BqoQ'
AWS_STORAGE_BUCKET_NAME = 'myAmazonS3cdn.myawesomewebsite.com'
AWS_S3_SECURE_URLS = False
AWS_QUERYSTRING_AUTH = False
COMPRESS_ROOT = 'MyAwesomeApp/static'
STATIC_ROOT = 'MyAwesomeApp/static/javascript'
COMPRESS_OUTPUT_DIR = 'compressed'
STATICFILES_STORAGE = COMPRESS_STORAGE
STATIC_URL = "http://myAmazonS3cdn.myawesomewebsite.com/"
COMPRESS_URL = STATIC_URL
COMPRESS_ENABLED = True
CachedS3BotoStorage.py
from django.core.files.storage import get_storage_class
from storages.backends.s3boto import S3BotoStorage
from django.core.files.base import File

class CachedS3BotoStorage(S3BotoStorage):
    """
    S3 storage backend that saves the files locally, too.
    """
    def __init__(self, *args, **kwargs):
        super(CachedS3BotoStorage, self).__init__(*args, **kwargs)
        self.local_storage = get_storage_class("compressor.storage.CompressorFileStorage")()

    def save(self, name, content):
        name = super(CachedS3BotoStorage, self).save(name, content)
        self.local_storage._save(name, content)
        return name
I'm running python manage.py compress locally, and the manifest is generated in my static files directory. Heroku only deals with collectstatic and delivers the most recent manifest version to my CDN.
Regards,
I completed the above solution with some lines to fix the problem of generating many (multiple) manifest_%.json files in Amazon S3.
In settings.py:
STATICFILES_STORAGE = 'your_package.s3utils.CachedS3BotoStorage'
In s3utils.py:
from storages.backends.s3boto import S3BotoStorage
from django.core.files.storage import get_storage_class

class CachedS3BotoStorage(S3BotoStorage):
    """
    S3 storage backend that saves the files locally, too.
    """
    location = 'static'

    def __init__(self, *args, **kwargs):
        super(CachedS3BotoStorage, self).__init__(*args, **kwargs)
        self.local_storage = get_storage_class(
            "compressor.storage.CompressorFileStorage")()

    def url(self, name):
        """
        Fix a problem with image URLs in the Django admin on S3
        """
        url = super(CachedS3BotoStorage, self).url(name)
        if name.endswith('/') and not url.endswith('/'):
            url += '/'
        return url

    def save(self, name, content):
        name = super(CachedS3BotoStorage, self).save(name, content)
        self.local_storage._save(name, content)
        return name

    # HERE is the secret to not generating multiple manifest.json files:
    # delete the existing manifest.json in Amazon S3 before saving the new one
    def get_available_name(self, name):
        if self.exists(name):
            self.delete(name)
        return name
I found a git repository that contains post_compile hooks to solve this problem. It runs compress after Heroku builds the Django app (and also installs lessc if you need Less in your compressor settings).
https://github.com/nigma/heroku-django-cookbook