Yandex Cloud offers S3-compatible object storage:
https://cloud.yandex.com/docs/storage/tools/
How can I configure Django to use it?
I) Install the boto3 and django-storages libraries.
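For example, with pip:
pip install boto3 django-storages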
II) Add a yandex_s3_storage.py file with the code below (note that the imported name must match the setting defined in step III):
from sites.crm.settings import YANDEX_CLIENT_DOCS_BUCKET_NAME
from storages.backends.s3boto3 import S3Boto3Storage


class ClientDocsStorage(S3Boto3Storage):
    bucket_name = YANDEX_CLIENT_DOCS_BUCKET_NAME
    file_overwrite = False
III) Add the code below to settings.py:
INSTALLED_APPS = [
    ...
    'storages',
    ...
]
...
# ----Yandex S3----
DEFAULT_FILE_STORAGE = 'yandex_s3_storage.ClientDocsStorage'  # path to the file we created in step II
YANDEX_CLIENT_DOCS_BUCKET_NAME = 'client-docs'
AWS_ACCESS_KEY_ID = env('AWS_ACCESS_KEY')
AWS_SECRET_ACCESS_KEY = env('AWS_SECRET_ACCESS_KEY')
AWS_S3_ENDPOINT_URL = 'https://storage.yandexcloud.net'
AWS_S3_REGION_NAME = 'storage'
IV) Add a file field to your model:
from sites.yandex_s3_storage import ClientDocsStorage


class ClientDocs(models.Model):
    ...
    upload = models.FileField(storage=ClientDocsStorage())
    ...
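To verify the storage is wired up, you can exercise it from the Django shell. A minimal sketch, assuming the model above lives in a sites app (the import path is hypothetical):
from django.core.files.base import ContentFile
from sites.models import ClientDocs  # hypothetical import path for the model above

doc = ClientDocs()
doc.upload.save('hello.txt', ContentFile(b'hello'))  # uploads the file to the Yandex bucket
print(doc.upload.url)  # should point at storage.yandexcloud.net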
I used Django, Docker, and an AWS S3 bucket for my project. I configured my settings file for the bucket and it works, but I get an error while uploading media files, "expected string or bytes-like object", and the Docker log shows the error at if not VALID_BUCKET.search(bucket) and not VALID_S3_ARN.search(bucket). I used Django forms and a function-based view.
models.py
def user_directory_path(instance, filename):
    tenant = connection.get_tenant()
    return 'profile_photos/{0}/{1}'.format(tenant, filename)


class UserProfilePhoto(models.Model):
    user = models.OneToOneField(User, on_delete=models.CASCADE)
    profilephoto = models.ImageField(blank=True, default="profile_photos/profilephoto.png", upload_to=user_directory_path)
views.py
def userprofile(request, id):
    get_object_or_404(User, id=id)
    if request.user.userprofile.status == 3 or str(request.user.id) == str(id):
        now_today = datetime.now(pytz.timezone('Europe/Istanbul'))
        announcements = Announcements.objects.filter(announce_type="announcement")
        current_page = "Kullanıcı Profili"  # "User Profile"
        user = User.objects.filter(id=id).first()
        user_doc_create = InsuranceFile.objects.filter(file_creator=user.username)
        user_doc_create_last_month = InsuranceFile.objects.filter(file_creator=user.username, created_at__gte=now() - relativedelta(months=1)).count()
        ratio_of_doc = ratio_utils(user_doc_create_last_month, user_doc_create.count())
        user_doc_update = InsuranceFile.objects.filter(file_updater=user.id)
        user_doc_update_last_month = InsuranceFile.objects.filter(file_updater=user.id, updated_at__gte=now() - relativedelta(months=1)).count()
        ratio_of_doc_update = ratio_utils(user_doc_update_last_month, user_doc_update.count())
        path_check = str("/account/userprofile/" + id)
        profilephoto = UserProfilePhoto.objects.filter(user=request.user).first()
        previous_profilephoto = profilephoto.profilephoto
        form_user = CreateUserForm(request.POST or None, instance=request.user)
        form_userprofile = UserProfileForm(request.POST or None, instance=request.user.userprofile)
        form_userphoto = UserProfilePhotoForm(request.POST or None, request.FILES, instance=request.user.userprofilephoto)
        is_confirmed = False
        if TOTPDevice.objects.filter(user_id=id).first():
            totp = TOTPDevice.objects.filter(user_id=id).first()
            is_confirmed = totp.confirmed
        if request.method == 'POST':
            if form_userphoto.is_valid() and form_userprofile.is_valid() and form_user.is_valid():
                with transaction.atomic():
                    form_userprofile.save()
                    if str(request.FILES) != "<MultiValueDict: {}>":
                        upload_profile_photo(request, form_userphoto, user, previous_profilephoto)
                messages.success(request, "Profil başarılı bir şekilde güncellendi.")  # "The profile was updated successfully."
                return HttpResponseRedirect(request.META.get('HTTP_REFERER', '/'))
        return render(request, 'userprofile.html', {
            "now_today": now_today,
            "ratio_of_doc_update": ratio_of_doc_update,
            "user_doc_update_last_month": user_doc_update_last_month,
            "user_doc_update": user_doc_update,
            "announcements": announcements,
            "current_page": current_page,
            "user_doc_create_last_month": user_doc_create_last_month,
            "ratio_of_doc": ratio_of_doc,
            "user_doc_create": user_doc_create,
            "path_check": path_check,
            "profilephoto": profilephoto,
            "is_confirmed": is_confirmed,
            "user": user,
            "form_userprofile": form_userprofile,
            "form_userphoto": form_userphoto,
            "form_user": form_user,
        })
    messages.warning(request, "Bu işlemi yapmaya yetkiniz bulunmamaktadır.")  # "You are not authorized to perform this action."
    return HttpResponseRedirect(request.META.get('HTTP_REFERER', '/'))
upload_profile_photo function
import boto3
from django.conf import settings


def upload_profile_photo(request, form_userphoto, user, previous_profilephoto):
    # Never hardcode credentials; read them from settings/environment instead
    s3 = boto3.client(
        's3',
        aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
        aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
    )
    if previous_profilephoto != "profile_photos/profilephoto.png":
        # Remove the old photo before saving the new one
        s3.delete_object(Bucket='dj-crm-tenant', Key=f'media/{previous_profilephoto}')
    form_userphoto.save()
settings.py
AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
AWS_STORAGE_BUCKET_NAME = os.environ.get('AWS_STORAGE_BUCKET_NAME')
AWS_S3_CUSTOM_DOMAIN = 'dj-crm-tenant.s3.amazonaws.com'
AWS_S3_OBJECT_PARAMETERS = {'CacheControl': 'max-age=86400'}
AWS_DEFAULT_ACL = 'public-read'
AWS_LOCATION = 'static'
STATICFILES_DIRS = [
os.path.join(BASE_DIR, 'static'),
]
STATIC_URL = 'https://%s/%s/' % (AWS_S3_CUSTOM_DOMAIN, AWS_LOCATION)
STATICFILES_STORAGE = 'storages.backends.s3boto3.S3Boto3Storage'
DEFAULT_FILE_STORAGE = 'dj_crm_tenant.storages.MediaStore'
storages.py
from storages.backends.s3boto3 import S3Boto3Storage


class MediaStore(S3Boto3Storage):
    location = 'media'
    file_overwrite = False
When I try to change my user's profile photo, I get an error; whenever I try to upload a file to my S3 bucket, I get the previously mentioned error. I am trying to upload media files to my S3 bucket using Django forms and boto3.
I followed the https://github.com/veryacademy/YT-Django-Media-Static-AWS-S3 tutorial.
The problem seems to be your .env file. You cannot connect to your bucket because your key and ID are not being read by the program.
You can also install the python-dotenv package for managing your env files.
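A minimal sketch of reading the keys, assuming a .env file next to manage.py and the python-dotenv package (pip install python-dotenv):
# settings.py
import os
from pathlib import Path

from dotenv import load_dotenv

# Load the variables from .env into the process environment before reading them
load_dotenv(Path(__file__).resolve().parent.parent / '.env')

AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')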
I am hosting a website on Microsoft Azure. I have updated all the configurations within Azure and my settings.py file looks as below -
DEFAULT_FILE_STORAGE = 'storages.backends.azure_storage.AzureStorage'
STATICFILES_STORAGE = 'custom_storage.custom_azure.AzureStaticStorage'
AZURE_ACCOUNT_NAME = os.environ.get('AZURE_ACCOUNT_NAME')
AZURE_STORAGE_KEY = os.environ.get('AZURE_STORAGE_KEY', False)
AZURE_MEDIA_CONTAINER = os.environ.get('AZURE_MEDIA_CONTAINER', 'media')
AZURE_STATIC_CONTAINER = os.environ.get('AZURE_STATIC_CONTAINER', 'static')
AZURE_CUSTOM_DOMAIN = f'{AZURE_ACCOUNT_NAME}.blob.core.windows.net'  # no trailing slash; the URLs below add their own
STATIC_URL = f'https://{AZURE_CUSTOM_DOMAIN}/{AZURE_STATIC_CONTAINER}/'
MEDIA_URL = f'https://{AZURE_CUSTOM_DOMAIN}/{AZURE_MEDIA_CONTAINER}/'
STATIC_ROOT = f'https://{AZURE_CUSTOM_DOMAIN}/{AZURE_STATIC_CONTAINER}/'
I have a custom_storage folder with a custom_azure.py file containing the following -
from django.conf import settings
from storages.backends.azure_storage import AzureStorage


class AzureMediaStorage(AzureStorage):
    account_name = settings.AZURE_ACCOUNT_NAME
    account_key = settings.AZURE_STORAGE_KEY
    azure_container = settings.AZURE_MEDIA_CONTAINER
    expiration_secs = None


class AzureStaticStorage(AzureStorage):
    account_name = settings.AZURE_ACCOUNT_NAME
    account_key = settings.AZURE_STORAGE_KEY
    azure_container = settings.AZURE_STATIC_CONTAINER
    expiration_secs = None
However, I am now pulling a static admin file and not the static project files containing the CSS, SCSS, and JS.
As a workaround, installing WhiteNoise enables the static files when hosting on Azure; however, it does not solve the problem of storing the static files on the Azure platform.
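One detail worth checking, judging only from the settings shown above: DEFAULT_FILE_STORAGE points at the generic storages.backends.azure_storage.AzureStorage, while the custom AzureMediaStorage class is defined but never referenced. A sketch of the settings using both custom classes (an assumption, not a confirmed fix):
DEFAULT_FILE_STORAGE = 'custom_storage.custom_azure.AzureMediaStorage'
STATICFILES_STORAGE = 'custom_storage.custom_azure.AzureStaticStorage'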
I am working on a REST API (using Django Rest Framework). I am trying to upload a video by sending a POST request to the endpoint I made.
Issue
The video does upload to the S3 bucket, but the upload progress shows 100% within a couple of seconds, however large the file I upload.
Why is this happening, and how can I solve it?
PS: Previously I was uploading to local storage, and the upload progress worked fine.
I am using React.
First of all, make sure you've installed these libraries: boto3==1.14.53, botocore==1.17.53, s3transfer==0.3.3, django-storages==1.10.
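For example, with pip (pinned to the versions above):
pip install boto3==1.14.53 botocore==1.17.53 s3transfer==0.3.3 django-storages==1.10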
settings.py :
INSTALLED_APPS = [
'storages',
]
AWS_ACCESS_KEY_ID = 'your-key-id'
AWS_SECRET_ACCESS_KEY = 'your-secret-key'
AWS_STORAGE_BUCKET_NAME = 'your-bucket-name'
AWS_S3_CUSTOM_DOMAIN = '%s.s3.amazonaws.com' % AWS_STORAGE_BUCKET_NAME
AWS_S3_OBJECT_PARAMETERS = {
'CacheControl': 'max-age=86400',
}
DEFAULT_FILE_STORAGE = 'your_project_name.storage_backends.MediaStorage'  # the dotted path must be a valid Python module name (no hyphens)
MEDIA_URL = "https://%s/" % AWS_S3_CUSTOM_DOMAIN
#File upload setting
BASE_URL = 'http://example.com'
FILE_UPLOAD_PERMISSIONS = 0o640
DATA_UPLOAD_MAX_MEMORY_SIZE = 500024288000
Then create a storage_backends.py file inside your project folder, where the settings.py file is located.
storage_backends.py:
import os
from tempfile import SpooledTemporaryFile

from storages.backends.s3boto3 import S3Boto3Storage


class MediaStorage(S3Boto3Storage):
    bucket_name = 'your-bucket-name'
    file_overwrite = False

    def _save(self, name, content):
        """
        Create a clone of the content file, because when it is passed to
        boto3 it is wrongly closed upon upload, whereas the storage
        backend expects it to still be open.
        """
        # Seek our content back to the start
        content.seek(0, os.SEEK_SET)
        # Create a temporary file that spools to disk after a certain size.
        # It is deleted automatically when closed by boto3, or on exiting
        # the `with` statement once boto3 is fixed.
        with SpooledTemporaryFile() as content_autoclose:
            # Write our original content into the copy that boto3 will close
            content_autoclose.write(content.read())
            # Upload the object, which auto-closes the content_autoclose instance
            return super(MediaStorage, self)._save(name, content_autoclose)
I am trying to create a barcode image file that will save to a path in my AWS S3 bucket, but I don't know how to link to it. My media and static files are already on AWS and work perfectly, but I don't know how to set the path for this barcodemaker function so it saves to the S3 bucket.
Thank you for your patience and guidance.
my barcodemaker function in my view
def barcodemaker():
    barcodemodel = apps.get_model('barcoder', 'barcodeModel')
    employee = apps.get_model('employees', 'Employee')
    data = employee.objects.filter(id=1)
    try:
        data2 = barcodemodel.objects.latest('id')
    except barcodemodel.DoesNotExist:
        data2 = 1002390000
    naa = str(data2)
    naa = int(naa[-10:])
    for i in data:
        id_name = str(i.id)
        naa += random.randint(500, 900)
        mocode = 'M-' + id_name + '-' + str(naa)
        b = barcodemodel(barcode_num=str(mocode))
        b.save()
    path = (>>>PATH to aws<<<, 'static', 'media', 'barcodes', mocode + '.png')
    with open(path, 'wb') as f:
        Code128(mocode, writer=ImageWriter()).write(f)
    barcode_context = {
        'mocode': mocode, 'f': f
    }
    return barcode_context
My static file settings in my settings file
STATIC_URL = '/static/'
STATICFILES_DIRS = [
os.path.join(BASE_DIR,"static")
]
STATIC_ROOT = os.path.join(BASE_DIR, 'staticfiles')
MEDIA_URL = '/profile_image/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'static/images')
#S3 BUCKETS CONFIG
# S3 logins Data
AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
AWS_STORAGE_BUCKET_NAME = os.environ.get('AWS_STORAGE_BUCKET_NAME')
AWS_S3_FILE_OVERWRITE = False
AWS_DEFAULT_ACL = None
DEFAULT_FILE_STORAGE = 'storages.backends.s3boto3.S3Boto3Storage'
STATICFILES_STORAGE = 'storages.backends.s3boto3.S3Boto3Storage'
From the documentation:
import logging
import boto3
from botocore.exceptions import ClientError


def upload_file(file_name, bucket, object_name=None):
    """Upload a file to an S3 bucket

    :param file_name: File to upload
    :param bucket: Bucket to upload to
    :param object_name: S3 object name. If not specified then file_name is used
    :return: True if file was uploaded, else False
    """
    # If S3 object_name was not specified, use file_name
    if object_name is None:
        object_name = file_name

    # Upload the file
    s3_client = boto3.client('s3')
    try:
        response = s3_client.upload_file(file_name, bucket, object_name)
    except ClientError as e:
        logging.error(e)
        return False
    return True
then (here f is in binary mode):
s3.upload_fileobj(f, "BUCKET_NAME", "OBJECT_NAME")
The code will use your environment variables for the access keys. Alternatively, you can use this syntax:
client = boto3.client(
's3',
aws_access_key_id=ACCESS_KEY,
aws_secret_access_key=SECRET_KEY,
aws_session_token=SESSION_TOKEN,
)
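Since the settings in the question already route DEFAULT_FILE_STORAGE through S3Boto3Storage, another option is to skip boto3 entirely and write through Django's storage API. A minimal sketch, assuming the python-barcode Code128/ImageWriter used in the question (the 'barcodes/' prefix is an arbitrary choice):
from io import BytesIO

from barcode import Code128
from barcode.writer import ImageWriter
from django.core.files.base import ContentFile
from django.core.files.storage import default_storage

buffer = BytesIO()
Code128(mocode, writer=ImageWriter()).write(buffer)  # mocode comes from barcodemaker() above
# default_storage is the configured S3 backend, so this lands in the bucket
saved_path = default_storage.save('barcodes/' + mocode + '.png', ContentFile(buffer.getvalue()))
barcode_url = default_storage.url(saved_path)  # URL to link to the stored image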
Sorry for the noise, but I think I am missing something and I can't find a solution. When running collectstatic, I get the following error:
botocore.exceptions.EndpointConnectionError: Could not connect to the endpoint URL: "http://localhost:1212/test/static/gis/css/ol3.css"
Here is the following setup:
docker-compose.yaml
. . .
  s3server:
    image: scality/s3server:latest
    restart: unless-stopped
    ports:
      - "1212:8000"
    volumes:
      - s3data:/usr/src/app/localData
      - s3metadata:/usr/src/app/localMetadata
    environment:
      SCALITY_ACCESS_KEY_ID: newAccessKey
      SCALITY_SECRET_ACCESS_KEY: newSecretKey
      SSL: "FALSE"
settings.py
# AWS settings
AWS_ACCESS_KEY_ID = env.str('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = env.str('AWS_SECRET_ACCESS_KEY')
AWS_S3_REGION_NAME = env.str('AWS_S3_REGION_NAME')
AWS_STORAGE_BUCKET_NAME = env.str('AWS_STORAGE_BUCKET_NAME')
AWS_S3_ENDPOINT_URL = env.str('AWS_S3_ENDPOINT_URL')
AWS_DEFAULT_ACL = None
AWS_S3_OBJECT_PARAMETERS = {
'CacheControl': 'max-age=86400',
}
AWS_QUERYSTRING_AUTH = False
# s3 static settings
AWS_STATIC_LOCATION = 'static'
STATIC_URL = f'{AWS_S3_ENDPOINT_URL}/{AWS_STATIC_LOCATION}/'
STATICFILES_STORAGE = 'backend.storages.StaticStorage'
# s3 media settings
AWS_MEDIA_LOCATION = 'media'
MEDIA_URL = f'{AWS_S3_ENDPOINT_URL}/{AWS_MEDIA_LOCATION}/'
DEFAULT_FILE_STORAGE = 'backend.storages.PublicMediaStorage'
dev.env
AWS_STORAGE_BUCKET_NAME=test
AWS_ACCESS_KEY_ID=newAccessKey
AWS_SECRET_ACCESS_KEY=newSecretKey
AWS_S3_REGION_NAME=us-east-1
AWS_S3_ENDPOINT_URL=http://localhost:1212
backend/storages.py
from django.conf import settings
from storages.backends.s3boto3 import S3Boto3Storage


class StaticStorage(S3Boto3Storage):
    location = settings.AWS_STATIC_LOCATION
    default_acl = "public-read"


class PublicMediaStorage(S3Boto3Storage):
    location = settings.AWS_MEDIA_LOCATION
    default_acl = 'public-read'
    file_overwrite = False
I really don't understand why, as the following script works just fine:
script.py
import logging

import boto3
from botocore.exceptions import ClientError

s3_client = boto3.client(
    's3',
    aws_access_key_id="newAccessKey",
    aws_secret_access_key="newSecretKey",
    endpoint_url='http://localhost:1212',
    region_name="us-east-1",
)


def create_bucket(bucket_name):
    try:
        s3_client.create_bucket(
            Bucket=bucket_name,
            CreateBucketConfiguration={'LocationConstraint': "us-east-1"},
        )
    except ClientError as e:
        logging.error(e)
        return False
    return True


if __name__ == "__main__":
    create_bucket("test")  # create_bucket() takes only the bucket name
    response = s3_client.list_buckets()
    # Output the bucket names
    print('Existing buckets:')
    for bucket in response['Buckets']:
        print(f'  {bucket["Name"]}')
    response = s3_client.upload_file(
        "backend/tests/test_image.jpg",
        "test",
        "static/test_image",
    )
    s3_client.download_file('test', 'static/test_image', 'toto.jpg')
Well, inside a container, localhost is obviously not the other services. Change AWS_S3_ENDPOINT_URL=http://localhost:1212 to AWS_S3_ENDPOINT_URL=http://s3server:8000 and expose port 8000 from s3server in the compose file. The last step to make it work is to add "s3server": "us-east-1" in the config.json mounted in the Scality server.
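A sketch of those three changes, assuming the Django service runs on the same compose network (the config.json fragment assumes Scality's restEndpoints section):
docker-compose.yaml
  s3server:
    image: scality/s3server:latest
    expose:
      - "8000"  # reachable as s3server:8000 from other services on the network
    ...

dev.env
AWS_S3_ENDPOINT_URL=http://s3server:8000

config.json (mounted into the s3server container)
"restEndpoints": {
    "s3server": "us-east-1"
}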