Copy a folder from server to Azure blob using django-storage - django

How can I upload a folder (media_root/folder), which contains subfolders and files, to an Azure blob container? I can upload a single file to the container using this:
from django.core.files.storage import default_storage

f = open('media_root/folder/file.csv', 'rb')
default_storage.save(path, f)  # 'path' is the destination name in the container
I have set the AzureStorage class in my settings.py:
DEFAULT_FILE_STORAGE = 'storages.backends.azure_storage.AzureStorage'
Any help would be much appreciated.

As the django-storages documentation shows, there is no method for uploading a whole folder to Azure Blob Storage; it can only upload individual files.
If you want to upload a folder (make sure it is not empty) to an Azure Storage blob container, see the following Python code, which walks the directory tree and uploads each file on its own:
from azure.storage.blob import BlockBlobService, PublicAccess
import os

def run_sample():
    block_blob_service = BlockBlobService("your_account_name", "your_account_key")
    container_name = 'test1'
    path_remove = "F:\\"
    local_path = "F:\\folderA"
    # Walk the local folder tree and upload every file, keeping its
    # path (minus the drive prefix) as the blob name.
    for r, d, f in os.walk(local_path):
        if f:
            for file in f:
                file_path_on_azure = os.path.join(r, file).replace(path_remove, "")
                file_path_on_local = os.path.join(r, file)
                block_blob_service.create_blob_from_path(container_name, file_path_on_azure, file_path_on_local)

# Main method.
if __name__ == '__main__':
    run_sample()
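Note that BlockBlobService comes from the legacy azure-storage-blob v2 SDK. For reference, here is a rough sketch of the same walk with the current v12 SDK, where upload_blob replaces create_blob_from_path; the account URL, key, and container name are placeholders:

import os
from azure.storage.blob import BlobServiceClient

# Sketch only: substitute your own account URL, key, and container.
service = BlobServiceClient(
    account_url="https://your_account_name.blob.core.windows.net",
    credential="your_account_key")
container_client = service.get_container_client("test1")

local_path = "F:\\folderA"
path_remove = "F:\\"
for root, dirs, files in os.walk(local_path):
    for name in files:
        local_file = os.path.join(root, name)
        # Use forward slashes so the blobs appear as virtual folders.
        blob_name = local_file.replace(path_remove, "").replace(os.sep, "/")
        with open(local_file, "rb") as data:
            container_client.upload_blob(name=blob_name, data=data, overwrite=True)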

Related

How to Update file name in FileField after file is uploaded on s3 via presigned post URL in Django?

I have integrated django-storages into my Django project. For large files, I use a pre-signed URL so uploads go to S3 directly without putting load on my server.
Files upload successfully to the AWS S3 bucket via the pre-signed URL; after the upload I need to update the file's name in the FileField.
You probably need something like the following, using the boto3 library to retrieve the file from S3 and Django's File wrapper to attach it to the model under the new name.
import boto3
from django.core.files import File

s3 = boto3.client('s3')
uploaded_file = s3.get_object(Bucket='your-bucket-name', Key='object-key')

# get_object returns a stream rather than a local path, so write the
# body out under the new name (os.rename would not work on it).
new_filename = 'new_file_name.txt'
with open(new_filename, 'wb') as out:
    out.write(uploaded_file['Body'].read())
...
with open(new_filename, 'rb') as f:
    file_obj = File(f, name=new_filename)
    my_model.file_field_name = file_obj
    my_model.save()
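Note that re-saving through the FileField uploads the bytes again. If the goal is only to rename the object in S3, a hedged alternative (bucket and key names are placeholders) is a server-side copy followed by a delete, since S3 has no rename primitive:

import boto3

s3 = boto3.client('s3')
bucket = 'your-bucket-name'
# S3 cannot rename in place: copy to the new key, then delete the old one.
s3.copy_object(Bucket=bucket,
               CopySource={'Bucket': bucket, 'Key': 'object-key'},
               Key='new_file_name.txt')
s3.delete_object(Bucket=bucket, Key='object-key')
# Point the FileField at the new key without re-uploading the content:
my_model.file_field_name.name = 'new_file_name.txt'
my_model.save()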

Upload folder to google cloud bucket with python

I know that I can upload single files like this:
bucket_name = "my-bucket-name"
bucket = client.get_bucket(bucket_name)
blob_name = "myfile.txt"
blob = bucket.blob(blob_name)
blob.upload_from_filename(blob_name)
How can I do the same with a folder? Is there something like blob.upload_from_foldername?
I tried the same code, replacing myfile.txt with myfoldername, but it did not work:
FileNotFoundError: [Errno 2] No such file or directory: 'myfoldername'
I assume something is wrong with the path, but I am not sure what. I am executing the code from Untitled.ipynb; it works with myfile.txt but not with myfoldername.
I do not want to use a command line function.
You can't upload an empty folder or directory to Google Cloud Storage, but you can create an empty folder in Cloud Storage using the client:
from google.cloud import storage

def create_newfolder(bucket_name, destination_folder_name):
    # Cloud Storage treats a zero-byte object whose name ends with '/'
    # as a folder.
    storage_client = storage.Client()
    bucket = storage_client.get_bucket(bucket_name)
    blob = bucket.blob(destination_folder_name)
    blob.upload_from_string('')
    print('Created {}.'.format(destination_folder_name))
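A hypothetical call (bucket and folder names are placeholders); the trailing slash is what makes Cloud Storage render the object as a folder:

create_newfolder('my-bucket-name', 'myfoldername/')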
And if you are trying to upload a whole directory, you can use the code below:
import glob
import os
from google.cloud import storage

client = storage.Client()

def upload_from_directory(directory_path: str, destination_bucket_name: str, destination_blob_name: str):
    rel_paths = glob.glob(directory_path + '/**', recursive=True)
    bucket = client.get_bucket(destination_bucket_name)
    for local_file in rel_paths:
        remote_path = f'{destination_blob_name}/{"/".join(local_file.split(os.sep)[1:])}'
        if os.path.isfile(local_file):
            blob = bucket.blob(remote_path)
            blob.upload_from_filename(local_file)
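A hypothetical usage, assuming the notebook sits next to the folder from the question (all names are placeholders); this uploads every file under myfoldername, mirroring the directory tree under the given blob prefix:

upload_from_directory('myfoldername', 'my-bucket-name', 'myfoldername')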

Using python to update a file on google drive

I have the following script to upload a file to Google Drive, using Python 2.7. As it is now, it uploads a new copy of the file, but I want the existing file updated/overwritten instead. I can't find help in the Google Drive API references and guides for Python. Any suggestions?
from __future__ import print_function
import os
from apiclient.discovery import build
from httplib2 import Http
from oauth2client import file, client, tools

try:
    import argparse
    flags = argparse.ArgumentParser(parents=[tools.argparser]).parse_args()
except ImportError:
    flags = None

# Gain access to Google Drive
SCOPES = 'https://www.googleapis.com/auth/drive.file'
store = file.Storage('storage.json')
creds = store.get()
if not creds or creds.invalid:
    flow = client.flow_from_clientsecrets('client_secret.json', SCOPES)
    creds = tools.run_flow(flow, store, flags) \
        if flags else tools.run(flow, store)
DRIVE = build('drive', 'v3', http=creds.authorize(Http()))

# The file that is being uploaded
FILES = (
    ('all-gm-keys.txt', 'application/vnd.google-apps.document'),  # in Google Doc format
)

# Where the file ends up on Google Drive
for filename, mimeType in FILES:
    folder_id = '0B6V-MONTYPYTHONROCKS-lTcXc'  # not the real folder id
    metadata = {'name': filename, 'parents': [folder_id]}
    if mimeType:
        metadata['mimeType'] = mimeType
    res = DRIVE.files().create(body=metadata, media_body=filename).execute()
    if res:
        print('Uploaded "%s" (%s)' % (filename, res['mimeType']))
I think you are looking for the update method; here is a link to the documentation, which includes an example of overwriting a file in Python.
Using the official Google client API instead of raw HTTP requests should make your task easier.
from apiclient import errors
from apiclient.http import MediaFileUpload
# ...

def update_file(service, file_id, new_title, new_description, new_mime_type,
                new_filename, new_revision):
    """Update an existing file's metadata and content.

    Args:
      service: Drive API service instance.
      file_id: ID of the file to update.
      new_title: New title for the file.
      new_description: New description for the file.
      new_mime_type: New MIME type for the file.
      new_filename: Filename of the new content to upload.
      new_revision: Whether or not to create a new revision for this file.

    Returns:
      Updated file metadata if successful, None otherwise.
    """
    try:
        # First retrieve the file from the API.
        file = service.files().get(fileId=file_id).execute()
        # File's new metadata.
        file['title'] = new_title
        file['description'] = new_description
        file['mimeType'] = new_mime_type
        # File's new content.
        media_body = MediaFileUpload(
            new_filename, mimetype=new_mime_type, resumable=True)
        # Send the request to the API.
        updated_file = service.files().update(
            fileId=file_id,
            body=file,
            newRevision=new_revision,
            media_body=media_body).execute()
        return updated_file
    except errors.HttpError as error:
        print('An error occurred: %s' % error)
        return None
Link to the example: https://developers.google.com/drive/api/v2/reference/files/update#examples
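Note that the example above targets Drive API v2, while the script in the question builds a v3 service; in v3 the metadata key is name rather than title, and there is no newRevision parameter. A minimal v3 sketch, reusing DRIVE, filename, and mimeType from the question's script (file_id is a placeholder for the ID of the previously uploaded file):

from apiclient.http import MediaFileUpload

media_body = MediaFileUpload(filename, mimetype=mimeType, resumable=True)
updated = DRIVE.files().update(
    fileId=file_id,  # ID of the file to overwrite (placeholder)
    body={'name': filename},
    media_body=media_body).execute()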

Upload image to Firebase Storage from django models in FileField

I need to upload an image to Firebase Storage, and I'm thinking of doing it with a post_save signal or in the save method. But since the Firebase upload docs are pure JS, how can I do it in models.py? Here is the reference for uploading with Firebase Web:
https://firebase.google.com/docs/storage/web/upload-files
You're going to want to use google-cloud-storage for this:
# Import
from google.cloud import storage
# Initialize
client = storage.Client()
bucket = client.get_bucket('bucket-id-here')
# Download
blob = bucket.get_blob('remote/path/to/file.txt')
print(blob.download_as_string())
# Upload
blob2 = bucket.blob('remote/path/storage.txt')
blob2.upload_from_filename(filename='/local/path.txt')
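To tie that into the model as the question asks, one option is to override save(). The sketch below rests on assumptions: the model, field, and bucket names are all placeholders, and Firebase projects typically expose a bucket named <project-id>.appspot.com:

from django.db import models
from google.cloud import storage

class Photo(models.Model):
    image = models.FileField(upload_to='photos/')

    def save(self, *args, **kwargs):
        super(Photo, self).save(*args, **kwargs)
        # After the local save, mirror the file to the Firebase bucket.
        client = storage.Client()
        bucket = client.get_bucket('your-project-id.appspot.com')
        blob = bucket.blob(self.image.name)
        blob.upload_from_filename(self.image.path)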

Python Google Drive API : uploaded image is shown inside a doc

I am using the code below to upload an image to Google Drive from my Python app:
import logging
import httplib2
from django.core.management.base import BaseCommand
from apiclient.discovery import build
from apiclient.http import MediaFileUpload
from gdoauth2.models import DriveCredential

class Command(BaseCommand):
    def handle(self, *args, **options):
        credential = DriveCredential.objects.latest('id').credential
        http = credential.authorize(httplib2.Http())
        service = build('drive', 'v2', http=http)
        mime_type = 'image/jpg'
        filename = '/<path>/test.jpg'
        logging.info('uploading %s' % filename)
        media_body = MediaFileUpload(
            filename, mimetype=mime_type, resumable=True)
        upload = service.files().insert(
            body=dict(title='test.jpg', mimeType=mime_type),
            media_body=media_body, convert=True).execute()
After uploading, I can see the image inserted into a doc file named 'test.jpg' instead of an actual image file in my Google Drive. How can I upload an image as an actual image file in Google Drive? Also, please help me upload an image from a URL.
Change the last line to:
upload = service.files().insert(
    body=dict(title='test.jpg', mimeType=mime_type),
    media_body=media_body, convert=False).execute()
convert=True will perform OCR on the image and save the image and OCR text to a Google Doc. It sounds like that's not what you want.
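For the second part of the question (uploading from a URL), one hedged approach is to fetch the bytes first and wrap them in MediaIoBaseUpload; the URL below is a placeholder:

import io
import requests
from apiclient.http import MediaIoBaseUpload

# Download the image into memory, then hand the bytes to the Drive API.
resp = requests.get('https://example.com/picture.jpg')
media_body = MediaIoBaseUpload(io.BytesIO(resp.content),
                               mimetype='image/jpeg', resumable=True)
upload = service.files().insert(
    body=dict(title='picture.jpg', mimeType='image/jpeg'),
    media_body=media_body, convert=False).execute()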