...
```python
import os
import io
import boto3
from botocore.session import Session
from botocore.handlers import validate_bucket_name

# Initializing some values
project_id = 'myprojectID123'               # Fill this in with your own
bucketname = 'mybucket123MyFancyBucket123'  # Fill this in with your own
bucketname = project_id + ':' + bucketname
access_key = '123asdf'                      # Fill this in
secret_access_key = '123asdf111'            # Fill this in

# A neat trick that lets us specify the bucket name in the form ProjectID:bucketname
botocore_session = Session()
botocore_session.unregister('before-parameter-build.s3', validate_bucket_name)
boto3.setup_default_session(botocore_session=botocore_session)

# Initialize the S3 client
s3 = boto3.client('s3',
                  endpoint_url='https://s3.waw3-1.cloudferro.com',
                  aws_access_key_id=access_key,
                  aws_secret_access_key=secret_access_key)

# List all the objects in the bucket
response = s3.list_objects(Bucket=bucketname)
for item in response['Contents']:
    print(item['Key'])

# Read a file into Python's memory and open it as a string
filename = '/folder1/folder2/myfile.txt'    # Fill this in
obj = s3.get_object(Bucket=bucketname, Key=filename)
myObject = obj['Body'].read().decode('utf-8')
print(myObject)

# Downloading a file from the bucket
with open('myfile', 'wb') as f:             # Fill this in
    s3.download_fileobj(bucketname, 'myfile.txt', f)

# Uploading a file to the bucket (make sure you have write access)
response = s3.upload_file('myfile', bucketname, 'myfile')  # Fill this in

# Print the contents of every object in the bucket
result = s3.list_objects(Bucket=bucketname, Prefix='/')
for o in result.get('Contents'):
    data = s3.get_object(Bucket=bucketname, Key=o.get('Key'))
    contents = data['Body'].read()
    print(contents.decode('utf-8'))
```
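Two practical extensions of the snippet above: `list_objects` returns at most 1000 keys per call, so larger buckets are usually walked with a paginator, and `upload_fileobj`/`download_fileobj` also accept in-memory file-like objects, so you can move data without touching the local disk. The sketch below assumes the same `s3` client and `bucketname` as above; the key name `in-memory-example.txt` is purely illustrative.

```python
import io

# Walk the full listing, even past the 1000-key limit of a single list_objects call.
# Assumes the `s3` client and `bucketname` defined in the snippet above.
paginator = s3.get_paginator('list_objects_v2')
for page in paginator.paginate(Bucket=bucketname):
    for item in page.get('Contents', []):
        print(item['Key'], item['Size'])

# Upload an in-memory bytes buffer without writing it to disk first.
payload = io.BytesIO(b'Hello from memory')
s3.upload_fileobj(payload, bucketname, 'in-memory-example.txt')  # key name is illustrative

# Stream the same object back into memory.
buffer = io.BytesIO()
s3.download_fileobj(bucketname, 'in-memory-example.txt', buffer)
print(buffer.getvalue().decode('utf-8'))
```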
Check out a full code example on the official boto3 website:
...