API details.

class S3[source]

S3()

save and load files with extra args

special headers

class ExtraArgs[source]

ExtraArgs()
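
ExtraArgs bundles reusable dicts of upload arguments and headers. Judging from how ExtraArgs.gzip, ExtraArgs.publicRead, and ExtraArgs.png are spread into calls on this page, a minimal sketch of the attribute shapes (an inference from usage, not the class's source):

# sketch of the attribute shapes, inferred from usage on this page;
# the real class may define these differently or include more entries
class ExtraArgs:
    gzip = {'ContentType': 'application/json', 'ContentEncoding': 'gzip'}  # matches saveGz's default below
    png = {'Content-Type': 'image/png'}  # the shape that appears in the presign fields below
    publicRead = {'ACL': 'public-read'}  # assumed: the standard s3 public-read acl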

save and load gz

bucket = 'pybz-test'
key = 'test'
path = './CONTRIBUTING.md'
S3.saveFile(key=key, path=path, bucket=bucket,
            ExtraArgs={**ExtraArgs.gzip, **ExtraArgs.publicRead})

Presign

test save gz

Gzip options

saveGz[source]

saveGz(bucket, key, item, extraArgs={'ContentType': 'application/json', 'ContentEncoding': 'gzip'}, path='/tmp/tmp.gz', **kwargs)

bucket: str: s3 bucket
key: str: s3 key
extraArgs: dict: args to pass to s3
path: str: tmp path for cache

loadGz[source]

loadGz(bucket, key, path='/tmp/test')

bucket: str: s3 bucket name
key: str: s3 key
path: str: temp path for caching

item = {'test': 'test'}
S3.saveGz(bucket=bucket, key=key, item=item)
S3.loadGz(bucket=bucket, key=key)
{'test': 'test'}
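
Internally, saveGz presumably serializes the item to JSON, gzip-compresses it at the tmp path, and uploads it with the gzip headers so clients decompress transparently; loadGz reverses the process. A minimal sketch of that flow in plain boto3 (illustrative helper names, not the library's source):

import gzip
import json
import boto3

def save_gz_sketch(bucket, key, item, path='/tmp/tmp.gz'):
    # json-serialize and gzip-compress into the tmp file
    with gzip.open(path, 'wt') as f:
        json.dump(item, f)
    # ContentEncoding: gzip lets http clients decompress transparently
    boto3.client('s3').upload_file(path, bucket, key,
        ExtraArgs={'ContentType': 'application/json',
                   'ContentEncoding': 'gzip'})

def load_gz_sketch(bucket, key, path='/tmp/test'):
    # download to the cache path, then decompress and parse
    boto3.client('s3').download_file(bucket, key, path)
    with gzip.open(path, 'rt') as f:
        return json.load(f)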

presign class

class Presign[source]

Presign()

presign upload example

p = S3.presignUpload(bucket, key='test', fields={**ExtraArgs.png},
                     conditions=[["starts-with", "$Content-Type", ""]])
print(p)
r = Presign.upload(p, b'hello', key='test')
r.content
{'url': 'https://pybz-test.s3-accelerate.amazonaws.com/', 'fields': {'Content-Type': 'image/png', 'key': 'test', 'AWSAccessKeyId': 'AKIAVX4Z5TKDSNNNULGB', 'policy': 'eyJleHBpcmF0aW9uIjogIjIwMjEtMDQtMDZUMDk6NTY6MDFaIiwgImNvbmRpdGlvbnMiOiBbWyJzdGFydHMtd2l0aCIsICIkQ29udGVudC1UeXBlIiwgIiJdLCB7ImJ1Y2tldCI6ICJweWJ6LXRlc3QifSwgeyJrZXkiOiAidGVzdCJ9XX0=', 'signature': 'pbtwCLfTa5+47nnbZLZv1gaWOcI='}}
b''
S3.loadFile('test', bucket=bucket, path='/tmp/test')
with open('/tmp/test', 'r') as f:
  item = f.read()
print(f'bucket {bucket}, has item {item}')
bucket pybz-test, has item hello
from io import BytesIO
import requests

p = S3.presignUpload(bucket, key='test')
url = p['url']
fields = p['fields']
bio = BytesIO(b'hello1')
files = {'file': ('test1', bio)}
r = requests.post(url, data=fields, files=files)
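
presignUpload presumably wraps boto3's generate_presigned_post, which produces the same {'url', 'fields'} payload seen above; a sketch of the raw call (an assumption about the wrapper, not its source):

import boto3

s3 = boto3.client('s3')
p = s3.generate_presigned_post(
    Bucket=bucket,
    Key='test',
    Fields={'Content-Type': 'image/png'},
    Conditions=[['starts-with', '$Content-Type', '']],
    ExpiresIn=3600)
# p['url'] and p['fields'] can then be posted exactly as in the cell above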

generalSave[source]

generalSave(key, objectToSave:dict, bucket='', compressor=<lambda>, encoder=<lambda>, **kwargs)

save an object to s3 using the given encoder and compressor

generalLoad[source]

generalLoad(key, bucket='', fileName='/tmp/tempFile.bz', decompressor=<lambda>, decoder=<lambda>, useUrl=False, **kwargs)

load an object from s3 using the given decompressor and decoder
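
generalSave and generalLoad parameterize the whole pipeline: encode the object to bytes, compress, upload, and the inverse on load. A minimal sketch of the pattern with json/zlib-style callables (illustrative names, not the library's source):

import json
import zlib
import boto3

def general_save_sketch(key, obj, bucket,
                        compressor=zlib.compress,
                        encoder=lambda o: json.dumps(o).encode()):
    # encode to bytes, then compress, then upload
    boto3.client('s3').put_object(Bucket=bucket, Key=key,
                                  Body=compressor(encoder(obj)))

def general_load_sketch(key, bucket,
                        decompressor=zlib.decompress,
                        decoder=lambda b: json.loads(b.decode())):
    # download, then decompress, then decode
    body = boto3.client('s3').get_object(Bucket=bucket, Key=key)['Body'].read()
    return decoder(decompressor(body))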

saveZl[source]

saveZl(key, objectToSave:dict, bucket='', **kwargs)

save a zlib-compressed object to s3

loadZl[source]

loadZl(key, bucket='', fileName='/tmp/tempFile.bz', **kwargs)

load a zlib-compressed object from s3
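
saveZl/loadZl are presumably generalSave/generalLoad with zlib plugged in; the equivalent explicit calls would look like this (an assumption from the naming):

import json
import zlib

obj = {'hello': 'world'}
# json-encode, then zlib-compress (assumed to match saveZl)
S3.generalSave(key, obj, bucket=bucket,
               compressor=zlib.compress,
               encoder=lambda o: json.dumps(o).encode())
obj2 = S3.generalLoad(key, bucket=bucket,
                      decompressor=zlib.decompress,
                      decoder=lambda b: json.loads(b.decode()))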

savePklZl[source]

savePklZl(key, objectToSave:dict, bucket='', **kwargs)

save a pickled, zlib-compressed object to s3

loadPklZl[source]

loadPklZl(key, bucket='', fileName='/tmp/tempFile.bz', **kwargs)

load a pickled, zlib-compressed object from s3
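
savePklZl/loadPklZl presumably swap the json encoder for pickle, so arbitrary python objects round-trip at the cost of portability; the assumed equivalent:

import pickle
import zlib

obj = {'hello': 'world'}
# pickle handles objects json cannot serialize (assumed to match savePklZl)
S3.generalSave(key, obj, bucket=bucket,
               compressor=zlib.compress, encoder=pickle.dumps)
obj2 = S3.generalLoad(key, bucket=bucket,
                      decompressor=zlib.decompress, decoder=pickle.loads)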

saveRaw[source]

saveRaw(key, objectToSave, bucket='', **kwargs)

save an uncompressed object to s3

loadRaw[source]

loadRaw(key, bucket='', fileName='/tmp/tempFile.bz', **kwargs)

load an uncompressed object from s3
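
saveRaw/loadRaw presumably skip compression entirely, which suits small objects where zlib gains nothing; the assumed equivalent is an identity compressor:

import json

obj = {'hello': 'world'}
# identity compressor: the encoded bytes are uploaded as-is (assumed to match saveRaw)
S3.generalSave(key, obj, bucket=bucket,
               compressor=lambda b: b,
               encoder=lambda o: json.dumps(o).encode())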

import requests
print(bucket)
sampleDict = {'hello': 'world'}
%time S3.save(key, sampleDict, bucket)
%time S3.load(key, bucket, useUrl=True)
%time S3.saveZl(key, sampleDict, bucket)
%time S3.loadZl(key, bucket, useUrl=True)
%time S3.savePklZl(key, sampleDict, bucket)
%time r = S3.loadPklZl(key, bucket, useUrl=True)
%time S3.saveRaw(key, sampleDict, bucket)
%time r = S3.loadRaw(key, bucket, useUrl=True)
%time url = S3.presign(key, bucket=bucket, checkExist=False)
%time r = requests.get(url)

class Requests[source]

Requests()

for uploading and downloading content from a url
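
Requests presumably thin-wraps the requests library for moving bytes through presigned urls; both directions in raw form (getUrl/putUrl are hypothetical placeholders for presigned urls):

import requests

# download: fetch the body from a presigned get url
content = requests.get(getUrl).content
# upload: put raw bytes to a presigned put url
r = requests.put(putUrl, data=b'hello')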

import pandas as pd
df = pd.DataFrame({'test': [1, 2, 3, 4, 5], 'test2': [2, 3, 4, 5, 6]})
S3.saveDataFrame(bucket, key, df)
S3.loadDataFrame(bucket, key)
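
The internals of saveDataFrame/loadDataFrame are not shown here; one plausible shape is serializing the frame through an in-memory buffer, e.g. as parquet (an assumption, not the library's implementation):

from io import BytesIO
import boto3
import pandas as pd

buf = BytesIO()
df.to_parquet(buf)  # requires pyarrow or fastparquet
boto3.client('s3').put_object(Bucket=bucket, Key=key, Body=buf.getvalue())

body = boto3.client('s3').get_object(Bucket=bucket, Key=key)['Body'].read()
df2 = pd.read_parquet(BytesIO(body))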
%time S3.presign(key='allData', bucket='product-bucket-dev-manual')
%time S3.presign(key='allData', bucket='product-bucket-dev-manual', checkExist=False)
%timeit S3.exist(key='allData', bucket='product-bucket-dev-manual')
%%time
# raw boto3 equivalent of the exist check above
import boto3
from botocore.errorfactory import ClientError

s3 = boto3.client('s3')
try:
    s3.head_object(Bucket='product-bucket-dev-manual', Key='allData')
except ClientError:
    # key not found
    pass

createBucket[source]

createBucket(bucket:str, **kwargs)

deleteBucket[source]

deleteBucket(bucket:str, **kwargs)
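
Both presumably delegate to the boto3 client; the raw equivalents:

import boto3

s3 = boto3.client('s3')
s3.create_bucket(Bucket='my-test-bucket')  # outside us-east-1, also pass CreateBucketConfiguration={'LocationConstraint': region}
s3.delete_bucket(Bucket='my-test-bucket')  # the bucket must be empty first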