# API details.
# S3 wrapper demo: upload a file with ExtraArgs presets, round-trip a
# gzip-compressed object, and exercise presigned uploads/downloads.
# NOTE(review): `bucket`, `S3`, `Presign`, and `ExtraArgs` are defined in
# earlier cells of this session -- not visible in this view.
key = 'test'
path = './CONTRIBUTING.md'
# Upload a local file; ExtraArgs.gzip / ExtraArgs.publicRead are presumably
# preset ExtraArgs dicts (content-encoding / ACL) -- confirm in the wrapper.
S3.saveFile(key=key,path=path,bucket=bucket,
ExtraArgs = {**ExtraArgs.gzip, **ExtraArgs.publicRead})
item = {'test':'test'}
# Save the dict as a gzip-compressed object, then read it back.
S3.saveGz(bucket=bucket,key = key, item=item)
S3.loadGz(bucket=bucket,key=key)
# Presigned POST with a PNG fields preset and a condition that accepts any
# Content-Type (a "starts-with" prefix of "" matches everything).
p = S3.presignUpload(bucket, key='test', fields = {**ExtraArgs.png}, conditions = [["starts-with", "$Content-Type", ""]])
print(p)
# Upload raw bytes through the presigned POST and inspect the response.
r = Presign.upload(p, b'hello', key = 'test')
r.content  # bare expression: displayed in a notebook, a no-op in a plain script
# Download the object to a local path and show its contents.
S3.loadFile('test', bucket=bucket, path = '/tmp/test')
with open('/tmp/test', 'r') as f:
    item = f.read()
print(f'bucket {bucket}, has item {item}')
# Presigned-POST upload demo: request a presigned POST from the S3 wrapper,
# then upload a small in-memory payload to it with `requests`.
# NOTE(review): `S3` and `bucket` are defined in earlier cells of this session.
from io import BytesIO
import requests  # was imported *after* its first use below -- hoisted to fix the NameError

p = S3.presignUpload(bucket, key='test')  # returns {'url': ..., 'fields': ...}
url = p['url']
fields = p['fields']
bio = BytesIO(b'hello1')           # file-like payload for the multipart upload
files = {'file': ('test1', bio)}   # (filename, fileobj) tuple per requests' files API
r = requests.post(url, data=fields, files=files)
print(bucket)
# sampleDict = {'hello':'world'}
%time S3.save(key,sampleDict,bucket)
%time S3.load(key,bucket, useUrl = True)
%time S3.saveZl(key,sampleDict,bucket)
%time S3.loadZl(key,bucket, useUrl = True)
%time S3.savePklZl(key,sampleDict,bucket)
%time r = S3.loadPklZl(key,bucket, useUrl = True)
%time S3.saveRaw(key,sampleDict,bucket)
%time r = S3.loadRaw(key,bucket, useUrl = True)
%time url = S3.presign(key, bucket=bucket, checkExist=False)
%time r = requests.get(url)
# Round-trip a small two-column frame through the DataFrame save/load helpers.
import pandas as pd

frame = pd.DataFrame({'test': [1, 2, 3, 4, 5], 'test2': [2, 3, 4, 5, 6]})
S3.saveDataFrame(bucket, key, frame)
S3.loadDataFrame(bucket, key)
# Benchmark presign() with vs. without the existence check, then time
# S3.exist() and the raw boto3 head_object call it presumably wraps.
%time S3.presign(key='allData', bucket = 'product-bucket-dev-manual')
%time S3.presign(key='allData', bucket = 'product-bucket-dev-manual', checkExist=False)
%timeit S3.exist(key='allData', bucket = 'product-bucket-dev-manual')
%%time
import boto3
from botocore.errorfactory import ClientError
s3 = boto3.client('s3')
try:
    # EAFP existence check: head_object raises ClientError when the key is absent.
    s3.head_object(Bucket='product-bucket-dev-manual', Key='allData')
except ClientError:
    # Not found
    pass