# Declare the objectStorageManager
object_storage_manager = tc.objectStorageManager(endpoint='<http/https>://<object_storage_endpoint>:<port>',
                                                 access_key='<user>',
                                                 secret_key='<password>')

# Upload test-file.txt to python-test-bucket/output/example.txt
object_storage_manager.upload_file(bucket_name='python-test-bucket',
                                   destination_file='/output/example.txt',
                                   source_file='test-file.txt')

# Retrieve example.txt and apply the print method to every 3-byte chunk
object_storage_manager.process_file(bucket_name='python-test-bucket',
                                    file='/output/example.txt',
                                    processing_method=print,
                                    chunk_size=3)

# Custom method that writes the file chunks to a CSV file (it receives and writes bytes)
def customCSVProcessingMethod(file_chunk):
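    # Body not shown in the source; minimal sketch assuming each bytes chunk is
    # appended to a local CSV file ('chunks.csv' is a hypothetical name)
    with open('chunks.csv', 'ab') as csv_file:
        csv_file.write(file_chunk)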

# Upload CSV
object_storage_manager.upload_file(bucket_name='python-test-bucket',
                                   destination_file='/output/reallyBigFile.csv',
                                   source_file='movimientos_padronales_20250822_v2.csv')

# Retrieve reallyBigFile.csv and apply customCSVProcessingMethod to every 1000000-byte chunk
object_storage_manager.process_file(bucket_name='python-test-bucket',
                                    file='/output/reallyBigFile.csv',
                                    processing_method=customCSVProcessingMethod,
                                    chunk_size=1000000)