
Commit 744081c

flake8 happy
1 parent e9cf869 commit 744081c

4 files changed (+25, -23 lines)


backup.py

Lines changed: 3 additions & 2 deletions
@@ -1,7 +1,6 @@
 from fileupload import FileUpload
 from inventory import Inventory
 import boto3
-import botocore
 import cli
 import json
 import yaml
@@ -10,6 +9,7 @@
 config = yaml.safe_load(open('config.yaml'))
 client = boto3.client('glacier')
 
+
 def upload_file(filePath):
     file_upload = FileUpload(config['vaultName'], filePath)
     cli.pp(file_upload.upload(client))
@@ -50,7 +50,8 @@ def sync(vaultName):
 
 # perform_inventory(config['vaultName'])
 # list_jobs(config['vaultName'])
-# fetch_inventory(config['vaultName'], '-YDD4AVvtcn6rn7zEYz8SF2HzNdLqqIhRnduONtSTz40jOBfAvIvycrfGJNijSefJHDS8D8A9tOCNxv6akFckF81Z493')
+# fetch_inventory(config['vaultName'],
+#                 '-YDD4AVvtcn6rn7zEYz8SF2HzNdLqqIhRnduONtSTz40jOBfAvIvycrfGJNijSefJHDS8D8A9tOCNxv6akFckF81Z493')
 
 upload_file(config['filePath'])
 

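The commented-out helpers at the bottom of backup.py (perform_inventory, list_jobs, fetch_inventory) are defined outside the hunks shown above. As a rough sketch of what such helpers typically wrap with the boto3 Glacier client: the function bodies below are assumptions for illustration, only initiate_job, list_jobs and get_job_output are standard boto3 calls.

    import boto3

    client = boto3.client('glacier')


    def perform_inventory(vaultName):
        # Start an asynchronous inventory-retrieval job; Glacier returns a job ID.
        return client.initiate_job(
            vaultName=vaultName,
            jobParameters={'Type': 'inventory-retrieval'})


    def list_jobs(vaultName):
        # List in-progress and recently completed jobs for the vault.
        return client.list_jobs(vaultName=vaultName)


    def fetch_inventory(vaultName, jobId):
        # Once the job completes, download its output (the vault inventory JSON).
        response = client.get_job_output(vaultName=vaultName, jobId=jobId)
        return response['body'].read()
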
cli.py

Lines changed: 3 additions & 1 deletion
@@ -3,9 +3,11 @@
 import sys
 import time
 
+
 def get_console_dimensions():
     return os.popen('stty size', 'r').read().split()
 
+
 def format_timespan(seconds):
     m, s = divmod(seconds, 60)
     h, m = divmod(m, 60)
@@ -14,7 +16,7 @@ def format_timespan(seconds):
 
 def format_filesize(bytes, decimal_digits=1):
     format_str = "%." + str(decimal_digits) + "f %sB"
-    for unit in ['','K','M','G','T','P','E','Z']:
+    for unit in ['', 'K', 'M', 'G', 'T', 'P', 'E', 'Z']:
         if abs(bytes) < 1024.0:
             return format_str % (bytes, unit)
         bytes /= 1024.0

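The format_filesize change only inserts the whitespace flake8 asks for after each comma (E231); behaviour is unchanged. For context, a self-contained version of the helper with a usage example; the final fall-through return is an assumption, since it sits outside the hunk shown above.

    def format_filesize(bytes, decimal_digits=1):
        # Divide by 1024 until the value fits the current unit.
        format_str = "%." + str(decimal_digits) + "f %sB"
        for unit in ['', 'K', 'M', 'G', 'T', 'P', 'E', 'Z']:
            if abs(bytes) < 1024.0:
                return format_str % (bytes, unit)
            bytes /= 1024.0
        return format_str % (bytes, 'Y')  # assumed fall-through to yottabytes


    print(format_filesize(1536))        # 1.5 KB
    print(format_filesize(10 ** 9, 0))  # 954 MB
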
fileupload.py

Lines changed: 14 additions & 15 deletions
@@ -4,6 +4,7 @@
 import os
 import time
 
+
 class FileUpload:
 
     def __init__(self, vaultName, filePath):
@@ -14,19 +15,16 @@ def __init__(self, vaultName, filePath):
         self._partSize = get_best_part_size(self._fileSizeBytes)
         self._partNumUploading = 0
 
-
     def formattedFileSize(self):
         if not hasattr(self, '_formattedFileSize'):
             self._formattedFileSize = cli.format_filesize(self._fileSizeBytes)
         return self._formattedFileSize
 
-
     def formattedPartSize(self):
         if not hasattr(self, '_formattedPartSize'):
             self._formattedPartSize = cli.format_filesize(self._partSize, 0)
         return self._formattedPartSize
 
-
     def upload(self, client):
 
         self._upload = client.initiate_multipart_upload(
@@ -59,31 +57,31 @@ def upload(self, client):
             checksum=treehash.hexdigest())
 
         cli.cli_progress(self._fileName,
-            self.formattedFileSize(),
-            self.formattedPartSize(),
-            self._startTime,
-            self._fileSizeBytes-1,
-            self._fileSizeBytes-1)
+                         self.formattedFileSize(),
+                         self.formattedPartSize(),
+                         self._startTime,
+                         self._fileSizeBytes-1,
+                         self._fileSizeBytes-1)
 
         return response
 
-
     def _upload_part(self,
                      client,
                      part,
                      partBegin,
                      partEnd):
 
         cli.cli_progress(self._fileName,
-            self.formattedFileSize(),
-            self.formattedPartSize(),
-            self._startTime,
-            partBegin,
-            self._fileSizeBytes-1)
+                         self.formattedFileSize(),
+                         self.formattedPartSize(),
+                         self._startTime,
+                         partBegin,
+                         self._fileSizeBytes-1)
 
         for upload_attempt in range(0, 2):
             # print 'Uploading bytes %d through %d (%d%%)...' % (
-            #     partBegin, partEnd, float(partEnd)/(self._fileSizeBytes-1)*100)
+            #     partBegin, partEnd,
+            #     float(partEnd)/(self._fileSizeBytes-1)*100)
             try:
                 response = client.upload_multipart_part(
                     vaultName=self._vaultName,
@@ -97,6 +95,7 @@ def _upload_part(self,
 
         print "\nFAILED"
 
+
 def get_best_part_size(fileSizeBytes):
     # We want the smallest possible part size. Maximum parts is 10,000.
     # So we find the first part size larger than file_len/10,000.

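get_best_part_size itself is only touched by the added blank line, and its body lies outside the hunks above. Going by its comment, a minimal sketch of the calculation might look like the following; the exact implementation is an assumption, though the 1 MiB to 4 GiB power-of-two range and the 10,000-part cap are Glacier's documented multipart limits.

    def get_best_part_size(fileSizeBytes):
        # Start at the 1 MiB minimum and double until 10,000 parts of this
        # size cover the whole file, never exceeding the 4 GiB maximum.
        part_size = 1024 * 1024
        while part_size * 10000 < fileSizeBytes and part_size < 4 * 1024 ** 3:
            part_size *= 2
        return part_size

For a 100 GiB archive this would settle on 16 MiB parts, the first power of two large enough for 10,000 parts to span the file.
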
inventory.py

Lines changed: 5 additions & 5 deletions
@@ -2,6 +2,7 @@
 import os
 from enum import Enum
 
+
 class Inventory:
 
     _FILENAME = 'pyback_inventory.json'
@@ -18,21 +19,18 @@ def __init__(self, dir_path):
         contents_list = os.listdir(dir_path)
         for entry in contents_list:
             if (os.path.isfile(os.path.join(dir_path, entry))
-                and entry not in self._entries):
+                    and entry not in self._entries):
                 self._entries[entry] = {
                     u'state': FileState.NEW
                 }
 
-
     def save(self):
         with open(self.file_path, 'w') as outfile:
             json.dump(self._entries, outfile, cls=EnumEncoder)
 
-
    def set_state(self, fileName, new_state):
         self._entries[fileName]['state'] = new_state
 
-
     def get_state(self, fileName):
         return self._entries[fileName]['state']
 
@@ -42,7 +40,8 @@ class FileState(Enum):
     NEW = 0
     IN_PROGRESS = 1
     UPLOADED = 2
-    VERIFIED = 3 # Appears in an inventory
+    VERIFIED = 3  # Appears in an inventory
+
 
 # Stolen from
 # http://stackoverflow.com/questions/24481852/serialising-an-enum-member-to-json
@@ -52,6 +51,7 @@ def default(self, obj):
             return {"__enum__": str(obj)}
         return json.JSONEncoder.default(self, obj)
 
+
 def as_enum(d):
     if "__enum__" in d:
         name, member = d["__enum__"].split(".")

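The EnumEncoder / as_enum pair (adapted, per the comment, from the Stack Overflow answer linked above) round-trips FileState members through JSON by tagging them with an "__enum__" key. A minimal self-contained sketch of how the two ends fit together; the PUBLIC_ENUMS lookup table is one common way to resolve the enum class by name and is an assumption here, not code from this diff, and the 'photo.jpg' entry is a hypothetical inventory item.

    import json
    from enum import Enum


    class FileState(Enum):
        NEW = 0
        IN_PROGRESS = 1
        UPLOADED = 2
        VERIFIED = 3  # Appears in an inventory


    PUBLIC_ENUMS = {'FileState': FileState}


    class EnumEncoder(json.JSONEncoder):
        def default(self, obj):
            if type(obj) in PUBLIC_ENUMS.values():
                # Serialise the member as {"__enum__": "FileState.NEW"}.
                return {"__enum__": str(obj)}
            return json.JSONEncoder.default(self, obj)


    def as_enum(d):
        if "__enum__" in d:
            # "FileState.NEW" -> look up the class, then the member.
            name, member = d["__enum__"].split(".")
            return getattr(PUBLIC_ENUMS[name], member)
        return d


    encoded = json.dumps({'photo.jpg': {'state': FileState.NEW}}, cls=EnumEncoder)
    decoded = json.loads(encoded, object_hook=as_enum)
    assert decoded['photo.jpg']['state'] is FileState.NEW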