Use more `with` statements

This commit is contained in:
flan 2017-11-04 18:07:50 +01:00
parent 1677ed38b9
commit 0e5bbf4f9e
2 changed files with 32 additions and 40 deletions

View File

@ -179,7 +179,8 @@ class SyncMediaHandler(anki.sync.MediaSyncer):
file_path = os.path.join(self.col.media.dir(), filename)
# Save file to media directory.
open(file_path, 'wb').write(file_data)
with open(file_path, 'wb') as f:
f.write(file_data)
mtime = self.col.media._mtime(file_path)
media_to_add.append((filename, csum, mtime, 0))
@ -242,18 +243,17 @@ class SyncMediaHandler(anki.sync.MediaSyncer):
cnt = 0
sz = 0
f = BytesIO()
z = zipfile.ZipFile(f, "w", compression=zipfile.ZIP_DEFLATED)
for fname in files:
z.write(os.path.join(self.col.media.dir(), fname), str(cnt))
flist[str(cnt)] = fname
sz += os.path.getsize(os.path.join(self.col.media.dir(), fname))
if sz > SYNC_ZIP_SIZE or cnt > SYNC_ZIP_COUNT:
break
cnt += 1
with zipfile.ZipFile(f, "w", compression=zipfile.ZIP_DEFLATED) as z:
for fname in files:
z.write(os.path.join(self.col.media.dir(), fname), str(cnt))
flist[str(cnt)] = fname
sz += os.path.getsize(os.path.join(self.col.media.dir(), fname))
if sz > SYNC_ZIP_SIZE or cnt > SYNC_ZIP_COUNT:
break
cnt += 1
z.writestr("_meta", json.dumps(flist))
z.close()
z.writestr("_meta", json.dumps(flist))
return f.getvalue()
@ -389,9 +389,8 @@ class SyncApp:
import gzip
if compression:
buf = gzip.GzipFile(mode="rb", fileobj=BytesIO(data))
data = buf.read()
buf.close()
with gzip.GzipFile(mode="rb", fileobj=BytesIO(data)) as gz:
data = gz.read()
try:
data = json.loads(data.decode())
@ -423,11 +422,10 @@ class SyncApp:
f.write(data)
try:
test_db = anki.db.DB(temp_db_path)
if test_db.scalar("pragma integrity_check") != "ok":
raise HTTPBadRequest("Integrity check failed for uploaded "
"collection database file.")
test_db.close()
with anki.db.DB(temp_db_path) as test_db:
if test_db.scalar("pragma integrity_check") != "ok":
raise HTTPBadRequest("Integrity check failed for uploaded "
"collection database file.")
except sqlite.Error as e:
raise HTTPBadRequest("Uploaded collection database file is "
"corrupt.")

View File

@ -26,7 +26,8 @@ def create_named_file(filename, file_contents=None):
file_path = os.path.join(temp_file_parent_dir, filename)
if file_contents is not None:
open(file_path, 'w').write(file_contents)
with open(file_path, "w") as f:
f.write(file_contents)
return file_path
@ -41,30 +42,23 @@ def create_zip_with_existing_files(file_paths):
:return: the data of the created zip file
"""
file_buffer = BytesIO()
zip_file = zipfile.ZipFile(file_buffer,
'w',
compression=zipfile.ZIP_DEFLATED)
buf = BytesIO()
with zipfile.ZipFile(buf, "w", compression=zipfile.ZIP_DEFLATED) as z:
meta = []
size = 0
meta = []
sz = 0
for index, path in enumerate(file_paths):
z.write(path, str(index))
normname = unicodedata.normalize("NFC", os.path.basename(path))
meta.append((normname, str(index)))
for count, filePath in enumerate(file_paths):
zip_file.write(filePath, str(count))
normname = unicodedata.normalize(
"NFC",
os.path.basename(filePath)
)
meta.append((normname, str(count)))
size += os.path.getsize(path)
if size >= SYNC_ZIP_SIZE:
break
sz += os.path.getsize(filePath)
if sz >= SYNC_ZIP_SIZE:
break
z.writestr("_meta", json.dumps(meta))
zip_file.writestr("_meta", json.dumps(meta))
zip_file.close()
return file_buffer.getvalue()
return buf.getvalue()
def get_asset_path(relative_file_path):