Remove the part of the code that reads the file at once if enough RAM is available

Based on suggestions from @prusnak, I removed the part of the code that checks whether the user has enough RAM to read the entire model at once. The file is now always read in chunks.
This commit is contained in:
KASR 2023-04-27 16:55:39 +02:00 committed by GitHub
parent 24317a510e
commit 6ddce362ef
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23

View file

@@ -1,33 +1,20 @@
import os import os
import hashlib import hashlib
import psutil
def sha256sum(file):
    """Return the SHA-256 hex digest of *file*, reading it in fixed-size chunks.

    The file is always streamed in 16 MB blocks so memory use stays bounded
    regardless of the file's size.

    :param file: path to the file to hash
    :return: hexadecimal SHA-256 digest string
    :raises OSError: if the file cannot be opened or read
    """
    block_size = 16 * 1024 * 1024  # 16 MB block size
    b = bytearray(block_size)
    file_hash = hashlib.sha256()
    # memoryview lets readinto() fill the buffer and lets us slice it
    # without copying the underlying bytes.
    mv = memoryview(b)
    # buffering=0 opens the raw unbuffered stream; we do our own
    # block-sized reads, so an extra buffering layer would only copy.
    with open(file, 'rb', buffering=0) as f:
        while True:
            n = f.readinto(mv)
            if not n:  # 0 bytes read means EOF
                break
            file_hash.update(mv[:n])
    return file_hash.hexdigest()
# Define the path to the llama directory (parent folder of script directory)
_script_dir = os.path.dirname(__file__)
llama_path = os.path.abspath(os.path.join(_script_dir, os.pardir))