improve backward compatibility and fix minor bugs
parent cb8bc31517
commit 0097e71e29
@@ -148,7 +148,9 @@ file_size = os.stat(fn).st_size
 print(f"[init:video] File size: {file_size} bytes (~{round(file_size/1024/1024, 2)} MB)")
 
 print("[init:video] Computing checksum... (might take some time)")
-file_hash = hashlib.file_digest(open(fn, 'rb'), 'sha256').hexdigest()
+with open(fn, 'rb') as f:
+    file_hash = hashlib.sha256(f.read()).hexdigest()
+#file_hash = hashlib.file_digest(open(fn, 'rb'), 'sha256').hexdigest()
 print(f"[init:video] Checksum for the file: {file_hash}")
 
 total_chunk_amount = math.ceil(duration / chunk_length)
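Note: hashlib.file_digest() only exists on Python 3.11+, so the old line raised AttributeError on older interpreters; hashing the file's bytes with hashlib.sha256() works everywhere. The trade-off is that f.read() loads the whole video into memory at once. A minimal streaming sketch, not part of this commit (the helper name and 1 MiB block size are illustrative):

    import hashlib

    def sha256_of_file(path, block_size=1024 * 1024):
        """Hash the file in fixed-size blocks so large videos never sit in RAM whole."""
        h = hashlib.sha256()
        with open(path, 'rb') as f:
            while True:
                block = f.read(block_size)
                if not block:  # read() returns b'' at EOF
                    break
                h.update(block)
        return h.hexdigest()

This avoids the walrus operator on purpose, keeping the sketch valid on the same older interpreters the commit targets.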
@@ -222,19 +224,18 @@ def connection_processor(conn, addr):
 
     print(f"[{addr[0]}:{addr[1]} - parser] Received command: {' '.join(header)}")
 
-    match header[0]:
-        case "DEBUG":
+    if header[0] == "DEBUG":
             with lock:
                 conn.send(f"DEBUG\nCHUNKS {str(chunk_queue)}\n".encode("UTF-8"))
-        case "HASH":
+    elif header[0] == "HASH":
             conn.send(f"HASH {file_hash}\n".encode("UTF-8"))
-        case "SIZE":
+    elif header[0] == "SIZE":
             conn.send(f"SIZE {file_size}\n".encode("UTF-8"))
-        case "URL":
+    elif header[0] == "URL":
             conn.send(f"URL {video_url}\n".encode("UTF-8"))
-        case "SCRIPT":
+    elif header[0] == "SCRIPT":
             conn.send(f"SCRIPT {script_url}\n".encode("UTF-8"))
-        case "NEXT":
+    elif header[0] == "NEXT":
             with lock:
                 next_chunk = chunk_queue.next_wanted()
                 if next_chunk:
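Note: match/case is new syntax in Python 3.10, so on anything older the module fails to parse at all; the if/elif chain is the direct portable rewrite. For a command set this flat, a handler table is another version-agnostic shape. A sketch only (the handler names and dispatch() helper are illustrative, not from this commit; file_hash and file_size are assumed to be the module-level globals the server already defines):

    def handle_hash(conn, header):
        conn.send(f"HASH {file_hash}\n".encode("UTF-8"))

    def handle_size(conn, header):
        conn.send(f"SIZE {file_size}\n".encode("UTF-8"))

    HANDLERS = {"HASH": handle_hash, "SIZE": handle_size}  # one entry per command

    def dispatch(conn, header):
        # Unknown commands get the same UNSUPPORTED reply as the final else branch
        handler = HANDLERS.get(header[0])
        if handler:
            handler(conn, header)
        else:
            conn.send("UNSUPPORTED\n".encode("UTF-8"))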
@@ -248,8 +249,8 @@ def connection_processor(conn, addr):
                 if chunk_queue.processing_finished():
                     conn.send("FINISH\n".encode("UTF-8"))
                 else:
-                    conn.send(f"WAIT {CHUNK_CHECK_INTERVAL}")
-        case "PING":
+                    conn.send(f"WAIT {CHUNK_CHECK_INTERVAL}\n".encode("UTF-8"))
+    elif header[0] == "PING":
             if file_hash != header[1]:
                 print(f"[{addr[0]}:{addr[1]} - PING] Hash mismatch: expected {file_hash}, got {header[1]}")
                 conn.send("RESET\n".encode("UTF-8"))
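Note: this hunk fixes a real bug, not just syntax. The old WAIT reply passed a str to socket.send(), which raises TypeError at runtime, and it lacked the trailing newline the line-oriented protocol needs. A small helper would make this class of mistake hard to repeat; a sketch, send_line() is not part of this commit:

    def send_line(conn, *parts):
        """Join, newline-terminate, and encode a protocol reply before sending.

        socket.send() only accepts bytes; sendall() additionally retries
        partial writes, which plain send() does not guarantee.
        """
        conn.sendall((" ".join(str(p) for p in parts) + "\n").encode("UTF-8"))

    # Usage: send_line(conn, "WAIT", CHUNK_CHECK_INTERVAL)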
@@ -302,7 +303,7 @@ def connection_processor(conn, addr):
                 status = "waiting"
 
             conn.send(f"PONG {status}\n".encode("UTF-8"))
-        case "UPLOAD":
+    elif header[0] == "UPLOAD":
             if file_hash != header[1]:
                 print(f"[{addr[0]}:{addr[1]} - UPLOAD] Hash mismatch: expected {file_hash}, got {header[1]}")
                 conn.send("RESET\n".encode("UTF-8"))
@@ -377,7 +378,7 @@ def connection_processor(conn, addr):
 
             open("ready.txt", "a").write(f"{header[2]}\n")
             chunk.done = True
-        case "REUPLOAD":
+    elif header[0] == "REUPLOAD":
             if file_hash != header[1]:
                 print(f"[{addr[0]}:{addr[1]} - UPLOAD] Hash mismatch: expected {file_hash}, got {header[1]}")
                 conn.send("RESET\n".encode("UTF-8"))
@@ -457,7 +458,7 @@ def connection_processor(conn, addr):
 
             open("ready.txt", "a").write(f"{header[2]}\n")
             chunk.done = True
-        case "ABORT":
+    elif header[0] == "ABORT":
             if file_hash != header[1]:
                 print(f"[{addr[0]}:{addr[1]} - ABORT] Hash mismatch: expected {file_hash}, got {header[1]}")
                 conn.send("RESET\n".encode("UTF-8"))
@@ -505,7 +506,7 @@ def connection_processor(conn, addr):
             print(f"[{addr[0]}:{addr[1]} - ABORT] Removing worker {worker.id} from chunk {chunk.seq_id}, leaving chunk with {len(chunk.assigned_workers)} workers")
             chunk.remove_worker(worker)
             print(f"[{addr[0]}:{addr[1]} - ABORT] Updated chunk {chunk.seq_id}, it is now \"{chunk.status()}\"")
-        case _:
+    else:
             conn.send("UNSUPPORTED\n".encode("UTF-8"))
 
 
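Note: with file_digest() and match/case gone, nothing left in the file should need a recent interpreter. If a minimum version is intended, an explicit guard at startup fails with a clear message instead of a confusing traceback; a sketch, and the 3.8 floor is an assumption since the commit does not state one:

    import sys

    # f-strings need 3.6; 3.8 is a conservative assumed floor for this server
    if sys.version_info < (3, 8):
        sys.exit("this server requires Python 3.8 or newer")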