GitHub Repository: iperov/deepfacelab
Path: blob/master/samplelib/PackedFaceset.py
import pickle
import shutil
import struct
from pathlib import Path

import samplelib.SampleLoader
from core.interact import interact as io
from samplelib import Sample
from core import pathex

packed_faceset_filename = 'faceset.pak'

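# On-disk layout of faceset.pak, as implied by pack() and load() below
# (every integer is written with struct format "Q", i.e. native unsigned 64-bit):
#
#   [ Q ]                     PackedFaceset.VERSION
#   [ Q ]                     length of the pickled sample-config blob
#   [ ... ]                   pickle.dumps(samples_configs, protocol 4)
#   [ Q x (num_samples + 1) ] offset table, offsets relative to the start of the
#                             image-data area; the extra final entry marks its end
#   [ ... ]                   concatenated raw bytes of every sample's image file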
class PackedFaceset():
    VERSION = 1

    @staticmethod
    def pack(samples_path):
        samples_dat_path = samples_path / packed_faceset_filename

        if samples_dat_path.exists():
            io.log_info(f"{samples_dat_path} : file already exists !")
            io.input("Press enter to continue and overwrite.")

        as_person_faceset = False
        dir_names = pathex.get_all_dir_names(samples_path)
        if len(dir_names) != 0:
            as_person_faceset = io.input_bool(f"{len(dir_names)} subdirectories found, process as person faceset?", True)

        if as_person_faceset:
            image_paths = []

            for dir_name in dir_names:
                image_paths += pathex.get_image_paths(samples_path / dir_name)
        else:
            image_paths = pathex.get_image_paths(samples_path)

        samples = samplelib.SampleLoader.load_face_samples(image_paths)
        samples_len = len(samples)

        # Strip directory components from each filename and collect the sample
        # config dicts; for person facesets, remember the subdirectory name.
        samples_configs = []
        for sample in io.progress_bar_generator(samples, "Processing"):
            sample_filepath = Path(sample.filename)
            sample.filename = sample_filepath.name

            if as_person_faceset:
                sample.person_name = sample_filepath.parent.name
            samples_configs.append( sample.get_config() )
        samples_bytes = pickle.dumps(samples_configs, 4)

        # Header: version, size of the pickled configs, then the configs themselves.
        of = open(samples_dat_path, "wb")
        of.write( struct.pack("Q", PackedFaceset.VERSION) )
        of.write( struct.pack("Q", len(samples_bytes)) )
        of.write( samples_bytes )

        del samples_bytes    # free memory before packing the image data
        del samples_configs

        # Reserve space for the sample data offset table; it is filled in below
        # once the real offsets are known.
        sample_data_table_offset = of.tell()
        of.write( bytes( 8*(samples_len+1) ) )

        data_start_offset = of.tell()
        offsets = []

        for sample in io.progress_bar_generator(samples, "Packing"):
            try:
                if sample.person_name is not None:
                    sample_path = samples_path / sample.person_name / sample.filename
                else:
                    sample_path = samples_path / sample.filename

                with open(sample_path, "rb") as f:
                    b = f.read()

                offsets.append( of.tell() - data_start_offset )
                of.write(b)
            except Exception as e:
                raise Exception(f"error while processing sample {sample_path}") from e

        # Final entry marks the end of the last sample, relative to
        # data_start_offset like every other entry in the table.
        offsets.append( of.tell() - data_start_offset )

        # Go back and fill in the offset table, then return to the end of the file.
        of.seek(sample_data_table_offset, 0)
        for offset in offsets:
            of.write( struct.pack("Q", offset) )
        of.seek(0, 2)
        of.close()

        if io.input_bool("Delete original files?", True):
            for filename in io.progress_bar_generator(image_paths, "Deleting files"):
                Path(filename).unlink()

            if as_person_faceset:
                for dir_name in io.progress_bar_generator(dir_names, "Deleting dirs"):
                    dir_path = samples_path / dir_name
                    try:
                        shutil.rmtree(dir_path)
                    except Exception:
                        io.log_info(f"unable to remove: {dir_path} ")

    @staticmethod
    def unpack(samples_path):
        samples_dat_path = samples_path / packed_faceset_filename
        if not samples_dat_path.exists():
            io.log_info(f"{samples_dat_path} : file not found.")
            return

        samples = PackedFaceset.load(samples_path)

        # Write every sample back out as a loose image file, recreating
        # per-person subdirectories where needed.
        for sample in io.progress_bar_generator(samples, "Unpacking"):
            person_name = sample.person_name
            if person_name is not None:
                person_path = samples_path / person_name
                person_path.mkdir(parents=True, exist_ok=True)

                target_filepath = person_path / sample.filename
            else:
                target_filepath = samples_path / sample.filename

            with open(target_filepath, "wb") as f:
                f.write( sample.read_raw_file() )

        samples_dat_path.unlink()

    @staticmethod
    def path_contains(samples_path):
        samples_dat_path = samples_path / packed_faceset_filename
        return samples_dat_path.exists()

    @staticmethod
    def load(samples_path):
        samples_dat_path = samples_path / packed_faceset_filename
        if not samples_dat_path.exists():
            return None

        f = open(samples_dat_path, "rb")
        version, = struct.unpack("Q", f.read(8))
        if version != PackedFaceset.VERSION:
            raise NotImplementedError

        sizeof_samples_bytes, = struct.unpack("Q", f.read(8))

        samples_configs = pickle.loads( f.read(sizeof_samples_bytes) )
        samples = []
        for sample_config in samples_configs:
            # pickle round trip gives each Sample its own deep copy of the config
            sample_config = pickle.loads(pickle.dumps(sample_config))
            samples.append( Sample(**sample_config) )

        offsets = [ struct.unpack("Q", f.read(8))[0] for _ in range(len(samples)+1) ]
        data_start_offset = f.tell()
        f.close()

        # Point each sample at its byte range inside faceset.pak instead of a loose file.
        for i, sample in enumerate(samples):
            start_offset, end_offset = offsets[i], offsets[i+1]
            sample.set_filename_offset_size( str(samples_dat_path), data_start_offset+start_offset, end_offset-start_offset )

        return samples
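A minimal usage sketch, assuming DeepFaceLab's samplelib and core packages are importable; the aligned_path directory below is a hypothetical example, and pack() prompts interactively via core.interact before overwriting or deleting anything.

# Usage sketch (not part of PackedFaceset.py); aligned_path is a hypothetical example.
from pathlib import Path

from samplelib.PackedFaceset import PackedFaceset, packed_faceset_filename

aligned_path = Path('workspace/data_src/aligned')   # hypothetical directory of extracted faces

# Pack the loose face images (and any per-person subdirectories) into faceset.pak.
PackedFaceset.pack(aligned_path)

# Read the samples straight from the pack without unpacking to disk.
if PackedFaceset.path_contains(aligned_path):
    samples = PackedFaceset.load(aligned_path)
    print(f"{len(samples)} samples in {aligned_path / packed_faceset_filename}")

# Restore the original loose files and remove faceset.pak.
PackedFaceset.unpack(aligned_path)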