Register
Login
Resources
Docs Blog Datasets Glossary Case Studies Tutorials & Webinars
Product
Data Engine LLMs Platform Enterprise
Pricing Explore
Connect to our Discord channel

PackedFaceset.py 5.4 KB

You have to be logged in to leave a comment. Sign In
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
  1. import pickle
  2. import shutil
  3. import struct
  4. from pathlib import Path
  5. import samplelib.SampleLoader
  6. from core.interact import interact as io
  7. from samplelib import Sample
  8. from core import pathex
  9. packed_faceset_filename = 'faceset.pak'
  10. class PackedFaceset():
  11. VERSION = 1
  12. @staticmethod
  13. def pack(samples_path):
  14. samples_dat_path = samples_path / packed_faceset_filename
  15. if samples_dat_path.exists():
  16. io.log_info(f"{samples_dat_path} : file already exists !")
  17. io.input("Press enter to continue and overwrite.")
  18. as_person_faceset = False
  19. dir_names = pathex.get_all_dir_names(samples_path)
  20. if len(dir_names) != 0:
  21. as_person_faceset = io.input_bool(f"{len(dir_names)} subdirectories found, process as person faceset?", True)
  22. if as_person_faceset:
  23. image_paths = []
  24. for dir_name in dir_names:
  25. image_paths += pathex.get_image_paths(samples_path / dir_name)
  26. else:
  27. image_paths = pathex.get_image_paths(samples_path)
  28. samples = samplelib.SampleLoader.load_face_samples(image_paths)
  29. samples_len = len(samples)
  30. samples_configs = []
  31. for sample in io.progress_bar_generator (samples, "Processing"):
  32. sample_filepath = Path(sample.filename)
  33. sample.filename = sample_filepath.name
  34. if as_person_faceset:
  35. sample.person_name = sample_filepath.parent.name
  36. samples_configs.append ( sample.get_config() )
  37. samples_bytes = pickle.dumps(samples_configs, 4)
  38. of = open(samples_dat_path, "wb")
  39. of.write ( struct.pack ("Q", PackedFaceset.VERSION ) )
  40. of.write ( struct.pack ("Q", len(samples_bytes) ) )
  41. of.write ( samples_bytes )
  42. del samples_bytes #just free mem
  43. del samples_configs
  44. sample_data_table_offset = of.tell()
  45. of.write ( bytes( 8*(samples_len+1) ) ) #sample data offset table
  46. data_start_offset = of.tell()
  47. offsets = []
  48. for sample in io.progress_bar_generator(samples, "Packing"):
  49. try:
  50. if sample.person_name is not None:
  51. sample_path = samples_path / sample.person_name / sample.filename
  52. else:
  53. sample_path = samples_path / sample.filename
  54. with open(sample_path, "rb") as f:
  55. b = f.read()
  56. offsets.append ( of.tell() - data_start_offset )
  57. of.write(b)
  58. except:
  59. raise Exception(f"error while processing sample {sample_path}")
  60. offsets.append ( of.tell() )
  61. of.seek(sample_data_table_offset, 0)
  62. for offset in offsets:
  63. of.write ( struct.pack("Q", offset) )
  64. of.seek(0,2)
  65. of.close()
  66. if io.input_bool(f"Delete original files?", True):
  67. for filename in io.progress_bar_generator(image_paths, "Deleting files"):
  68. Path(filename).unlink()
  69. if as_person_faceset:
  70. for dir_name in io.progress_bar_generator(dir_names, "Deleting dirs"):
  71. dir_path = samples_path / dir_name
  72. try:
  73. shutil.rmtree(dir_path)
  74. except:
  75. io.log_info (f"unable to remove: {dir_path} ")
  76. @staticmethod
  77. def unpack(samples_path):
  78. samples_dat_path = samples_path / packed_faceset_filename
  79. if not samples_dat_path.exists():
  80. io.log_info(f"{samples_dat_path} : file not found.")
  81. return
  82. samples = PackedFaceset.load(samples_path)
  83. for sample in io.progress_bar_generator(samples, "Unpacking"):
  84. person_name = sample.person_name
  85. if person_name is not None:
  86. person_path = samples_path / person_name
  87. person_path.mkdir(parents=True, exist_ok=True)
  88. target_filepath = person_path / sample.filename
  89. else:
  90. target_filepath = samples_path / sample.filename
  91. with open(target_filepath, "wb") as f:
  92. f.write( sample.read_raw_file() )
  93. samples_dat_path.unlink()
  94. @staticmethod
  95. def path_contains(samples_path):
  96. samples_dat_path = samples_path / packed_faceset_filename
  97. return samples_dat_path.exists()
  98. @staticmethod
  99. def load(samples_path):
  100. samples_dat_path = samples_path / packed_faceset_filename
  101. if not samples_dat_path.exists():
  102. return None
  103. f = open(samples_dat_path, "rb")
  104. version, = struct.unpack("Q", f.read(8) )
  105. if version != PackedFaceset.VERSION:
  106. raise NotImplementedError
  107. sizeof_samples_bytes, = struct.unpack("Q", f.read(8) )
  108. samples_configs = pickle.loads ( f.read(sizeof_samples_bytes) )
  109. samples = []
  110. for sample_config in samples_configs:
  111. sample_config = pickle.loads(pickle.dumps (sample_config))
  112. samples.append ( Sample (**sample_config) )
  113. offsets = [ struct.unpack("Q", f.read(8) )[0] for _ in range(len(samples)+1) ]
  114. data_start_offset = f.tell()
  115. f.close()
  116. for i, sample in enumerate(samples):
  117. start_offset, end_offset = offsets[i], offsets[i+1]
  118. sample.set_filename_offset_size( str(samples_dat_path), data_start_offset+start_offset, end_offset-start_offset )
  119. return samples
Tip!

Press p to see the previous file, or n to see the next file

Comments

Loading...