# Copyright (C) 2021 Victor Soupday
# This file is part of CC/iC Blender Tools <https://github.com/soupday/cc_blender_tools>
#
# CC/iC Blender Tools is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# CC/iC Blender Tools is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with CC/iC Blender Tools. If not, see <https://www.gnu.org/licenses/>.
import os
import copy
import shutil
import re
import mathutils
import math
import bpy
from filecmp import cmp
from . import (hik, rigging, rigutils, bake, shaders, physics, rigidbody, wrinkle, bones, modifiers,
               imageutils, meshutils, nodeutils, jsonutils, utils, params, vars)
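# running index used to generate unique file names when unpacking images
# (reset to 1001 at the start of each texture write-back pass)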
UNPACK_INDEX = 1001


def get_export_armature(chr_cache, objects):
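    """Return the character's armature: prefer the armature from the
    character cache, otherwise search the export objects for one."""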
    arm = None
    if chr_cache:
        arm = chr_cache.get_armature()
    if arm:
        return arm
    arm = utils.get_armature_from_objects(objects)
    return arm


def check_valid_export_fbx(chr_cache, objects):
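    """Validate the export selection for FBX: check for a missing armature,
    missing or mis-targeted armature modifiers, bad parenting and empty
    vertex groups. Returns (check_valid, check_warn, report)."""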
    report = []
    check_valid = True
    check_warn = False
    arm = get_export_armature(chr_cache, objects)
    standard = False
    if chr_cache:
        standard = chr_cache.is_standard()
    if not objects:
        message = "ERROR: Nothing to export!"
        report.append(message)
        utils.log_warn(message)
        check_valid = False
    if standard and not arm:
        if chr_cache:
            message = f"ERROR: Character {chr_cache.character_name} has no armature!"
        else:
            message = "ERROR: Character has no armature!"
        report.append(message)
        utils.log_warn(message)
        check_valid = False
    else:
        obj : bpy.types.Object
        for obj in objects:
            if obj != arm and utils.object_exists_is_mesh(obj):
                if standard:
                    armature_mod : bpy.types.ArmatureModifier = modifiers.get_object_modifier(obj, "ARMATURE")
                    if armature_mod is None:
                        message = f"ERROR: Object: {obj.name} does not have an armature modifier."
                        report.append(message)
                        utils.log_warn(message)
                        check_valid = False
                    if obj.parent != arm:
                        message = f"ERROR: Object: {obj.name} is not parented to character armature."
                        report.append(message)
                        utils.log_warn(message)
                        check_valid = False
                    if armature_mod and armature_mod.object != arm:
                        message = f"ERROR: Object: {obj.name}'s armature modifier is not set to this character's armature."
                        report.append(message)
                        utils.log_warn(message)
                        check_valid = False
                    if len(obj.vertex_groups) == 0:
                        message = f"ERROR: Object: {obj.name} has no vertex groups."
                        report.append(message)
                        utils.log_warn(message)
                        check_valid = False
                # doesn't seem to be an issue anymore
                if False and obj.type == "MESH" and obj.data and len(obj.data.vertices) < 150:
                    message = f"WARNING: Object: {obj.name} has a low number of vertices (less than 150), this can cause CTD issues with CC3's importer."
                    report.append(message)
                    utils.log_warn(message)
                    message = " (if CC3 crashes when importing this character, consider increasing vertex count or joining this object to another.)"
                    report.append(message)
                    utils.log_warn(message)
                    check_warn = True
    return check_valid, check_warn, report


def remove_modifiers_for_export(chr_cache, objects, reset_pose, rig=None):
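    """Strip the add-on's generated modifiers (occlusion, tearline, eye) from
    the export objects and, if reset_pose is set, clear any shape-key and
    armature actions and reset the pose."""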
    if not rig:
        rig = get_export_armature(chr_cache, objects)
    if not rig:
        return
    rig.data.pose_position = "POSE"
    obj : bpy.types.Object
    for obj in objects:
        if reset_pose:
            if obj.type == "MESH" and obj.data.shape_keys and obj.data.shape_keys.key_blocks:
                utils.safe_set_action(obj.data.shape_keys, None)
        if chr_cache:
            obj_cache = chr_cache.get_object_cache(obj)
            if obj_cache:
                if obj_cache.object_type == "OCCLUSION" or obj_cache.object_type == "TEARLINE" or obj_cache.object_type == "EYE":
                    mod : bpy.types.Modifier
                    for mod in obj.modifiers:
                        if vars.NODE_PREFIX in mod.name:
                            obj.modifiers.remove(mod)
    if reset_pose:
        utils.safe_set_action(rig, None)
        bones.clear_pose(rig)


def restore_modifiers(chr_cache, objects):
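    """Re-add the generated modifiers removed by remove_modifiers_for_export()."""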
    obj : bpy.types.Object
    for obj in objects:
        obj_cache = chr_cache.get_object_cache(obj)
        if obj_cache:
            if obj_cache.object_type == "OCCLUSION":
                modifiers.add_eye_occlusion_modifiers(obj)
            elif obj_cache.object_type == "TEARLINE":
                modifiers.add_tearline_modifiers(obj)
            elif obj_cache.object_type == "EYE":
                modifiers.add_eye_modifiers(obj)


def prep_export(context, chr_cache, new_name, objects, json_data, old_path, new_path,
                copy_textures, revert_duplicates, apply_fixes, as_blend_file, bake_values,
                materials=None, sync=False, force_bake=False):
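    """Prepare the character for export: sanitize object and material names to
    match CC/iC naming rules, build or update the character json data, write
    material parameters and textures back into the json, and copy or remap
    texture paths to the export location."""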
    prefs = vars.prefs()
    if sync:
        revert_duplicates = False
        apply_fixes = False
        as_blend_file = False
        bake_values = True
        copy_textures = False
    bake_nodes = prefs.export_bake_nodes
    bake_bump_to_normal = prefs.export_bake_bump_to_normal
    if force_bake:
        bake_nodes = True
        bake_bump_to_normal = True
    utils.log_info(f"Prepping Export: {new_name}")
    if as_blend_file:
        if prefs.export_unity_remove_objects:
            # remove everything not part of the character for blend file exports.
            arm = get_export_armature(chr_cache, objects)
            for obj in bpy.data.objects:
                if not (obj == arm or obj.parent == arm or chr_cache.has_object(obj)):
                    utils.log_info(f"Removing {obj.name} from blend file")
                    bpy.data.objects.remove(obj)
    if not chr_cache or not json_data:
        return None
    objects_map = {}
    physics_map = {}
    mats_processed = {}
    images_processed = {}
    # old path might be blank, so try to use blend file path or export target path
    base_path = old_path
    if not base_path:
        base_path = utils.local_path()
    if not base_path:
        base_path = new_path
    # reset and unlock shape keys
    if not sync:
        utils.reset_shape_keys(objects)
    # update character name in json data
    old_name = chr_cache.get_character_id()
    if new_name != old_name:
        if (old_name in json_data.keys() and
            old_name in json_data[old_name]["Object"].keys() and
            new_name not in json_data.keys()):
            # rename the object and character keys
            json_data[old_name]["Object"][new_name] = json_data[old_name]["Object"].pop(chr_cache.get_character_id())
            json_data[new_name] = json_data.pop(old_name)
    chr_json = json_data[new_name]["Object"][new_name]
    # create soft physics json if none
    physics_json = jsonutils.add_json_path(chr_json, "Physics/Soft Physics/Meshes")
    # set custom JSON data
    json_data[new_name]["Blender_Project"] = True
    if not copy_textures:
        json_data[new_name]["Import_Dir"] = chr_cache.get_import_dir()
        json_data[new_name]["Import_Name"] = chr_cache.get_character_id()
    else:
        json_data[new_name].pop("Import_Dir", None)
        json_data[new_name].pop("Import_Name", None)
    if not chr_cache.link_id:
        chr_cache.link_id = utils.generate_random_id(20)
    json_data[new_name]["Link_ID"] = chr_cache.link_id
    if chr_cache.is_non_standard():
        set_non_standard_generation(json_data, new_name, chr_cache.non_standard_type, chr_cache.generation)
    # unpack embedded textures.
    if chr_cache.import_embedded:
        unpack_embedded_textures(chr_cache, chr_json, objects, base_path)
    if revert_duplicates:
        # get a list of all cached materials in the export back to CC3
        export_mats = []
        for obj in objects:
            obj_cache = chr_cache.get_object_cache(obj)
            if obj_cache and obj.type == "MESH":
                for mat in obj.data.materials:
                    mat_cache = chr_cache.get_material_cache(mat)
                    if mat and mat_cache and mat not in export_mats:
                        export_mats.append(mat)
        # CC3 will replace any ' ' or '.' with underscores on export, so the only .00X suffix is from Blender
        # get a use count of each material source name (stripped of any blender duplicate name suffixes)
        mat_count = {}
        for mat in export_mats:
            mat_name = mat.name
            mat_safe_name = utils.safe_export_name(utils.strip_name(mat_name), is_material=True)
            if mat_safe_name in mat_count.keys():
                mat_count[mat_safe_name] += 1
            else:
                mat_count[mat_safe_name] = 1
        # determine a single source of any duplicate material names, prefer an exact match
        mat_remap = {}
        for mat_safe_name in mat_count.keys():
            count = mat_count[mat_safe_name]
            if count > 1:
                for mat in export_mats:
                    if mat.name == mat_safe_name:
                        mat_remap[mat_safe_name] = mat
                        break
                    elif mat.name.startswith(mat_safe_name):
                        mat_remap[mat_safe_name] = mat
    obj_names = []
    obj : bpy.types.Object
    for obj in objects:
        if not utils.object_exists_is_mesh(obj):
            continue
        utils.log_info(f"Object: {obj.name} / {obj.data.name}")
        utils.log_indent()
        obj_name = obj.name
        obj_cache = chr_cache.get_object_cache(obj)
        is_split = chr_cache.is_split_object(obj)
        split_source_name = obj_cache.source_name if (is_split and obj_cache) else None
        source_changed = False
        is_new_object = False
        if obj_cache and not is_split:
            obj_expected_source_name = utils.safe_export_name(utils.strip_name(obj_name))
            obj_source_name = obj_cache.source_name
            utils.log_info(f"Object source name: {obj_source_name}")
            source_changed = obj_expected_source_name != obj_source_name
            if source_changed:
                obj_safe_name = utils.safe_export_name(obj_name)
                utils.log_info(f"Object name changed from source, using: {obj_safe_name}")
            else:
                obj_safe_name = obj_source_name
        else:
            is_new_object = True
            obj_safe_name = utils.safe_export_name(obj_name, is_split=is_split)
            obj_source_name = obj_safe_name
            obj["rl_do_not_restore_name"] = True
        # if the Object name has been changed in some way
        if obj_name != obj_safe_name or obj.data.name != obj_safe_name:
            # if multiple objects imported had the same name there will be duplicate source names:
            # so if the object is new, renamed, or the new name is already in use, create a new unique name.
            # this will also trigger a new json object to be created, which is needed
            # as json object names should be unique and it's not possible in Blender to export
            # two different objects with the same name.
            new_obj_name = obj_safe_name
            if is_new_object or source_changed or new_obj_name in obj_names:
                new_obj_name = utils.make_unique_name_in(obj_safe_name, bpy.data.objects.keys())
            utils.log_info(f"Using new safe Object & Mesh name: {obj_name} to {new_obj_name}")
            if source_changed:
                if jsonutils.rename_json_key(chr_json["Meshes"], obj_source_name, new_obj_name):
                    utils.log_info(f"Updating Object source json name: {obj_source_name} to {new_obj_name}")
                if physics_json and jsonutils.rename_json_key(physics_json, obj_source_name, new_obj_name):
                    utils.log_info(f"Updating Physics Object source json name: {obj_source_name} to {new_obj_name}")
                obj_source_name = new_obj_name
            if not sync:
                utils.force_object_name(obj, new_obj_name)
                utils.force_mesh_name(obj.data, new_obj_name)
            obj_name = new_obj_name
            obj_safe_name = new_obj_name
        obj_names.append(obj_name)
        # fetch or create the object json
        obj_json = jsonutils.get_object_json(chr_json, obj_source_name)
        physics_mesh_json = jsonutils.get_physics_mesh_json(physics_json, obj_source_name)
        if not obj_json:
            utils.log_info(f"Adding Object Json: {obj_name}")
            obj_json = copy.deepcopy(params.JSON_MESH_DATA)
            chr_json["Meshes"][obj_name] = obj_json
        if not physics_mesh_json and obj_cache and obj_cache.cloth_physics == "ON":
            utils.log_info(f"Adding Physics Object Json: {obj_name}")
            physics_mesh_json = copy.deepcopy(params.JSON_PHYSICS_MESH)
            physics_json[obj_name] = physics_mesh_json
        # store the json keys
        obj_key = jsonutils.get_object_json_key(chr_json, obj_json)
        objects_map.setdefault(obj_key, [])
        if physics_mesh_json:
            physics_mesh_key = jsonutils.get_physics_mesh_json_key(physics_json, physics_mesh_json)
            physics_map.setdefault(physics_mesh_key, [])
        for slot in obj.material_slots:
            mat = slot.material
            if mat is None: continue
            if materials and mat not in materials: continue
            mat_name = mat.name
            mat_cache = chr_cache.get_material_cache(mat)
            source_changed = False
            new_material = False
            utils.log_info(f"Material: {mat.name}")
            utils.log_indent()
            if mat.name not in mats_processed.keys():
                mats_processed[mat.name] = { "processed": False, "write_back": False, "copied": False, "remapped": False }
            mat_data = mats_processed[mat.name]
            if mat_cache:
                mat_expected_source_name = (utils.safe_export_name(utils.strip_name(mat_name), is_material=True)
                                            if revert_duplicates else
                                            utils.safe_export_name(mat_name, is_material=True))
                mat_source_name = mat_cache.source_name
                source_changed = mat_expected_source_name != mat_source_name
                if source_changed:
                    mat_safe_name = utils.safe_export_name(mat_name, is_material=True)
                else:
                    mat_safe_name = mat_source_name
            else:
                new_material = True
                mat_safe_name = utils.safe_export_name(mat_name, is_material=True)
                mat_source_name = mat_safe_name
            if mat_name != mat_safe_name:
                new_mat_name = mat_safe_name
                if new_material or source_changed:
                    new_mat_name = utils.make_unique_name_in(mat_safe_name, bpy.data.materials.keys())
                utils.log_info(f"Using new safe Material name: {mat_name} to {new_mat_name}")
                if source_changed:
                    if jsonutils.rename_json_key(obj_json["Materials"], mat_source_name, new_mat_name):
                        utils.log_info(f"Updating material json name: {mat_source_name} to {new_mat_name}")
                    if physics_mesh_json and jsonutils.rename_json_key(physics_mesh_json["Materials"], mat_source_name, new_mat_name):
                        utils.log_info(f"Updating physics material json name: {mat_source_name} to {new_mat_name}")
                if not sync:
                    utils.force_material_name(mat, new_mat_name)
                mat_name = new_mat_name
                mat_safe_name = new_mat_name
                mat_source_name = new_mat_name
            # fetch or create the material json
            write_json = prefs.export_json_changes
            write_physics_json = write_json
            write_textures = prefs.export_texture_changes
            write_physics_textures = write_textures
            mat_json = jsonutils.get_material_json(obj_json, mat)
            physics_mat_json = jsonutils.get_physics_material_json(physics_mesh_json, mat)
            # the object and its materials may have been split from their origin,
            # so try to find the material in the source object json
            if obj_cache and mat_cache and not mat_json and split_source_name:
                split_obj_json = jsonutils.get_object_json(chr_json, split_source_name)
                if split_obj_json:
                    split_mat_json = jsonutils.get_material_json(split_obj_json, mat_source_name)
                    if split_mat_json:
                        utils.log_info(f"Copying Material Json: {mat_safe_name} from split source material: {split_source_name} / {mat_source_name}")
                        mat_json = copy.deepcopy(split_mat_json)
                if mat_json:
                    obj_json["Materials"][mat_safe_name] = mat_json
                    write_json = True
                    write_textures = True
            # then look for same material in source character objects
            if mat_cache and not mat_json:
                for other_obj_cache in chr_cache.object_cache:
                    other = other_obj_cache.get_object()
                    if utils.object_exists_is_mesh(other):
                        if mat.name in other.data.materials:
                            other_source_name = other_obj_cache.source_name
                            other_obj_json = jsonutils.get_object_json(chr_json, other_source_name)
                            if other_obj_json:
                                other_mat_json = jsonutils.get_material_json(other_obj_json, mat_source_name)
                                if other_mat_json:
                                    utils.log_info(f"Copying Material Json: {mat_safe_name} from existing material Json in Obj: {other_source_name} / {mat_source_name}")
                                    mat_json = copy.deepcopy(other_mat_json)
                                    break
                if mat_json:
                    obj_json["Materials"][mat_safe_name] = mat_json
                    write_json = True
                    write_textures = True
            # finally try to find a mat_json of the same shader type
            # with the same source material name in any mesh in the json
            if mat_cache and not mat_json:
                for o_json_name, o_json in chr_json["Meshes"].items():
                    for m_json_name, m_json in o_json["Materials"].items():
                        if m_json_name.lower() == mat_source_name.lower():
                            shader_name = params.get_rl_shader_name(mat_cache)
                            m_shader_name = jsonutils.get_custom_shader(m_json)
                            if shader_name == m_shader_name:
                                utils.log_info(f"Copying Material Json: {mat_safe_name} from existing material Json of same name and type: {o_json_name} / {m_json_name}")
                                mat_json = copy.deepcopy(m_json)
                                break
                    if mat_json:
                        break
                if mat_json:
                    obj_json["Materials"][mat_safe_name] = mat_json
                    write_json = True
                    write_textures = True
            # if still no json, try to create the material json data from the mat_cache shader def
            if mat_cache and not mat_json:
                shader_name = params.get_shader_name(mat_cache)
                json_template = params.get_mat_shader_template(mat_cache)
                utils.log_info(f"Adding Material Json: {mat_name} for Shader: {shader_name}")
                if json_template:
                    mat_json = copy.deepcopy(json_template)
                    obj_json["Materials"][mat_safe_name] = mat_json
                    write_json = True
                    write_textures = True
            # fallback default to PBR material json data
            if not mat_json:
                utils.log_info(f"Adding Default PBR Material Json: {mat_name}")
                mat_json = copy.deepcopy(params.JSON_PBR_MATERIAL)
                obj_json["Materials"][mat_safe_name] = mat_json
                write_json = True
                write_textures = True
            material_physics_enabled = physics.is_cloth_physics_enabled(mat_cache, mat, obj)
            if physics_mesh_json and not physics_mat_json and material_physics_enabled:
                physics_mat_json = copy.deepcopy(params.JSON_PHYSICS_MATERIAL)
                physics_mesh_json["Materials"][mat_safe_name] = physics_mat_json
                write_physics_json = True
                write_physics_textures = True
            # store the json keys
            mat_key = jsonutils.get_material_json_key(obj_json, mat_json)
            objects_map[obj_key].append(mat_key)
            if physics_mat_json:
                physics_mat_key = jsonutils.get_physics_material_json_key(physics_mesh_json, physics_mat_json)
                physics_map[physics_mesh_key].append(physics_mat_key)
            if mat_cache:
                utils.log_info("Writing Json:")
                utils.log_indent()
                # update the json parameters with any changes
                if write_textures:
                    write_back_textures(context, mat_json, mat, mat_cache, base_path, old_name, bake_values, mat_data,
                                        bake_nodes, bake_bump_to_normal, images_processed)
                if write_json:
                    write_back_json(mat_json, mat, mat_cache)
                if write_physics_json:
                    # there isn't a meaningful way to convert between Blender physics and RL PhysX
                    pass
                if write_physics_textures:
                    write_back_physics_weightmap(physics_mat_json, obj, mat, mat_cache, base_path, old_name, mat_data)
                if not sync and revert_duplicates:
                    # replace duplicate materials with a reference to a single source material
                    # (this is to ensure there are no duplicate suffixes in the fbx export)
                    if mat_count[mat_safe_name] > 1:
                        new_mat = mat_remap[mat_safe_name]
                        slot.material = new_mat
                        mat = new_mat
                        mat_name = new_mat.name
                    if mat_name != mat_safe_name:
                        utils.log_info(f"Reverting material name: {mat_name} to {mat_safe_name}")
                        utils.force_material_name(mat, mat_safe_name)
                utils.log_recess()
            else:
                # add pbr material to json for non-cached base object/material
                write_pbr_material_to_json(context, mat, mat_json, base_path, old_name, bake_values)
            # copy or remap the texture paths
            utils.log_info("Finalizing Texture Paths:")
            utils.log_indent()
            if copy_textures:
                images_copied = []
                for channel in mat_json["Textures"].keys():
                    copy_and_update_texture_path(mat_json["Textures"][channel], "Texture Path", old_path, new_path, old_name, new_name, as_blend_file, mat_name, mat_data, images_copied)
                if "Custom Shader" in mat_json.keys():
                    for channel in mat_json["Custom Shader"]["Image"].keys():
                        copy_and_update_texture_path(mat_json["Custom Shader"]["Image"][channel], "Texture Path", old_path, new_path, old_name, new_name, as_blend_file, mat_name, mat_data, images_copied)
                if physics_mat_json:
                    copy_and_update_texture_path(physics_mat_json, "Weight Map Path", old_path, new_path, old_name, new_name, as_blend_file, mat_name, mat_data, images_copied)
                if "Wrinkle" in mat_json.keys():
                    for channel in mat_json["Wrinkle"]["Textures"].keys():
                        copy_and_update_texture_path(mat_json["Wrinkle"]["Textures"][channel], "Texture Path", old_path, new_path, old_name, new_name, as_blend_file, mat_name, mat_data, images_copied)
            else:
                for channel in mat_json["Textures"].keys():
                    remap_texture_path(mat_json["Textures"][channel], "Texture Path", old_path, new_path, mat_data)
                if "Custom Shader" in mat_json.keys():
                    for channel in mat_json["Custom Shader"]["Image"].keys():
                        remap_texture_path(mat_json["Custom Shader"]["Image"][channel], "Texture Path", old_path, new_path, mat_data)
                if physics_mat_json:
                    remap_texture_path(physics_mat_json, "Weight Map Path", old_path, new_path, mat_data)
                if "Wrinkle" in mat_json.keys():
                    for channel in mat_json["Wrinkle"]["Textures"].keys():
                        remap_texture_path(mat_json["Wrinkle"]["Textures"][channel], "Texture Path", old_path, new_path, mat_data)
            mat_data["processed"] = True
            # texture paths
            utils.log_recess()
            # material
            utils.log_recess()
        # object
        utils.log_recess()
    if apply_fixes and prefs.export_legacy_bone_roll_fix:
        if obj.type == "ARMATURE":
            if utils.object_mode():
                utils.set_active_object(obj)
                if utils.set_mode("EDIT"):
                    utils.log_info("Applying upper and lower teeth bones roll fix.")
                    bone = obj.data.edit_bones["CC_Base_Teeth01"]
                    bone.roll = 0
                    bone = obj.data.edit_bones["CC_Base_Teeth02"]
                    bone.roll = 0
                    utils.object_mode()
    if sync:
        # find all mesh/material keys not used
        meshes_json = jsonutils.get_json(json_data, f"{new_name}/Object/{new_name}/Meshes")
        del_keys = []
        for obj_key in objects_map:
            mat_keys = objects_map[obj_key]
            obj_json = jsonutils.get_json(json_data, f"{new_name}/Object/{new_name}/Meshes/{obj_key}")
            for key in obj_json["Materials"]:
                if key not in mat_keys:
                    utils.log_detail(f"Removing: material {obj_key}/{key}")
                    del_keys.append((obj_json["Materials"], key))
        for key in meshes_json:
            if key not in objects_map:
                utils.log_detail(f"Removing: object {key}")
                del_keys.append((meshes_json, key))
        # find all physics mesh/material keys not used
        physics_meshes_json = jsonutils.get_json(json_data, f"{new_name}/Object/{new_name}/Physics/Soft Physics/Meshes")
        for physics_mesh_key in physics_map:
            physics_mat_keys = physics_map[physics_mesh_key]
            physics_mesh_json = jsonutils.get_json(json_data, f"{new_name}/Object/{new_name}/Physics/Soft Physics/Meshes/{physics_mesh_key}")
            for key in physics_mesh_json["Materials"]:
                if key not in physics_mat_keys:
                    utils.log_detail(f"Removing: physics material {physics_mesh_key}/{key}")
                    del_keys.append((physics_mesh_json["Materials"], key))
        for key in physics_meshes_json:
            if key not in physics_map:
                utils.log_detail(f"Removing: physics object {key}")
                del_keys.append((physics_meshes_json, key))
        # remove the keys
        for dictionary, key in del_keys:
            if key in dictionary:
                del dictionary[key]
    # as the baking system can deselect everything, reselect the export objects here.
    utils.try_select_objects(objects, True)


def remap_texture_path(tex_info, path_key, old_path, new_path, mat_data):
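    """Rewrite the json texture path so it is relative to the new export
    folder, without copying the texture file."""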
    # at this point all the image paths have been re-written as absolute paths
    # (except those not used in the Blender material shaders)
    if path_key in tex_info.keys():
        if tex_info[path_key]:
            tex_path = tex_info[path_key]
            if os.path.isabs(tex_path):
                abs_path = tex_path
            else:
                abs_path = os.path.normpath(os.path.join(old_path, tex_path))
            rel_path = utils.relpath(abs_path, new_path)
            tex_info[path_key] = os.path.normpath(rel_path)
            utils.log_info(f"Remapping JSON texture path to: {tex_info[path_key]}")
    return


def copy_and_update_texture_path(tex_info, path_key, old_path, new_path, old_name, new_name, as_blend_file, mat_name, mat_data, images_copied):
"""keep the same relative folder structure and copy the textures to their target folder.
update the images in the blend file with the new location."""
# at this point all the image paths have been re-written as absolute paths
sep = os.path.sep
old_tex_base = os.path.join(old_path, f"textures{sep}{old_name}")
old_fbm_base = os.path.join(old_path, f"{old_name}.fbm")
if path_key in tex_info.keys():
tex_path : str = tex_info[path_key]
if tex_path:
if not os.path.isabs(tex_path):
tex_path = os.path.normpath(os.path.join(old_path, tex_path))
old_abs_path = os.path.normpath(tex_path)
# old_path will only be set from a successful import from CC/iC
# so it should have expected the CC/iC folder structure
if old_path:
rel_tex_path = utils.relpath(os.path.normpath(tex_path), old_path)
# only remap the tex_path if it is inside the expected texture folders
if utils.path_is_parent(old_tex_base, old_abs_path) or utils.path_is_parent(old_fbm_base, old_abs_path):
if old_name != new_name:
rel_tex_path = rel_tex_path.replace(f"textures{sep}{old_name}{sep}{old_name}{sep}", f"textures{sep}{new_name}{sep}{new_name}{sep}")
rel_tex_path = rel_tex_path.replace(f"textures{sep}{old_name}{sep}", f"textures{sep}{new_name}{sep}")
rel_tex_path = rel_tex_path.replace(f"{old_name}.fbm{sep}", f"{new_name}.fbm{sep}")
new_abs_path = os.path.normpath(os.path.join(new_path, rel_tex_path))
new_rel_path = os.path.normpath(utils.relpath(new_abs_path, new_path))
utils.log_info(f"Remapping JSON texture path to: {new_rel_path}")
else:
# otherwise put the textures in folders in the textures/CHARACTER_NAME/Extras/MATERIAL_NAME/ folder
dir, file = os.path.split(tex_path)
extras_dir = f"textures{sep}{new_name}{sep}Extras{sep}{mat_name}"
new_rel_path = os.path.normpath(os.path.join(extras_dir, file))
new_abs_path = os.path.normpath(os.path.join(new_path, new_rel_path))
utils.log_info(f"Setting JSON texture path to: {new_rel_path}")
copy_file = False
if os.path.exists(old_abs_path):
if os.path.exists(new_abs_path):
if not cmp(old_abs_path, new_abs_path):
copy_file = True
else:
copy_file = True
if copy_file:
# make sure path exists
dir_path = os.path.dirname(new_abs_path)
os.makedirs(dir_path, exist_ok=True)
# copy the texture
utils.log_info(f"Copying texture: {old_abs_path}")
utils.log_info(f" to: {new_abs_path}")
shutil.copyfile(old_abs_path, new_abs_path)
# update the json texture path with the new relative path
tex_info[path_key] = new_rel_path
# update images with changed file path (if it changed, and only if exporting as blend file)
if as_blend_file and os.path.exists(old_abs_path) and os.path.exists(new_abs_path):
# if the original path and new path are different
if os.path.normpath(old_abs_path) != os.path.normpath(new_abs_path):
image : bpy.types.Image
for image in bpy.data.images:
# for each image not already copied
if image and image.filepath and image not in images_copied:
image_file_path = bpy.path.abspath(image.filepath)
if os.path.exists(image_file_path):
# if this is the image specified in the json path
if os.path.samefile(image_file_path, old_abs_path):
utils.log_info(f"Updating .blend Image: {image.name}")
utils.log_info(f" to: {new_abs_path}")
image.filepath = new_abs_path
images_copied.append(image)
def restore_export(export_changes : list):
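    """Undo the object/material renames and material slot replacements
    recorded by prep_export()."""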
    if not export_changes:
        return
    # undo everything prep_export did
    # (but don't bother with the json data as it is temporary)
    for info in export_changes:
        op = info[0]
        if op == "OBJECT_RENAME":
            obj = info[1]
            utils.force_object_name(obj, info[2])
            if obj.type == "MESH" and obj.data:
                utils.force_mesh_name(obj.data, info[3])
            if obj.type == "ARMATURE" and obj.data:
                utils.force_armature_name(obj.data, info[3])
        elif op == "MATERIAL_RENAME":
            mat = info[1]
            utils.force_material_name(mat, info[2])
        elif op == "MATERIAL_SLOT_REPLACE":
            slot = info[1]
            slot.material = info[2]
    return


def get_prop_value(mat_cache, prop_name, default):
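    """Look up a (possibly nested) property on the material cache parameters
    by name, returning the default if the attribute path does not evaluate."""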
    parameters = mat_cache.parameters
    try:
        return eval("parameters." + prop_name, None, locals())
    except:
        return default


def write_back_json(mat_json, mat, mat_cache):
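    """Write the material cache parameters back into the material json,
    using the shader definition's "vars" and "export" mappings."""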
    shader_name = params.get_shader_name(mat_cache)
    shader_def = params.get_shader_def(shader_name)
    if mat_json is None:
        return
    if shader_def:
        if "vars" in shader_def.keys():
            for var_def in shader_def["vars"]:
                prop_name = var_def[0]
                prop_default = var_def[1]
                func = var_def[2]
                if func == "":
                    args = var_def[3:]
                    json_var = args[0]
                    if json_var and json_var != "":
                        prop_value = get_prop_value(mat_cache, prop_name, prop_default)
                        jsonutils.set_material_json_var(mat_json, json_var, prop_value)
        if "export" in shader_def.keys():
            for export_def in shader_def["export"]:
                json_var = export_def[0]
                json_default = export_def[1]
                func = export_def[2]
                args = export_def[3:]
                json_value = shaders.eval_parameters_func(mat_cache.parameters, func, args, json_default)
                jsonutils.set_material_json_var(mat_json, json_var, json_value)


def write_back_textures(context, mat_json: dict, mat, mat_cache, base_path, old_name, bake_values, mat_data,
                        bake_nodes, bake_bump_to_normal, images_processed):
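    """Write the texture entries for this material back into the material json:
    re-uses already processed images where possible, bakes value-only channels
    (diffuse/roughness/metallic) and non-image socket inputs to textures, and
    optionally combines bump maps into the normal map."""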
    global UNPACK_INDEX
    prefs = vars.prefs()
    if mat_json is None:
        return
    shader_name = params.get_shader_name(mat_cache)
    rl_shader_name = params.get_rl_shader_name(mat_cache)
    shader_def = params.get_shader_def(shader_name)
    bsdf_node, shader_node, mix_node = nodeutils.get_shader_nodes(mat, shader_name)
    has_custom_shader = "Custom Shader" in mat_json.keys()
    unpack_path = os.path.join(base_path, "textures", old_name, "Unpack")
    bake_path = os.path.join(base_path, "textures", old_name, "Baked")
    custom_path = os.path.join(base_path, "textures", old_name, "Custom")
    bake.init_bake()
    UNPACK_INDEX = 1001
    # determine if we are combining bump maps into normal maps:
    normal_socket = params.get_shader_texture_socket(shader_def, "NORMAL")
    bump_socket = params.get_shader_texture_socket(shader_def, "BUMP")
    normal_connected = normal_socket and nodeutils.has_connected_input(shader_node, normal_socket)
    bump_combining = False
    if bake_bump_to_normal and bake_nodes:
        bump_combining = normal_connected and bump_socket and nodeutils.has_connected_input(shader_node, bump_socket)
    if shader_def and shader_node:
        if "textures" in shader_def.keys():
            for tex_def in shader_def["textures"]:
                tex_type = tex_def[2]
                shader_socket = tex_def[0]
                tex_id = params.get_texture_json_id(tex_type)
                is_pbr_texture = tex_type in params.PBR_TYPES
                is_pbr_shader = shader_name == "rl_pbr_shader" or shader_name == "rl_sss_shader"
                tex_node = nodeutils.get_node_connected_to_input(shader_node, shader_socket)
                tex_info = None
                bake_value_texture = False
                bake_shader_socket = ""
                bake_value_size = 64
                roughness_modified = False
                if tex_type == "ROUGHNESS":
                    roughness = 0.5
                    if not nodeutils.has_connected_input(shader_node, "Roughness Map"):
                        roughness = nodeutils.get_node_input_value(shader_node, "Roughness Map", 0.5)
                    def_min = 0
                    def_max = 1
                    def_pow = 1
                    #if shader_name == "rl_sss_shader":
                    #    def_pow = 0.75
                    roughness_min = nodeutils.get_node_input_value(shader_node, "Roughness Min", def_min)
                    roughness_max = nodeutils.get_node_input_value(shader_node, "Roughness Max", def_max)
                    roughness_pow = nodeutils.get_node_input_value(shader_node, "Roughness Power", def_pow)
                    if roughness_min != def_min or roughness_max != def_max or roughness != 0.5:
                        roughness_modified = True
                # find or generate tex_info json.
                if is_pbr_texture:
                    # CC3 cannot set metallic or roughness values without textures, so must bake a small value texture
                    if not tex_node:
                        if tex_type == "DIFFUSE":
                            if bake_values:
                                bake_value_texture = True
                                bake_shader_socket = "Base Color"
                        if tex_type == "ROUGHNESS":
                            if bake_values and roughness_modified:
                                bake_value_texture = True
                                bake_shader_socket = "Roughness"
                            elif not bake_values:
                                mat_json["Roughness_Value"] = roughness
                        elif tex_type == "METALLIC":
                            metallic = nodeutils.get_node_input_value(shader_node, "Metallic Map", 0)
                            if bake_values and metallic > 0:
                                bake_value_texture = True
                                bake_shader_socket = "Metallic"
                            elif not bake_values:
                                mat_json["Metallic_Value"] = metallic
                    # fetch the tex_info data for the channel
                    if tex_id in mat_json["Textures"]:
                        tex_info = mat_json["Textures"][tex_id]
                    # or create a new tex_info if missing or baking a new texture
                    elif tex_node or bake_value_texture:
                        tex_info = copy.deepcopy(params.JSON_PBR_TEX_INFO)
                        location, rotation, scale = nodeutils.get_image_node_mapping(tex_node)
                        tex_info["Tiling"] = [scale[0], scale[1]]
                        tex_info["Offset"] = [location[0], location[1]]
                        mat_json["Textures"][tex_id] = tex_info
                        # note: strength values for textures defined in the shader vars are written after in write_back_json()
                elif has_custom_shader:
                    if tex_id in mat_json["Custom Shader"]["Image"]:
                        tex_info = mat_json["Custom Shader"]["Image"][tex_id]
                    elif tex_node:
                        tex_info = copy.deepcopy(params.JSON_CUSTOM_TEX_INFO)
                        mat_json["Custom Shader"]["Image"][tex_id] = tex_info
                # if bump and normal are connected and we are combining them,
                # remove bump maps from the Json and don't process it:
                if tex_info and tex_type == "BUMP" and bump_combining:
                    tex_info = None
                    del mat_json["Textures"][tex_id]
                if tex_info:
                    processed_image = None
                    if tex_type in mat_data.keys():
                        processed_image = mat_data[tex_type]
                        if processed_image:
                            utils.log_info(f"Reusing already processed material image: {processed_image.name}")
                    if tex_node or bake_value_texture:
                        image : bpy.types.Image = None
                        # re-use the already processed image if available
                        if processed_image:
                            image = processed_image
                        else:
                            # if it needs a value texture, bake the value
                            if bake_value_texture:
                                # turn off ao for diffuse bakes
                                if tex_type == "DIFFUSE":
                                    ao = nodeutils.get_node_input_value(shader_node, "AO Strength", 1.0)
                                    nodeutils.set_node_input_value(shader_node, "AO Strength", 0)
                                image = bake.bake_node_socket_input(context, bsdf_node, bake_shader_socket,
                                                                    mat, tex_id, bake_path,
                                                                    override_size=bake_value_size)
                                # restore the ao strength after the diffuse bake
                                if tex_type == "DIFFUSE":
                                    nodeutils.set_node_input_value(shader_node, "AO Strength", ao)
                            elif nodeutils.is_texture_pack_system(tex_node):
                                utils.log_info(f"Texture: {tex_id} for socket: {shader_socket} is connected to a texture pack. Skipping.")
                                continue
                            elif wrinkle.is_wrinkle_system(tex_node):
                                utils.log_info(f"Texture: {tex_id} for socket: {shader_socket} is connected to the wrinkle shader. Skipping.")
                                continue
                            # if there is an image texture linked to the socket
                            elif tex_node and tex_node.type == "TEX_IMAGE":
                                # bake roughness min/max adjustments (but not power)
                                if tex_type == "ROUGHNESS" and roughness_modified:
                                    roughness_pow = nodeutils.get_node_input_value(shader_node, "Roughness Power", def_pow)
                                    nodeutils.set_node_input_value(shader_node, "Roughness Power", 1.0)
                                    image = bake.bake_node_socket_input(context, bsdf_node, "Roughness",
                                                                        mat, tex_id, bake_path,
                                                                        size_override_node=shader_node,
                                                                        size_override_socket="Roughness Map")
                                    nodeutils.set_node_input_value(shader_node, "Roughness Power", roughness_pow)
                                # if there is a normal and a bump map connected, combine into a normal
                                elif bake_nodes and tex_type == "NORMAL" and bump_combining:
                                    image = bake.bake_rl_bump_and_normal(context, shader_node, bsdf_node,
                                                                         mat, tex_id, bake_path,
                                                                         normal_socket_name=shader_socket,
                                                                         bump_socket_name=bump_socket)
                                # otherwise use the image texture
                                else:
                                    image = tex_node.image
                            elif bake_nodes:
                                # if something is connected to the shader socket but is not a texture image
                                # and baking is enabled: then bake the socket input into a texture for exporting:
                                if tex_type == "NORMAL" and bump_combining:
                                    image = bake.bake_rl_bump_and_normal(context, shader_node, bsdf_node, mat, tex_id, bake_path,
                                                                         normal_socket_name=shader_socket,
                                                                         bump_socket_name=bump_socket)
                                else:
                                    utils.log_info(f"Baking Socket Input: {shader_node.name} {shader_socket}")
                                    image = bake.bake_node_socket_input(context, shader_node, shader_socket,
                                                                        mat, tex_id, bake_path)
                            tex_info["Texture Path"] = ""
                            mat_data[tex_type] = image
                        if image:
                            try_unpack_image(image, unpack_path, True)
                            if not image.filepath:
                                try:
                                    # image is not saved?
                                    if image.file_format:
                                        format = image.file_format
                                    else:
                                        format = "PNG"
                                    imageutils.save_image_to_format_dir(image, format, custom_path, image.name)
                                except:
                                    utils.log_warn(f"Unable to save unsaved image: {image.name} to custom image dir!")
                            if image.filepath:
                                image_data = None
                                if image in images_processed.keys():
                                    image_data = images_processed[image]
                                else:
                                    abs_image_path = os.path.normpath(bpy.path.abspath(image.filepath))
                                    image_data = { "old_path": abs_image_path }
                                    images_processed[image] = image_data
                                abs_image_path = image_data["old_path"]
                                tex_info["Texture Path"] = abs_image_path
                                utils.log_info(f"{mat.name}/{tex_id}: Source texture path: {abs_image_path}")
                        elif not tex_node:
                            tex_info["Texture Path"] = ""
                        mat_data["write_back"] = True


def write_back_physics_weightmap(physics_mat_json: dict, obj, mat, mat_cache, base_path, old_name, mat_data):
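    """Write the cloth physics weight map for this material back into the
    physics material json."""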
    global UNPACK_INDEX
    prefs = vars.prefs()