# utilities.py

# fool: should be wrColor like prColor... dumb
def wrapRed(skk): return "\033[91m{}\033[00m".format(skk)
def wrapGreen(skk): return "\033[92m{}\033[00m".format(skk)
def wrapPurple(skk): return "\033[95m{}\033[00m".format(skk)
def wrapWhite(skk): return "\033[97m{}\033[00m".format(skk)
def wrapOrange(skk): return "\033[0;33m{}\033[00m".format(skk)
# these should reimplement the print interface..
def prRed(*args): print(*[wrapRed(arg) for arg in args])
def prGreen(*args): print(*[wrapGreen(arg) for arg in args])
def prPurple(*args): print(*[wrapPurple(arg) for arg in args])
def prWhite(*args): print(*[wrapWhite(arg) for arg in args])
def prOrange(*args): print(*[wrapOrange(arg) for arg in args])
# add THIS to the top of a file for easy access:
# from mantis.utilities import (prRed, prGreen, prPurple, prWhite,
#                               prOrange,
#                               wrapRed, wrapGreen, wrapPurple, wrapWhite,
#                               wrapOrange,)
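# Example usage: prRed("ERROR:", 42) prints both arguments wrapped in the ANSI escape
# code for red, while wrapRed("text") returns the escaped string without printing it.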
def float_lerp(a: float, b: float, factor: float) -> float:
    return (a * (1.0 - factor)) + (b * factor)
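# Example: float_lerp(0.0, 10.0, 0.25) == 2.5; factor=0.0 returns a, factor=1.0 returns b.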
# A function for getting to the end of a Reroute.
# TODO: this seems really inefficient!
def socket_seek(start_link, links):
    link = start_link
    while (link.from_socket):
        for newlink in links:
            if link.from_socket.node.inputs:
                if link.from_node.bl_idname != 'NodeReroute':
                    return link.from_socket
                if newlink.to_socket == link.from_socket.node.inputs[0]:
                    link = newlink; break
        else:
            break
    return link.from_socket
# THIS ONE is better. I don't know what I was thinking up above.
# TODO: try and refactor to use this function instead
def find_reroute_start_socket(reroute, track='BACK'):
    # "BACK" traces back through the tree
    # "FORWARD" traces forward through the tree
    socket = None
    while (reroute and track == 'BACK'):
        if len(reroute.inputs[0].links) == 1:
            link = reroute.inputs[0].links[0]
            socket = link.from_socket
            if link.from_node.bl_idname == 'NodeReroute':
                reroute = link.from_node
            else:
                link, reroute = None, None
    while (reroute and track == 'FORWARD'):
        if len(reroute.outputs[0].links) == 1:
            link = reroute.outputs[0].links[0]
            socket = link.to_socket
            if link.to_node.bl_idname == 'NodeReroute':
                reroute = link.to_node
            else:
                link, reroute = None, None
    return socket
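# Note: given a NodeReroute, this follows the chain of reroutes upstream (track='BACK') to the
# socket that feeds the chain, or downstream (track='FORWARD') to the socket the chain ends at.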
# this creates fake links that have the same interface as Blender's
# so that I can bypass Reroutes
def clear_reroutes(links):
    from .base_definitions import DummyLink
    kept_links, rerouted_starts = [], []
    rerouted = []
    all_links = links.copy()
    while (all_links):
        link = all_links.pop()
        to_cls = link.to_socket.node.bl_idname
        from_cls = link.from_socket.node.bl_idname
        reroute_classes = ["NodeReroute"]
        if (to_cls in reroute_classes and
                from_cls in reroute_classes):
            rerouted.append(link)
        elif (to_cls in reroute_classes and not
                from_cls in reroute_classes):
            rerouted.append(link)
        elif (from_cls in reroute_classes and not
                to_cls in reroute_classes):
            rerouted_starts.append(link)
        else:
            kept_links.append(link)
    for start in rerouted_starts:
        from_socket = socket_seek(start, rerouted)
        new_link = DummyLink(from_socket=from_socket, to_socket=start.to_socket,
                             nc_from=None, nc_to=None,
                             multi_input_sort_id=start.multi_input_sort_id)
        kept_links.append(new_link)
    return kept_links
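# Note: the result is the original non-reroute links plus DummyLink stand-ins that connect each
# reroute chain's true source socket directly to its final destination socket, so later passes
# can ignore NodeReroute nodes entirely.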
def tree_from_nc(sig, base_tree):
    if (sig[0] == 'MANTIS_AUTOGENERATED'):
        sig = sig[:-2]  # cut off the end part of the signature (because it uses socket.name and socket.identifier)
        # this will lead to totally untraceable bugs in the event of a change in how signatures are assigned
    tree = base_tree
    for i, path_item in enumerate(sig):
        if (i == 0) or (i == len(sig) - 1):
            continue
        tree = tree.nodes.get(path_item).node_tree
    return tree
def get_node_prototype(sig, base_tree):
    return tree_from_nc(sig, base_tree).nodes.get(sig[-1])
# This one is the simplest case so it is easiest to use its own function.
def set_string_variables_at_creation_time(n, prototype, mContext):
    # we're gonna store the variables using the node's signature
    prev_group_key = ''
    prev_group_vars = {}
    for i in range(len(n.signature[:-1])):
        if i == 0: continue  # this will cut any AUTOGEN or None in the base
        prev_group_key += n.signature[i]
        prev_group_vars = mContext.string_variables.get(prev_group_key, {})
    # IS THIS WISE???
    from copy import deepcopy  # so nothing spooky will happen
    group_vars = mContext.string_variables["".join(n.signature[1:])] = deepcopy(prev_group_vars)
    for input in prototype.inputs:
        if hasattr(input, "default_value") and not input.is_linked:
            if isinstance(input.default_value, str):
                group_vars[input.name] = input.default_value
            elif hasattr(input.default_value, "name"):
                group_vars[input.name] = input.default_value.name
def set_string_variables_during_exec(n, mContext):
    print(n)
    pass
    # so for this we need to get the UI node to get the string
    # when a node is executed, we check for string variables that were set at runtime
    # we need the dummy node for this
##################################################################################################
# groups and changing sockets -- this is used extensively by Schema.
##################################################################################################
# this one returns None if there is an error.
def get_socket_maps(node, force=False):
    maps = [{}, {}]
    node_collection = ["inputs", "outputs"]
    links = ["from_socket", "to_socket"]
    for collection, map, linked_socket in zip(node_collection, maps, links):
        for sock in getattr(node, collection):
            if sock.is_linked:
                other_sockets = []
                # Sort the links first (in case they are multi-input), because Blender doesn't
                links = sorted(list(sock.links), key=lambda l: l.multi_input_sort_id)
                # HACK here because Blender will crash if the socket values in the NodeReroute
                # are mutated. Because this seems to happen in a deferred way, I can't account
                # for it except by checking the node later...
                # TODO: The fact that I need this hack means I can probably solve this problem
                #       for all node types in a safer way, since they may also be dynamic somehow
                for l in links:
                    if "from" in linked_socket and l.from_node.bl_idname == "NodeReroute":
                        other_sockets.append(l.from_node)
                    elif "to" in linked_socket and l.to_node.bl_idname == "NodeReroute":
                        other_sockets.append(l.to_node)
                    else:
                        other_sockets.append(getattr(l, linked_socket))
                from bpy.types import NodeSocket
                keep_sockets = []
                for other_socket in other_sockets.copy():
                    if isinstance(other_socket, NodeSocket) and \
                            other_socket.bl_idname == 'NodeSocketUndefined':
                        continue  # this one is bad
                    keep_sockets.append(other_socket)
                map[sock.identifier] = keep_sockets
            elif hasattr(sock, "default_value"):
                if sock.get("default_value") is not None:
                    val = sock['default_value']
                elif sock.bl_idname == "EnumCurveSocket" and sock.get("default_value") is None:
                    # HACK I need to add this special case because during file-load,
                    # this value is None and should not be altered until it is set once.
                    continue
                elif "Enum" in sock.bl_idname and isinstance(sock.get("default_value"), int):
                    continue  # for string enum properties that have not yet initialized (at startup)
                elif (val := sock.default_value) is not None:
                    pass
                elif not force:
                    continue
                map[sock.identifier] = val
            else:
                from .socket_definitions import no_default_value
                if sock.bl_idname in no_default_value:
                    map[sock.identifier] = None
                else:
                    raise RuntimeError(f"ERROR: Could not get socket data for socket of type: {sock.bl_idname}")
    return maps
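# Note: get_socket_maps() returns [input_map, output_map]; each map is keyed by socket identifier
# and holds either a list of linked sockets (or reroute nodes) or the socket's stored default
# value, which do_relink() below consumes.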
# this function is completely overloaded with different purposes and code paths
# TODO refactor everything that funnels into this function
#      make this stuff simpler.
def do_relink(node, socket, map, in_out='INPUT', parent_name=''):
    if not node.__class__.is_registered_node_type(): return
    tree = node.id_data; interface_in_out = 'OUTPUT' if in_out == 'INPUT' else 'INPUT'
    if hasattr(node, "node_tree"):
        tree = node.node_tree
        interface_in_out = in_out
    from bpy.types import NodeSocket, Node
    get_string = '__extend__'
    if socket: get_string = socket.identifier
    from .base_definitions import SchemaUINode
    if (hasattr(node, "node_tree") or isinstance(node, SchemaUINode)) and get_string not in map.keys():
        # this happens when we are creating a new node group and need to update it from nothing.
        return
    val = map[get_string]  # this will throw an error if the socket isn't there. Good!
    if isinstance(val, list):
        for sub_val in val:
            # this will only happen once because it assigns socket, so it is safe to do in the for loop.
            if socket is None:
                socket = sub_val
                if sub_val.bl_idname == "NodeReroute":
                    # we have to trace the reroute node...
                    if in_out == 'INPUT':
                        socket = find_reroute_start_socket(sub_val)
                    else:
                        socket = find_reroute_start_socket(sub_val, track="FORWARD")
                sock_type = socket.interface_type
                name = unique_socket_name(node, socket, tree)
                if parent_name:
                    interface_socket = update_interface(tree.interface, name, interface_in_out, sock_type, parent_name)
                if in_out == 'INPUT':
                    socket = node.inputs.new(sock_type, name, identifier=interface_socket.identifier)
                else:
                    socket = node.outputs.new(sock_type, name, identifier=interface_socket.identifier)
                if parent_name == 'Array': socket.display_shape = 'SQUARE_DOT'
                if parent_name == 'Constant': socket.display_shape = 'CIRCLE_DOT'
            # then move it up and delete the other link.
            # this also needs to modify the interface of the node tree.
            if isinstance(sub_val, NodeSocket):
                l = None
                if in_out == 'INPUT':
                    l = node.id_data.links.new(input=sub_val, output=socket)
                else:
                    l = node.id_data.links.new(input=socket, output=sub_val)
                if l is None:
                    raise RuntimeError("Could not create link")
            elif isinstance(sub_val, Node):
                l = None
                # this happens when it is a NodeReroute
                if not socket.is_output:
                    l = node.id_data.links.new(input=sub_val.outputs[0], output=socket)
                else:
                    l = node.id_data.links.new(input=socket, output=sub_val.inputs[0])
                if l is None:
                    raise RuntimeError("Could not create link")
            else:
                raise RuntimeError("Unhandled case in do_relink()")
    elif get_string != "__extend__":
        if not socket.is_output:
            from bpy.app import version as bpy_version
            if bpy_version >= (4, 5, 0):  # VERSIONING
                # for some reason, this is throwing an error now
                from bpy.types import bpy_prop_array
                if isinstance(val, bpy_prop_array):
                    if in_out == "INPUT" and hasattr(socket, 'input') and socket.input == False:
                        return  # doesn't matter, this is a Matrix socket in a bone or something
                    # raise RuntimeError(
                    #     f"Cannot set property in socket of type {socket.bl_idname} due to bug in Blender: "
                    #     f"{node.id_data.name}:{node.name}:{socket.name} ")
                    # TODO: report this weird bug!
            try:
                if socket.bl_idname == 'BooleanThreeTupleSocket':
                    # it is so annoying that I have to do this
                    socket.default_value = [bool(val[0]), bool(val[1]), bool(val[2])]
                else:
                    socket.default_value = val
            except (AttributeError, ValueError):  # must be readonly or maybe it doesn't have a d.v.
                pass
def read_schema_type(interface_item):
    # VERSIONING CODE
    tree = interface_item.id_data
    version = tree.mantis_version
    old_version = False
    if version[0] == 0:
        if version[1] < 12: old_version = True
        elif version[1] == 12 and version[2] < 27: old_version = True
    # unfortunately we need to check this stuff for the versioning code to run correctly the first time.
    # UNLESS I can find a way to prevent this code from running before versioning
    if old_version or (not hasattr(interface_item, 'is_array')):
        # it is not a custom interface class and/or the file is old.
        if interface_item.parent:
            return interface_item.parent.name
    else:
        if interface_item.is_array:
            return 'Array'
        if interface_item.is_connection:
            return 'Connection'
    return 'Constant'
def update_interface(interface, name, in_out, sock_type, parent_name):
    from bpy.app import version as bpy_version
    if parent_name:
        if not (interface_parent := interface.items_tree.get(parent_name)):
            interface_parent = interface.new_panel(name=parent_name)
        if bpy_version != (4, 5, 0):
            socket = interface.new_socket(name=name, in_out=in_out, socket_type=sock_type, parent=interface_parent)
        else:  # blender 4.5.0 LTS, have to workaround a bug!
            from .versioning import workaround_4_5_0_interface_update
            socket = workaround_4_5_0_interface_update(tree=interface.id_data, name=name, in_out=in_out,
                                                       sock_type=sock_type, parent_name=parent_name, do_parent=True)
        if parent_name == 'Connection':
            in_out = 'OUTPUT' if in_out == 'INPUT' else 'INPUT'  # flip this to make sure connections always do both
            interface.new_socket(name=name, in_out=in_out, socket_type=sock_type, parent=interface_parent)
        return socket
    else:
        raise RuntimeError(wrapRed("Cannot add interface item to tree without specifying type."))
    # D.node_groups['Rigging Nodes'].interface.new_socket('beans', description='the b word', socket_type='NodeSocketGeometry')
# UGLY BAD REFACTOR
def relink_socket_map_add_socket(node, socket_collection, item, in_out=None,):
    from bpy.app import version as bpy_version
    # if not in_out: in_out=item.in_out
    multi = False
    if in_out == 'INPUT' and read_schema_type(item) == 'Array':
        multi = True
    # have to work around a bug in 4.5.0 that prevents me from declaring custom socket types
    # I have arbitrarily chosen to use the NodeSocketGeometry type to signal that this one is affected.
    if bpy_version == (4, 5, 0) and item.bl_socket_idname == 'NodeSocketGeometry':
        from .versioning import socket_add_workaround_for_4_5_0_LTS
        s = socket_add_workaround_for_4_5_0_LTS(item, socket_collection, multi)
    else:
        s = socket_collection.new(type=item.bl_socket_idname, name=item.name, identifier=item.identifier, use_multi_input=multi)
    if hasattr(s, 'default_value') and hasattr(s, 'is_valid_interface_type') and \
            s.is_valid_interface_type == True:
        if s.bl_idname not in ['MatrixSocket']:  # no default value implemented
            from bpy.types import bpy_prop_array
            from mathutils import Vector
            default_value = 'REPORT BUG ON GITLAB'  # default to bug string
            val_type = type(s.default_value)  # why tf can't I match/case here?
            if val_type is bool: default_value = item.default_bool
            if val_type is int: default_value = item.default_int
            if val_type is float: default_value = item.default_float
            if val_type is Vector: default_value = item.default_vector
            if val_type is str: default_value = item.default_string
            if val_type is bpy_prop_array: default_value = item.default_bool_vector
            s.default_value = default_value
    if read_schema_type(item) == 'Array': s.display_shape = 'SQUARE_DOT'
    elif node.bl_idname in ['MantisSchemaGroup'] and read_schema_type(item) == 'Constant':
        s.display_shape = 'CIRCLE_DOT'
    # if item.parent.name == 'Array': s.display_shape = 'SQUARE_DOT'
    # elif item.parent.name == 'Constant': s.display_shape='CIRCLE_DOT'
    return s
# TODO REFACTOR THIS
# I did this awful thing because I needed the above code
# but I have provided this interface to Mantis
# I did not follow the Single Responsibility Principle
# I am now suffering for it, as I rightly deserve.
def relink_socket_map(node, socket_collection, map, item, in_out):
    new_socket = relink_socket_map_add_socket(node, socket_collection, item, in_out,)
    do_relink(node, new_socket, map, in_out, parent_name=read_schema_type(item))
def unique_socket_name(node, other_socket, tree):
    name_stem = other_socket.bl_label; num = 0
    # if hasattr(other_socket, "default_value"):
    #     name_stem = type(other_socket.default_value).__name__
    for item in tree.interface.items_tree:
        if item.item_type == 'PANEL': continue
        if other_socket.is_output and item.in_out == 'INPUT': continue
        if not other_socket.is_output and item.in_out == 'OUTPUT': continue
        if name_stem in item.name: num += 1
    name = name_stem + '.' + str(num).zfill(3)
    return name
##############################
# Dealing with Objects
##############################
# use this to ensure the active object is set back when changing it
def preserve_active_object(func):
    def wrapper(*args, **kwargs):
        import bpy
        original_active = bpy.context.active_object
        func(*args, **kwargs)
        bpy.context.view_layer.objects.active = original_active
    return wrapper
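# Illustrative usage (the decorated helper below is hypothetical, shown only as an example):
# @preserve_active_object
# def select_and_edit(ob):
#     import bpy
#     bpy.context.view_layer.objects.active = ob
#     # ...any change to the active object here is undone after the call returns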
def switch_mode(mode='OBJECT', objects=[]):
    active = None
    if objects:
        from bpy import context, ops
        active = objects[-1]
        context.view_layer.objects.active = active
        if (active):
            with context.temp_override(**{'active_object': active, 'selected_objects': objects}):
                ops.object.mode_set(mode=mode)
    return active
# run this in Object mode, during bFinalize
@preserve_active_object
def bind_modifier_operator(modifier, operator):
    # now we have to bind it
    ob = modifier.id_data
    ob.modifiers.active = modifier
    import bpy
    bpy.context.view_layer.objects.active = ob
    # Context override does not do anything here... it isn't handled in the C code
    # I have verified this by building Blender with print statements to debug.
    # let's just make sure the target object has its modifiers disabled and update the dg
    targ_attr = "target"
    if hasattr(modifier, "object"): targ_attr = "object"
    target = getattr(modifier, targ_attr)
    if target:
        prWhite(f"Binding Deformer {modifier.name} to target {target.name}")
        operator(modifier=modifier.name)
def get_default_collection(collection_type='WIDGET'):
    from .preferences import get_bl_addon_object
    from bpy import data, context
    mantis_addon = get_bl_addon_object(raise_error=True)
    match collection_type:
        case "WIDGET":
            default_collection_name = mantis_addon.preferences.WidgetDefaultCollection
        case "CURVE":
            default_collection_name = mantis_addon.preferences.CurveDefaultCollection
        case "ARMATURE":
            default_collection_name = mantis_addon.preferences.MetaArmatureDefaultCollection
    if default_collection_name:
        if not (default_collection := data.collections.get(default_collection_name)):
            default_collection = data.collections.new(default_collection_name)
            context.scene.collection.children.link(default_collection)
        collection = default_collection
    else: collection = context.collection
    return collection
def import_widget_obj(path,):
    from bpy.app import version as bpy_version
    from bpy import context, data
    from os import path as os_path
    file_name = os_path.split(path)[-1]
    obj_name = os_path.splitext(file_name)[0]
    collection = get_default_collection(collection_type='WIDGET')
    if bpy_version < (4, 5, 0):
        original_active = context.active_object
        # for blender versions prior to 4.5.0, we have to import with an operator
        from bpy.ops import wm as wm_ops
        ob_names_before = data.objects.keys()
        wm_ops.obj_import(
            filepath=path,
            check_existing=False,
            forward_axis='NEGATIVE_Z',
            up_axis='Y',
            validate_meshes=True,)
        # just make sure the active object doesn't change
        context.view_layer.objects.active = original_active
        # the below is a HACK... I can find the objects in the .obj file
        # by scanning the file for the "o" prefix and checking the name.
        # but that may be slow if the obj is big. which would make a bad widget!
        ob = None
        for ob in data.objects:
            if ob.name in ob_names_before: continue
            # this is easier than setting the active collection before import.
            for other_collection in ob.users_collection:
                other_collection.objects.unlink(ob)
            from math import pi as PI
            from mathutils import Matrix
            m = ob.data
            for v in m.vertices:
                v.co = Matrix.Rotation(PI/2, 4, 'X') @ v.co
            collection.objects.link(ob)
            return ob  # return the first one, that should be the one
        else:  # no new object was found - fail.
            # I don't expect this to happen unless there is an error in the operator.
            raise RuntimeError(f"Failed to import {file_name}. This is probably"
                               " a bug or a corrupted file.")
    else:
        prWhite(f"INFO: using Geometry Nodes to import {file_name}")
        mesh = data.meshes.new(obj_name)
        ob = data.objects.new(name=obj_name, object_data=mesh)
        # we'll do a geometry nodes import
        collection.objects.link(ob)
        if (import_modifier := ob.modifiers.get("Import OBJ")) is None:
            import_modifier = ob.modifiers.new("Import OBJ", type='NODES')
        ng = data.node_groups.get("Import OBJ")
        if ng is None:
            from .geometry_node_graphgen import gen_import_obj_node_group
            ng = gen_import_obj_node_group()
        import_modifier.node_group = ng
        import_modifier["Socket_0"] = path
        return ob
def import_object_from_file(path):
    # first let's check to see if we need it.
    from os import path as os_path
    file_name = os_path.split(path)[-1]
    obj_name = os_path.splitext(file_name)[0]
    extension = os_path.splitext(file_name)[1]
    if extension == '.obj':
        return import_widget_obj(path,)
    else:
        raise RuntimeError(f"Failed to parse filename {path}")
def import_metarig_data(metarig_data: dict, ):
    from bpy import data, context
    from mathutils import Matrix
    from collections import deque
    # the metarig data is a dict with a bunch of nodes in it
    # start at node 'MANTIS_RESERVED'
    armature_data = metarig_data['MANTIS_RESERVED']
    children = deque(armature_data["children"].copy())
    if (armature := data.armatures.get(armature_data['name'])) is None:
        armature = data.armatures.new(armature_data['name'])
        # if we need to do anything here...
    if (armature_object := data.objects.get(armature_data['name'])) is None:
        armature_object = data.objects.new(armature_data['name'], object_data=armature)
    armature_object.matrix_world = Matrix(
        (armature_data['matrix'][:4],
         armature_data['matrix'][4:8],
         armature_data['matrix'][8:12],
         armature_data['matrix'][12:16], )
    )
    prGreen(armature_data['name'])
    # have to add it to the view layer to switch modes.
    collection = get_default_collection(collection_type="ARMATURE")
    collection.objects.link(armature_object)
    # we'll do this to ensure it is actually in the scene for the mode switch
    context.scene.collection.objects.link(armature_object)
    switch_mode('EDIT', objects=[armature_object])
    while (children):
        child_name = children.pop()
        child_data = metarig_data[child_name]
        eb = armature.edit_bones.new(name=child_data['name'])
        if parent_name := child_data['parent']:
            eb.parent = armature.edit_bones[parent_name]
        eb.length = child_data['length']
        eb.matrix = Matrix(
            (child_data['matrix'][:4],
             child_data['matrix'][4:8],
             child_data['matrix'][8:12],
             child_data['matrix'][12:16], )
        )
        displacement = eb.matrix.to_3x3().transposed().row[1] * child_data['length']
        eb.tail = eb.matrix.decompose()[0] + displacement
        children.extendleft(child_data['children'].copy())
    switch_mode('OBJECT', objects=[armature_object])
    # and now we can remove it from the scene collection, since it is in the Armature collection
    context.scene.collection.objects.unlink(armature_object)
    # note that this will not correct if the object exists and is wrong.
    return armature_object
def import_curve_data_to_object(curve_name, curve_data):
    # the curve data will come as a single curve's data
    from bpy import data
    curve_object = data.objects.new(curve_name, data.curves.new(name=curve_name, type='CURVE'))
    curve_object.data.dimensions = '3D'
    prGreen(curve_name)
    for spline_data in curve_data:
        spline = curve_object.data.splines.new(type=spline_data['type'])
        points_data = spline_data['points']
        points_collection = spline.points
        if spline.type == 'BEZIER':
            # the points are bez_pts
            spline.bezier_points.add(len(points_data)-1)
            points_collection = spline.bezier_points
        else:
            spline.points.add(len(points_data)-1)  # it starts with 1 already
        for i, point_data in enumerate(points_data):
            if spline.type == 'BEZIER':
                pt = spline.bezier_points[i]
            else:
                pt = spline.points[i]
            for prop in dir(pt):
                if prop == 'w':
                    continue
                if prop == 'co' and spline.type != 'BEZIER':
                    value = point_data[prop]
                    pt.co = (value[0], value[1], value[2], point_data['w'])
                    continue
                if prop in point_data.keys():
                    setattr(pt, prop, point_data[prop])
        for prop in dir(spline):
            if prop in spline_data.keys():
                if prop in ['points', 'type', 'index']: continue
                setattr(spline, prop, spline_data[prop])
    collection = get_default_collection(collection_type="CURVE")
    collection.objects.link(curve_object)
    return curve_object
def get_component_library_items(path='ADD_ARMATURE'):
    from os import path as os_path
    from .preferences import get_bl_addon_object
    bl_mantis_addon = get_bl_addon_object()
    return_value = []
    if bl_mantis_addon:
        match path:
            case 'ADD_ARMATURE':
                components_path = bl_mantis_addon.preferences.ComponentsLibraryFolder
            case 'AUTOLOAD':
                components_path = bl_mantis_addon.preferences.ComponentsAutoLoadFolder
        component_names = {}
        from os import walk as os_walk
        for path_root, dirs, files, in os_walk(components_path):
            for file in files:
                relative_file_name = os_path.join(os_path.sep.join(dirs), file)
                if file.endswith('.rig'):
                    component_names[relative_file_name[:-4]] = relative_file_name
        if component_names.keys():
            for i, (name, path) in enumerate(component_names.items()):
                return_value.append((path, name, path, 'NODE_TREE', i))
    return return_value
##############################
# READ TREE and also Schema Solve!
##############################
# TODO: refactor the following two functions, they should be one function with arguments.
def init_connections(nc):
    c, hc = [], []
    for i in nc.outputs.values():
        for l in i.links:
            # if l.from_node != nc:
            #     continue
            if l.is_hierarchy:
                hc.append(l.to_node)
            c.append(l.to_node)
    nc.hierarchy_connections = hc
    nc.connections = c
def init_dependencies(nc):
    c, hc = [], []
    for i in nc.inputs.values():
        for l in i.links:
            # if l.to_node != nc:
            #     continue
            if l.is_hierarchy:
                hc.append(l.from_node)
            c.append(l.from_node)
    nc.hierarchy_dependencies = hc
    nc.dependencies = c
def schema_dependency_handle_item(schema, all_nc, item,):
    hierarchy = True
    from .base_definitions import from_name_filter, to_name_filter
    if item.in_out == 'INPUT':
        dependencies = schema.dependencies
        hierarchy_dependencies = schema.hierarchy_dependencies
        parent_name = read_schema_type(item)
        if parent_name == 'Array':
            for schema_idname in ['SchemaArrayInput', 'SchemaArrayInputGet', 'SchemaArrayInputAll']:
                if (nc := all_nc.get((*schema.signature, schema_idname))):
                    for to_link in nc.outputs[item.name].links:
                        if to_link.to_socket in to_name_filter:
                            # hierarchy_reason='a'
                            hierarchy = False
            for from_link in schema.inputs[item.identifier].links:
                if from_link.from_socket in from_name_filter:
                    hierarchy = False
                    # hierarchy_reason='b'
                if from_link.from_node not in dependencies:
                    if hierarchy:
                        hierarchy_dependencies.append(from_link.from_node)
                    dependencies.append(from_link.from_node)
        if parent_name == 'Constant':
            if nc := all_nc.get((*schema.signature, 'SchemaConstInput')):
                for to_link in nc.outputs[item.name].links:
                    if to_link.to_socket in to_name_filter:
                        # hierarchy_reason='dependencies'
                        hierarchy = False
            for from_link in schema.inputs[item.identifier].links:
                if from_link.from_socket in from_name_filter:
                    # hierarchy_reason='d'
                    hierarchy = False
                if from_link.from_node not in dependencies:
                    if hierarchy:
                        hierarchy_dependencies.append(from_link.from_node)
                    dependencies.append(from_link.from_node)
        if parent_name == 'Connection':
            if nc := all_nc.get((*schema.signature, 'SchemaIncomingConnection')):
                for to_link in nc.outputs[item.name].links:
                    if to_link.to_socket in to_name_filter:
                        # hierarchy_reason='e'
                        hierarchy = False
            for from_link in schema.inputs[item.identifier].links:
                if from_link.from_socket in from_name_filter:
                    # hierarchy_reason='f'
                    hierarchy = False
                if from_link.from_node not in dependencies:
                    if hierarchy:
                        hierarchy_dependencies.append(from_link.from_node)
                    dependencies.append(from_link.from_node)
def init_schema_dependencies(schema, all_nc):
    """ Initialize the dependencies for Schema, and mark them as hierarchy or non-hierarchy dependencies.
    Non-hierarchy dependencies are e.g. drivers and custom transforms.
    """
    tree = schema.prototype.node_tree
    if tree is None:
        raise RuntimeError(f"Cannot get dependencies for schema {schema}")
    schema.dependencies = []
    schema.hierarchy_dependencies = []
    for l in schema.inputs["Schema Length"].links:
        schema.hierarchy_dependencies.append(l.from_node)
    if tree.interface:
        for item in tree.interface.items_tree:
            if item.item_type == 'PANEL':
                continue
            schema_dependency_handle_item(schema, all_nc, item,)
def check_and_add_root(n, roots, include_non_hierarchy=False):
    if (include_non_hierarchy * len(n.dependencies)) > 0:
        return
    elif len(n.hierarchy_dependencies) > 0:
        return
    roots.append(n)
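# Note: multiplying by the boolean means non-hierarchy dependencies only disqualify a root
# candidate when include_non_hierarchy is True; otherwise only hierarchy_dependencies count.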
def get_link_in_out(link):
    from .base_definitions import replace_types
    from_name, to_name = link.from_socket.node.name, link.to_socket.node.name
    # catch special bl_idnames and bunch the connections up
    if link.from_socket.node.bl_idname in replace_types:
        from_name = link.from_socket.node.bl_idname
    if link.to_socket.node.bl_idname in replace_types:
        to_name = link.to_socket.node.bl_idname
    return from_name, to_name
def link_node_containers(tree_path_names, link, local_nc, from_suffix='', to_suffix=''):
    dummy_types = ["DUMMY", "DUMMY_SCHEMA"]
    from_name, to_name = get_link_in_out(link)
    nc_from = local_nc.get((*tree_path_names, from_name+from_suffix))
    nc_to = local_nc.get((*tree_path_names, to_name+to_suffix))
    if (nc_from and nc_to):
        from_s, to_s = link.from_socket.name, link.to_socket.name
        if nc_to.node_type in dummy_types: to_s = link.to_socket.identifier
        if nc_from.node_type in dummy_types: from_s = link.from_socket.identifier
        try:
            connection = nc_from.outputs[from_s].connect(node=nc_to, socket=to_s, sort_id=link.multi_input_sort_id)
            if connection is None:
                prWhite(f"Already connected: {from_name}:{from_s}->{to_name}:{to_s}")
            return connection
        except KeyError as e:
            prRed(f"{nc_from}:{from_s} or {nc_to}:{to_s} missing; review the connections printed below:")
            print(nc_from.outputs.keys())
            print(nc_to.inputs.keys())
            raise e
    else:
        prRed(nc_from, nc_to, (*tree_path_names, from_name+from_suffix), (*tree_path_names, to_name+to_suffix))
        raise RuntimeError(wrapRed(f"Link not connected: {nc_from} -> {nc_to} in tree"))
def get_all_dependencies(nc):
    """ find all dependencies for a mantis node """
    from .base_definitions import GraphError
    nodes = []
    check_nodes = [nc]
    nodes_checked = set()
    while (len(check_nodes) > 0):
        node = check_nodes.pop()
        nodes_checked.add(node)
        connected_nodes = node.hierarchy_dependencies
        for new_node in connected_nodes:
            if new_node in nodes:
                continue
            nodes.append(new_node)
            if new_node not in nodes_checked:
                check_nodes.append(new_node)
    return nodes
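# Note: this is an iterative graph walk over hierarchy_dependencies; the nodes list and the
# nodes_checked set keep already-visited nodes from being re-queued, so cycles cannot loop forever.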
def get_all_nodes_of_type(base_tree, bl_idname):
    nodes = []
    check_nodes = list(base_tree.nodes)
    while (len(check_nodes) > 0):
        node = check_nodes.pop()
        if node.bl_idname in bl_idname:
            nodes.append(node)
        if hasattr(node, "node_tree"):
            check_nodes.extend(list(node.node_tree.nodes))
    return nodes
def trace_all_nodes_from_root(root, nodes):
    """ find all nodes reachable downstream from a root node """
    from .base_definitions import GraphError
    nodes.add(root); check_nodes = [root]
    nodes_checked = set()
    while (len(check_nodes) > 0):
        node = check_nodes.pop(); nodes_checked.add(node)
        connected_nodes = []
        for output in node.outputs:
            for l in output.links:
                if l.to_node not in nodes:
                    connected_nodes.append(l.to_node)
        for new_node in connected_nodes:
            nodes.add(new_node)
            if new_node not in nodes_checked:
                check_nodes.append(new_node)
    return nodes
##################################################################################################
# misc
##################################################################################################
# TODO: get the matrix to return a mathutils.Matrix so I don't need a function call here
def to_mathutils_value(socket):
    if hasattr(socket, "default_value"):
        val = socket.default_value
        if socket.bl_idname in ['MatrixSocket']:
            return socket.TellValue()
        else:
            return val
    else:
        return None
def all_trees_in_tree(base_tree, selected=False):
    """ Recursively finds all trees referenced in a given base-tree. """
    # note that this is recursive but not by tail-end recursion
    # a while-loop is a better way to do recursion in Python.
    trees = [base_tree]
    can_descend = True
    check_trees = [base_tree]
    while (len(check_trees) > 0):  # this seems inefficient, why 2 loops?
        new_trees = []
        while (len(check_trees) > 0):
            tree = check_trees.pop()
            for node in tree.nodes:
                if selected == True and node.select == False:
                    continue
                if new_tree := getattr(node, "node_tree", None):
                    if new_tree in trees: continue
                    new_trees.append(new_tree)
                    trees.append(new_tree)
        check_trees = new_trees
    return trees
# this is a destructive operation, not a pure function or whatever. That isn't good but I don't care.
def SugiyamaGraph(tree, iterations):
    from grandalf.graphs import Vertex, Edge, Graph, graph_core
    class defaultview(object):
        w, h = 1, 1
        xz = (0, 0)
    graph = Graph()
    no_links = set()
    verts = {}
    for n in tree.nodes:
        if n.select == True:
            v = Vertex(n.name)
            v.view = defaultview()
            v.view.xy = n.location
            v.view.h = n.height*2.5
            v.view.w = n.width*2.2
            verts[n.name] = v
            no_links.add(n.name)
            graph.add_vertex(v)
            n.select = False
    edges = []
    inverted_edges = []
    not_a_root = set()
    for link in tree.links:
        if (link.from_node.name not in verts.keys()) or (link.to_node.name not in verts.keys()):
            continue  # problem??
        weight = 1  # maybe this is useful
        not_a_root.add(link.to_node.name)  # if it has an edge-input it is not a root.
        e = Edge(verts[link.from_node.name], verts[link.to_node.name], weight)
        graph.add_edge(e)
        edges.append(e)
        if link.is_valid == False:
            inverted_edges.append(e)
        if link.from_node.name in no_links:
            no_links.remove(link.from_node.name)
        if link.to_node.name in no_links:
            no_links.remove(link.to_node.name)
    try:
        from grandalf.layouts import SugiyamaLayout
        # .C[0] is the first "graph core" that contains a connected graph.
        sug = SugiyamaLayout(graph.C[0])
        sug.init_all()
        sug.draw(iterations)
        # Digco is good for small graphs.
        # from grandalf.layouts import DigcoLayout
        # dco = DigcoLayout(graph.C[0])
        # dco.init_all()
        # dco.draw(iterations)
    except KeyboardInterrupt:
        pass  # just use what it has calculated so far, I guess
    for v in graph.C[0].sV:
        for n in tree.nodes:
            if n.name == v.data:
                n.location.x = v.view.xy[1]
                n.location.y = v.view.xy[0]
                n.select = True
    # now we can take all the input nodes and try to put them in a sensible place
    # not sure why but this absolutely does not do anything
    for n_name in no_links:
        n = tree.nodes.get(n_name)
        next_node = None
        for output in n.outputs:
            if output.is_linked == True:
                next_node = output.links[0].to_node
                break
        # let's see if the next node
        if next_node:
            # need to find the other node in the same layer...
            other_node = None
            for s_input in next_node.inputs:
                if s_input.is_linked:
                    other_node = s_input.links[0].from_node
                    if other_node is n:
                        continue
                    else:
                        break
            if other_node:
                n.location = other_node.location
                n.location.y -= other_node.height*2
            else:  # we'll just position it next to the next node
                n.location = next_node.location
                n.location.x -= next_node.width*1.5
def project_point_to_plane(point, origin, normal):
    return point - normal.dot(point - origin)*normal
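# Note: assuming `normal` is unit length, this returns point - ((point - origin) . normal) * normal,
# i.e. the orthogonal projection of the point onto the plane through `origin`.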
##################################################################################################
# stuff I should probably refactor!!
##################################################################################################
# This is a really, really stupid way to do this
def gen_nc_input_for_data(socket):
    # Class List #TODO deduplicate
    from . import xForm_nodes, link_nodes, misc_nodes, primitives_nodes, deformer_nodes, math_nodes, schema_nodes
    from .internal_containers import NoOpNode
    classes = {}
    for module in [xForm_nodes, link_nodes, misc_nodes, primitives_nodes, deformer_nodes, math_nodes, schema_nodes]:
        for cls in module.TellClasses():
            classes[cls.__name__] = cls
    #
    socket_class_map = {
        "MatrixSocket"                        : classes["InputMatrix"],
        "xFormSocket"                         : None,
        "RelationshipSocket"                  : NoOpNode,
        "DeformerSocket"                      : NoOpNode,
        "GeometrySocket"                      : classes["InputExistingGeometryData"],
        "EnableSocket"                        : classes["InputBoolean"],
        "HideSocket"                          : classes["InputBoolean"],
        #
        "DriverSocket"                        : None,
        "DriverVariableSocket"                : None,
        "FCurveSocket"                        : None,
        "KeyframeSocket"                      : None,
        "BoneCollectionSocket"                : classes["InputString"],
        #
        "xFormParameterSocket"                : None,
        "ParameterBoolSocket"                 : classes["InputBoolean"],
        "ParameterIntSocket"                  : classes["InputFloat"],  # TODO: make an Int node for this
        "ParameterFloatSocket"                : classes["InputFloat"],
        "ParameterVectorSocket"               : classes["InputVector"],
        "ParameterStringSocket"               : classes["InputString"],
        #
        "TransformSpaceSocket"                : classes["InputTransformSpace"],
        "BooleanSocket"                       : classes["InputBoolean"],
        "BooleanThreeTupleSocket"             : classes["InputBooleanThreeTuple"],
        "RotationOrderSocket"                 : classes["InputRotationOrder"],
        "QuaternionSocket"                    : None,
        "QuaternionSocketAA"                  : None,
        "UnsignedIntSocket"                   : classes["InputFloat"],
        "IntSocket"                           : classes["InputFloat"],
        "StringSocket"                        : classes["InputString"],
        #
        "BoolUpdateParentNode"                : classes["InputBoolean"],
        "IKChainLengthSocket"                 : classes["InputFloat"],
        "EnumInheritScale"                    : classes["InputString"],
        "EnumRotationMix"                     : classes["InputString"],
        "EnumRotationMixCopyTransforms"       : classes["InputString"],
        "EnumMaintainVolumeStretchTo"         : classes["InputString"],
        "EnumRotationStretchTo"               : classes["InputString"],
        "EnumTrackAxis"                       : classes["InputString"],
        "EnumUpAxis"                          : classes["InputString"],
        "EnumLockAxis"                        : classes["InputString"],
        "EnumLimitMode"                       : classes["InputString"],
        "EnumYScaleMode"                      : classes["InputString"],
        "EnumXZScaleMode"                     : classes["InputString"],
        "EnumCurveSocket"                     : classes["InputString"],
        "EnumMetaRigSocket"                   : classes["InputString"],
        # Deformers
        "EnumSkinning"                        : classes["InputString"],
        #
        "FloatSocket"                         : classes["InputFloat"],
        "FloatFactorSocket"                   : classes["InputFloat"],
        "FloatPositiveSocket"                 : classes["InputFloat"],
        "FloatAngleSocket"                    : classes["InputFloat"],
        "VectorSocket"                        : classes["InputVector"],
        "VectorEulerSocket"                   : classes["InputVector"],
        "VectorTranslationSocket"             : classes["InputVector"],
        "VectorScaleSocket"                   : classes["InputVector"],
        # Drivers
        "EnumDriverVariableType"              : classes["InputString"],
        "EnumDriverVariableEvaluationSpace"   : classes["InputString"],
        "EnumDriverRotationMode"              : classes["InputString"],
        "EnumDriverType"                      : classes["InputString"],
        "EnumKeyframeInterpTypeSocket"        : classes["InputString"],
        "EnumKeyframeBezierHandleTypeSocket"  : classes["InputString"],
        # Math
        "MathFloatOperation"                  : classes["InputString"],
        "MathVectorOperation"                 : classes["InputString"],
        "MatrixTransformOperation"            : classes["InputString"],
        # Schema
        "WildcardSocket"                      : None,
    }
    return socket_class_map.get(socket.bl_idname, None)
####################################
# CURVE STUFF
####################################
def make_perpendicular(v1, v2):
    from .base_definitions import FLOAT_EPSILON
    if (v1.length_squared < FLOAT_EPSILON) or (v2.length_squared < FLOAT_EPSILON):
        raise RuntimeError("Cannot generate perpendicular vector for zero-length vector")
    projected = (v2.dot(v1) / v1.dot(v1)) * v1
    perpendicular = v2 - projected
    return perpendicular
# this stuff could be branchless but I don't use it much TODO
def cap(val, maxValue):
    if (val > maxValue):
        return maxValue
    return val
def capMin(val, minValue):
    if (val < minValue):
        return minValue
    return val
def wrap(min: float, max: float, value: float) -> float:
    range = max-min; remainder = value % range
    if remainder > max: return min + remainder-max
    else: return remainder
def lerpVal(a, b, fac=0.5):
    return a + ((b-a) * fac)
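# Example: wrap(0.0, 1.0, 1.25) == 0.25, and lerpVal(2.0, 4.0) == 3.0 with the default fac=0.5.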
# wtf this doesn't do anything even remotely similar to wrap
# HACK BAD FIXME UNBREAK ME BAD
# I don't understand what this function does but I am using it in multiple places?
def old_bad_wrap_that_should_be_refactored(val, maxValue, minValue=None):
    if (val > maxValue):
        return (-1 * ((maxValue - val) + 1))
    if ((minValue) and (val < minValue)):
        return (val + maxValue)
    return val
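# Note: despite the name, this acts as an index wrap: with maxValue as the last valid index,
# old_bad_wrap_that_should_be_refactored(maxValue + k, maxValue) returns k - 1, so an index one
# past the end wraps back to 0 (the ribbon/wire helpers below rely on this for closed loops).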
# TODO clean this up
def extract_spline_suffix(spline_index):
    return ".spline." + str(spline_index).zfill(3) + ".extracted"
def do_extract_spline(data, spline):
    remove_me = []
    for other_spline in data.splines:
        if other_spline != spline: remove_me.append(other_spline)
    while remove_me: data.splines.remove(remove_me.pop())
def extract_spline(curve, spline_index):
    """ Given a curve object and spline index, returns a new object
    containing only the selected spline. The new object is bound to
    the original curve.
    """
    if len(curve.data.splines) == 1:
        return curve  # nothing to do here.
    spline_suffix = extract_spline_suffix(spline_index)
    from bpy import data
    if (new_ob := data.objects.get(curve.name + spline_suffix)) is None:
        new_ob = curve.copy(); new_ob.name = curve.name + spline_suffix
    # if the data exists, it is probably stale, so delete it and start over.
    if (old_data := data.curves.get(curve.data.name + spline_suffix)) is not None:
        data.curves.remove(old_data)
    new_data = curve.data.copy(); new_data.name = curve.data.name + spline_suffix
    new_ob.data = new_data
    # do not check for index error here, it is the calling function's responsibility
    do_extract_spline(new_data, new_data.splines[spline_index])
    return new_ob
def bind_extracted_spline_to_curve(new_ob, curve):
    # Set up a relationship between the new object and the old object
    # now, weirdly enough - we can't use parenting very easily because Blender
    # defines the parent on a curve relative to the evaluated path animation
    # Setting the inverse matrix is too much work. Use Copy Transforms instead.
    from .xForm_nodes import reset_object_data
    reset_object_data(new_ob)
    c = new_ob.constraints.new("COPY_TRANSFORMS"); c.target = curve
    new_ob.parent = curve
    return new_ob
def get_extracted_spline_object(proto_curve, spline_index, mContext):
    # we're storing it separately like this to ensure all nodes use the same
    # object if they extract the same spline for use by Mantis.
    # this should be transparent to the user since it is working around
    # a limitation in Blender.
    extracted_spline_name = proto_curve.name + extract_spline_suffix(spline_index)
    if curve := mContext.b_objects.get(extracted_spline_name):
        return curve
    else:
        curve = extract_spline(proto_curve, spline_index)
        if curve.name != proto_curve.name:  # if there is only one spline, no
            bind_extracted_spline_to_curve(curve, proto_curve)  # dupe is created.
        mContext.b_objects[extracted_spline_name] = curve
        return curve
def nurbs_copy_bez_spline(curve, bez_spline, do_setup=True):
    other_spline = curve.data.splines.new('NURBS')
    other_spline.use_endpoint_u = True
    other_spline.use_bezier_u = True
    bez_pts = bez_spline.bezier_points
    bez_data = []
    for i, bez_pt in enumerate(bez_pts):
        if i > 0:
            bez_data.append(bez_pt.handle_left.copy())
        bez_data.append(bez_pt.co.copy())
        if i != len(bez_pts)-1:
            bez_data.append(bez_pt.handle_right.copy())
    other_spline.points.add(len(bez_data)-1)
    for i, pt in enumerate(bez_data):
        other_spline.points[i].co = (*pt, 1.0)  # add the W value here
    if do_setup:  # do the stuff that makes it behave the same as a bez spline
        other_spline.use_endpoint_u = True; other_spline.use_bezier_u = True
        other_spline.order_u = 4  # set to 1 for poly
    return other_spline
def RibbonMeshEdgeLengths(m, ribbon):
    tE = ribbon[0]; bE = ribbon[1]; c = ribbon[2]
    lengths = []
    for i in range(len(tE)):  # tE and bE are same length
        if (c == True):
            v1NextInd = tE[old_bad_wrap_that_should_be_refactored((i+1), len(tE) - 1)]
        else:
            v1NextInd = tE[cap((i+1), len(tE) - 1)]
        v1 = m.vertices[tE[i]]; v1Next = m.vertices[v1NextInd]
        if (c == True):
            v2NextInd = bE[old_bad_wrap_that_should_be_refactored((i+1), len(bE) - 1)]
        else:
            v2NextInd = bE[cap((i+1), len(bE) - 1)]
        v2 = m.vertices[bE[i]]; v2Next = m.vertices[v2NextInd]
        v = v1.co.lerp(v2.co, 0.5); vNext = v1Next.co.lerp(v2Next.co, 0.5)
        # get the center, edges may not be straight so total length
        # of one edge may be more than the ribbon center's length
        lengths.append((v - vNext).length)
    return lengths
def EnsureCurveIsRibbon(crv, defaultRadius=0.1):
    from .base_definitions import FLOAT_EPSILON
    crvRadius = 0
    crv.data.offset = 0
    if (crv.data.bevel_depth < FLOAT_EPSILON):
        crvRadius = crv.data.extrude
    else:  # Set ribbon from bevel depth
        crvRadius = crv.data.bevel_depth
        crv.data.bevel_depth = 0
        crv.data.extrude = crvRadius
    if (crvRadius < FLOAT_EPSILON):
        crv.data.extrude = defaultRadius
def SetRibbonData(m, ribbon):
    # maybe this could be incorporated into the DetectWireEdges function?
    # maybe I can check for closed poly curves here? under what other circumstance
    # will I find the ends of the wire have identical coordinates?
    ribbonData = []
    tE = ribbon[0].copy(); bE = ribbon[1].copy()  # circle = ribbon[2]
    #
    lengths = RibbonMeshEdgeLengths(m, ribbon)
    lengths.append(0)
    totalLength = sum(lengths)
    # m.calc_normals() # calculate normals
    # it appears this has been removed.
    for i, (t, b) in enumerate(zip(tE, bE)):
        ind = old_bad_wrap_that_should_be_refactored((i + 1), len(tE) - 1)
        tNext = tE[ind]; bNext = bE[ind]
        ribbonData.append(((t, b), (tNext, bNext), lengths[i]))
    # if this is a circle, the last v in vertData has a length, otherwise 0
    return ribbonData, totalLength
def WireMeshEdgeLengths(m, wire):
    circle = False
    vIndex = wire.copy()
    for e in m.edges:
        if ((e.vertices[0] == vIndex[-1]) and (e.vertices[1] == vIndex[0])):
            # this checks for an edge between the first and last vertex in the wire
            circle = True
            break
    lengths = []
    for i in range(len(vIndex)):
        v = m.vertices[vIndex[i]]
        if (circle == True):
            vNextInd = vIndex[old_bad_wrap_that_should_be_refactored((i+1), len(vIndex) - 1)]
        else:
            vNextInd = vIndex[cap((i+1), len(vIndex) - 1)]
        vNext = m.vertices[vNextInd]
        lengths.append((v.co - vNext.co).length)
        # if this is a circular wire mesh, this should wrap instead of cap
    return lengths
def GetDataFromWire(m, wire):
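    """Build per-vertex data for a wire mesh.

    Returns (vertData, totalLength), where each vertData entry is
    (vertex_index, next_vertex_index, segment_length).
    """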
    vertData = []
    vIndex = wire.copy()
    lengths = WireMeshEdgeLengths(m, wire)
    lengths.append(0)
    totalLength = sum(lengths)
    for i, vInd in enumerate(vIndex):
        # the -1 avoids an IndexError at the last vertex
        vNext = vIndex[old_bad_wrap_that_should_be_refactored(i + 1, len(vIndex) - 1)]
        vertData.append((vInd, vNext, lengths[i]))
    # if this is a circle, the last entry in vertData has a length, otherwise 0
    return vertData, totalLength
def DetectWireEdges(mesh):
    """Return a list of vertex index lists, one per wire in the mesh.

    NOTE: this assumes a mesh object that contains only wire (edge-only) geometry.
    """
    ret = []
    import bmesh
    bm = bmesh.new()
    try:
        bm.from_mesh(mesh)
        ends = []
        for v in bm.verts:
            if len(v.link_edges) == 1:
                ends.append(v.index)
        for e in bm.edges:
            assert e.is_wire, "This function can only run on wire meshes"
            if (e.verts[1].index - e.verts[0].index) != 1:
                ends.append(e.verts[1].index)
                ends.append(e.verts[0].index)
        for i in range(len(ends) // 2):  # // is floor division
            beg = ends[i * 2]
            end = ends[(i * 2) + 1]
            indices = [(j + beg) for j in range((end - beg) + 1)]
            ret.append(indices)
    finally:
        bm.free()
    return ret
def FindNearestPointOnWireMesh(m, pointsList):
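    """Map points onto wire meshes as normalized factors along each wire.

    For every wire detected in ``m`` and its matching list of points in
    ``pointsList``, each point is projected onto the nearest wire segment
    and converted to a 0-1 factor along the wire's total length.
    """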
    from mathutils.geometry import intersect_point_line
    wires = DetectWireEdges(m)
    ret = []
    for wire, points in zip(wires, pointsList):
        vertData, total_length = GetDataFromWire(m, wire)
        factorsOut = []
        for p in points:
            prevDist = float('inf')
            curDist = float('inf')
            v1 = None
            v2 = None
            for i in range(len(vertData) - 1):
                # the last entry has no following segment, so it is not checked
                if p == m.vertices[vertData[i][0]].co:
                    # the point sits exactly on a wire vertex
                    v1 = vertData[i]
                    v2 = vertData[i + 1]
                    offset = 0
                    break
                else:
                    curDist = (((m.vertices[vertData[i][0]].co - p).length) +
                               ((m.vertices[vertData[i][1]].co - p).length)) / 2
                    if curDist < prevDist:
                        v1 = vertData[i]
                        v2 = vertData[i + 1]
                        prevDist = curDist
            else:
                # no exact match: project the point onto the nearest segment
                offset = intersect_point_line(p, m.vertices[v1[0]].co,
                                              m.vertices[v2[0]].co)[1]
            if offset < 0:
                offset = 0
            elif offset > 1:
                offset = 1
            # Assume the vertices are in order
            v1Length = 0
            v2Length = v2[2]
            for i in range(v1[0]):
                v1Length += vertData[i][2]
            factor = ((offset * v2Length) + v1Length) / total_length
            factor = wrap(0, 1, factor)  # wrapping is harmless if the factor is already in [0, 1]
            factorsOut.append(factor)
        ret.append(factorsOut)
    return ret
def mesh_from_curve(crv, context, ribbon=True):
    """Utility function for converting a curve object to a mesh,
    returning the correct mesh even when the curve has modifiers."""
    import bpy
    m = None
    bevel = crv.data.bevel_depth
    extrude = crv.data.extrude
    offset = crv.data.offset
    try:
        if len(crv.modifiers) > 0:
            do_unlink = False
            if not context.scene.collection.all_objects.get(crv.name):
                # linking the object forces the depsgraph to evaluate it
                context.collection.objects.link(crv)
                do_unlink = True
            dg = context.view_layer.depsgraph
            # temporarily modify the curve settings; they are restored in the finally block
            if ribbon:
                EnsureCurveIsRibbon(crv)
            else:
                crv.data.bevel_depth = 0
                crv.data.extrude = 0
                crv.data.offset = 0
            dg.update()
            mOb = crv.evaluated_get(dg)
            m = bpy.data.meshes.new_from_object(mOb)
            m.name = crv.data.name + '_mesh'
            if do_unlink:
                context.collection.objects.unlink(crv)
        else:
            # no modifiers, so the curve data can be converted directly
            if ribbon:
                EnsureCurveIsRibbon(crv)
            else:
                crv.data.bevel_depth = 0
                crv.data.extrude = 0
                crv.data.offset = 0
            m = bpy.data.meshes.new_from_object(crv)
    finally:
        # restore the original curve settings
        crv.data.bevel_depth = bevel
        crv.data.extrude = extrude
        crv.data.offset = offset
    return m
def DetectRibbon(f, bm, skipMe):
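    """Walk a ribbon (quad strip) starting at face ``f``.

    Follows adjacent quads through their shared edges, marking visited faces
    in ``skipMe``, and returns (top_edge_indices, bottom_edge_indices, is_circle).
    """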
    fFirst = f.index
    cont = True
    circle = False
    tEdge, bEdge = [], []
    while cont:
        skipMe.add(f.index)
        tEdge.append(f.loops[0].vert.index)  # top-left
        bEdge.append(f.loops[3].vert.index)  # bottom-left
        nEdge = bm.edges.get([f.loops[1].vert, f.loops[2].vert])
        nFaces = nEdge.link_faces
        if len(nFaces) == 1:
            cont = False
        else:
            for nFace in nFaces:
                if nFace != f:
                    f = nFace
                    break
            if f.index == fFirst:
                cont = False
                circle = True
        if not cont:  # we've reached the end, so record the last two vertices
            tEdge.append(f.loops[1].vert.index)  # top-right
            bEdge.append(f.loops[2].vert.index)  # bottom-right
            # for a ring this closes the loop --
            # "the first shall be the last and the last shall be the first"
    return (tEdge, bEdge, circle)
def DetectRibbons(m, fReport=None):
    """Return a list of (top_edge, bottom_edge, is_circle) ribbons found in ``m``.

    NOTE: this assumes a mesh object that contains only ribbon (quad strip) meshes.
    ---DO NOT call this on a mesh that isn't a ribbon!---
    """
    import bmesh
    bm = bmesh.new()
    bm.from_mesh(m)
    mIslands, mIsland = [], []
    skipMe = set()
    bm.faces.ensure_lookup_table()
    # first, collect the mesh islands
    for f in bm.faces:
        if f.index in skipMe:
            continue  # already handled
        checkMe = [f]
        while len(checkMe) > 0:
            facesFound = 0
            for f in checkMe:
                if f.index in skipMe:
                    continue  # already handled
                mIsland.append(f)
                skipMe.add(f.index)
                facesFound += 1
                for e in f.edges:
                    checkMe += e.link_faces  # the list grows while it is iterated
            if facesFound == 0:
                # no new faces were found, so the island is complete
                mIslands.append(mIsland)
                checkMe, mIsland = [], []
    ribbons = []
    skipMe = set()  # reset: now stores faces already assigned to a ribbon
    for mIsl in mIslands:
        ribbon = None
        first = float('inf')
        for f in mIsl:
            if f.index in skipMe:
                continue  # already handled
            if f.index < first:
                first = f.index
            adjF = 0
            for e in f.edges:
                # every face other than this one counts as a neighbour
                adjF += (len(e.link_faces) - 1)
            if adjF == 1:
                # a face with a single neighbour is an end of the ribbon
                ribbon = DetectRibbon(f, bm, skipMe)
                break
        if ribbon is None:
            # no end face found: the ribbon is a closed ring, so start at the lowest index
            ribbon = DetectRibbon(bm.faces[first], bm, skipMe)
        ribbons.append(ribbon)
    bm.free()
    return ribbons
def data_from_ribbon_mesh(m, factorsList, mat, ribbons=None, fReport=None):
    """Sample points, widths and normals from a ribbon mesh.

    NOTE: ``factorsList`` should contain one list of 0-1 factors per ribbon in
    the mesh. Works for multiple ribbons now, though the implementation is ugly.
    Returns a list of (points, widths, normals) tuples, one per ribbon.
    """
    if ribbons is None:
        ribbons = DetectRibbons(m, fReport=fReport)
    if ribbons is None:
        if fReport:
            fReport(type={'ERROR'}, message="No ribbon to get data from.")
        else:
            print("No ribbon to get data from.")
        return None
    ret = []
    for factors, ribbon in zip(factorsList, ribbons):
        points = []
        widths = []
        normals = []
        ribbonData, totalLength = SetRibbonData(m, ribbon)
        for fac in factors:
            if fac == 0:
                data = ribbonData[0]
                curFac = 0
            elif fac == 1:
                data = ribbonData[-1]
                curFac = 0
            else:
                targetLength = totalLength * fac
                data = ribbonData[0]
                curLength = 0
                for ((t, b), (tNext, bNext), length) in ribbonData:
                    if curLength >= targetLength:
                        break
                    curLength += length
                    data = ((t, b), (tNext, bNext), length)
                targetLengthAtEdge = curLength - targetLength
                if targetLength == 0:
                    curFac = 0
                elif targetLength == totalLength:
                    curFac = 1
                else:
                    # NOTE: data[2] (the segment length) can be zero; guard the division
                    if data[2] == 0:
                        curFac = 0
                    else:
                        curFac = 1 - (targetLengthAtEdge / data[2])
            t1 = m.vertices[data[0][0]]
            b1 = m.vertices[data[0][1]]
            t2 = m.vertices[data[1][0]]
            b2 = m.vertices[data[1][1]]
            # location: midpoint of each top/bottom pair
            loc1 = t1.co.lerp(b1.co, 0.5)
            loc2 = t2.co.lerp(b2.co, 0.5)
            # width (radius, not diameter)
            w1 = (t1.co - b1.co).length / 2
            w2 = (t2.co - b2.co).length / 2
            # normal
            n1 = t1.normal.slerp(b1.normal, 0.5)
            n2 = t2.normal.slerp(b2.normal, 0.5)
            if (data[0][0] > data[1][0]) and not ribbon[2]:
                # don't interpolate past the end of a ribbon that isn't circular
                curFac = 0
            if 0 < curFac < 1:
                outPoint = loc1.lerp(loc2, curFac)
                outNorm = n1.lerp(n2, curFac)
                outWidth = w1 + ((w2 - w1) * curFac)
            elif curFac <= 0:
                outPoint = loc1.copy()
                outNorm = n1
                outWidth = w1
            elif curFac >= 1:
                outPoint = loc2.copy()
                outNorm = n2
                outWidth = w2
            outPoint = mat @ outPoint
            outNorm.normalize()
            points.append(outPoint.copy())  # copy, because this may be an actual vertex location
            widths.append(outWidth)
            normals.append(outNorm)
        ret.append((points, widths, normals))
    return ret  # a list of tuples, each containing three lists
# This bisection search is generic: it searches on the magnitude of the
# error rather than on its sign. If the sign of the error is meaningful,
# a simpler, sign-aware function can be used instead.
def do_bisect_search_by_magnitude(
        owner,
        attribute,
        index=None,
        test_function=None,
        modify=None,
        max_iterations=10000,
        threshold=0.0001,
        thresh2=0.0005,
        context=None,
        update_dg=None,
        ):
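    """Generic bisection search on the magnitude of an error value.

    ``modify(owner, attribute, index, value, context=...)`` applies a candidate
    value and ``test_function(owner, attribute, index, context=...)`` returns the
    error for it. The search narrows the range [0, max_iterations], switches to
    a linear scan once the error drops below ``thresh2``, and stops when the
    error is within ``threshold`` (otherwise the best value found so far is applied).
    """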
    i = 0
    best_so_far = 0
    best = float('inf')
    min = 0
    center = max_iterations // 2
    max = max_iterations
    # force the absolute value, in case test_function carries sign information;
    # the sign may be useful in a sign-aware bisect search, but this one is more robust
    test = lambda: abs(test_function(owner, attribute, index, context=context))
    while i <= max_iterations:
        upper = max - ((max - center) // 2)
        modify(owner, attribute, index, upper, context=context)
        error1 = test()
        lower = center - ((center - min) // 2)
        modify(owner, attribute, index, lower, context=context)
        error2 = test()
        if error1 < error2:
            min = center
            center, check = upper, upper
            error = error1
        else:
            max = center
            center, check = lower, lower
            error = error2
        if (error <= threshold) or (min == max - 1):
            break
        if error < thresh2:
            # close enough: finish with a linear scan between min and max
            j = min
            while j < max:
                modify(owner, attribute, index, j * 1 / max_iterations, context=context)
                error = test()
                if error < best:
                    best_so_far = j
                    best = error
                if error <= threshold:
                    break
                j += 1
            else:  # the scan completed without finding a solution
                i = best_so_far
                error = test()
                modify(owner, attribute, index, best_so_far, context=context)
            break
        if error < best:
            best_so_far = check
            best = error
        i += 1
        if update_dg:
            update_dg.update()
    else:  # the loop completed without finding a solution
        i = best_so_far
        modify(owner, attribute, index, best_so_far, context=context)
        i += 1