# utils.py
  1. # ##### BEGIN GPL LICENSE BLOCK #####
  2. #
  3. # This program is free software; you can redistribute it and/or
  4. # modify it under the terms of the GNU General Public License
  5. # as published by the Free Software Foundation; either version 2
  6. # of the License, or (at your option) any later version.
  7. #
  8. # This program is distributed in the hope that it will be useful,
  9. # but WITHOUT ANY WARRANTY; without even the implied warranty of
  10. # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  11. # GNU General Public License for more details.
  12. #
  13. # You should have received a copy of the GNU General Public License
  14. # along with this program; if not, write to the Free Software Foundation,
  15. # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
  16. #
  17. # ##### END GPL LICENSE BLOCK #####
import bpy
import bmesh
import threading
import numpy as np
import multiprocessing
from multiprocessing import Process, Pool
from mathutils import Vector
try: from .numba_functions import numba_lerp2
except: pass
  26. weight = []
  27. n_threads = multiprocessing.cpu_count()
  28. class ThreadVertexGroup(threading.Thread):
  29. def __init__ ( self, id, vertex_group, n_verts):
  30. self.id = id
  31. self.vertex_group = vertex_group
  32. self.n_verts = n_verts
  33. threading.Thread.__init__ ( self )
  34. def run (self):
  35. global weight
  36. global n_threads
  37. verts = np.arange(int(self.n_verts/8))*8 + self.id
  38. for v in verts:
  39. try:
  40. weight[v] = self.vertex_group.weight(v)
  41. except:
  42. pass
  43. def thread_read_weight(_weight, vertex_group):
  44. global weight
  45. global n_threads
  46. print(n_threads)
  47. weight = _weight
  48. n_verts = len(weight)
  49. threads = [ThreadVertexGroup(i, vertex_group, n_verts) for i in range(n_threads)]
  50. for t in threads: t.start()
  51. for t in threads: t.join()
  52. return weight
  53. def process_read_weight(id, vertex_group, n_verts):
  54. global weight
  55. global n_threads
  56. verts = np.arange(int(self.n_verts/8))*8 + self.id
  57. for v in verts:
  58. try:
  59. weight[v] = self.vertex_group.weight(v)
  60. except:
  61. pass
def read_weight(_weight, vertex_group):
    """Experimental multiprocessing variant of thread_read_weight.

    NOTE(review): as written this cannot work as intended. The Pool workers
    run in separate processes, so their writes to the module-level ``weight``
    never reach the parent; the AsyncResult objects are discarded without
    being waited on, and the pool is never closed or joined (resource leak).
    In practice ``_weight`` is returned unchanged. Kept as-is pending a real
    rework (see the commented-out Process-based attempt below).
    """
    global weight
    global n_threads
    print(n_threads)  # leftover debug output
    weight = _weight
    n_verts = len(weight)
    n_cores = multiprocessing.cpu_count()
    pool = Pool(processes=n_cores)
    # results are never retrieved; see NOTE above
    multiple_results = [pool.apply_async(process_read_weight, (i, vertex_group, n_verts)) for i in range(n_cores)]
    #processes = [Process(target=process_read_weight, args=(i, vertex_group, n_verts)) for i in range(n_threads)]
    #for t in processes: t.start()
    #for t in processes: t.join()
    return weight
# Recursively traverse layer_collection, searching for a collection named collName
  76. def recurLayerCollection(layerColl, collName):
  77. found = None
  78. if (layerColl.name == collName):
  79. return layerColl
  80. for layer in layerColl.children:
  81. found = recurLayerCollection(layer, collName)
  82. if found:
  83. return found
  84. def auto_layer_collection():
  85. # automatically change active layer collection
  86. layer = bpy.context.view_layer.active_layer_collection
  87. layer_collection = bpy.context.view_layer.layer_collection
  88. if layer.hide_viewport or layer.collection.hide_viewport:
  89. collections = bpy.context.object.users_collection
  90. for c in collections:
  91. lc = recurLayerCollection(layer_collection, c.name)
  92. if not c.hide_viewport and not lc.hide_viewport:
  93. bpy.context.view_layer.active_layer_collection = lc
  94. def lerp(a, b, t):
  95. return a + (b - a) * t
  96. def _lerp2(v1, v2, v3, v4, v):
  97. v12 = v1.lerp(v2,v.x) # + (v2 - v1) * v.x
  98. v34 = v3.lerp(v4,v.x) # + (v4 - v3) * v.x
  99. return v12.lerp(v34, v.y)# + (v34 - v12) * v.y
  100. def lerp2(v1, v2, v3, v4, v):
  101. v12 = v1 + (v2 - v1) * v.x
  102. v34 = v3 + (v4 - v3) * v.x
  103. return v12 + (v34 - v12) * v.y
  104. def lerp3(v1, v2, v3, v4, v):
  105. loc = lerp2(v1.co, v2.co, v3.co, v4.co, v)
  106. nor = lerp2(v1.normal, v2.normal, v3.normal, v4.normal, v)
  107. nor.normalize()
  108. return loc + nor * v.z
  109. def np_lerp2(v00, v10, v01, v11, vx, vy):
  110. #try:
  111. # co2 = numba_lerp2(v00, v10, v01, v11, vx, vy)
  112. #except:
  113. co0 = v00 + (v10 - v00) * vx
  114. co1 = v01 + (v11 - v01) * vx
  115. co2 = co0 + (co1 - co0) * vy
  116. return co2
# Prevent Blender crashes caused by stale application handlers
  118. def set_animatable_fix_handler(self, context):
  119. old_handlers = []
  120. blender_handlers = bpy.app.handlers.render_init
  121. for h in blender_handlers:
  122. if "turn_off_animatable" in str(h):
  123. old_handlers.append(h)
  124. for h in old_handlers: blender_handlers.remove(h)
  125. ################ blender_handlers.append(turn_off_animatable)
  126. return
  127. def turn_off_animatable(scene):
  128. for o in bpy.data.objects:
  129. o.tissue_tessellate.bool_run = False
  130. o.reaction_diffusion_settings.run = False
  131. #except: pass
  132. return
  133. ### OBJECTS ###
def convert_object_to_mesh(ob, apply_modifiers=True, preserve_status=True):
    """Create a new mesh object from ``ob`` and link it to the active collection.

    ob: any object type; non-mesh types are converted through the depsgraph.
    apply_modifiers: when False, modifiers are temporarily hidden (non-mesh
        source) or the object data is copied and modifiers cleared (mesh source).
    preserve_status: when True the current selection/active object is left
        untouched; otherwise the new object becomes the only selected, active one.
    Returns the new object, or None when ``ob`` is not a valid reference.
    """
    # invalid/removed object references raise on attribute access
    try: ob.name
    except: return None
    if ob.type != 'MESH':
        if not apply_modifiers:
            # remember, then disable, modifier visibility so the evaluated
            # mesh is generated without modifiers
            mod_visibility = [m.show_viewport for m in ob.modifiers]
            for m in ob.modifiers: m.show_viewport = False
        #ob.modifiers.update()
        #dg = bpy.context.evaluated_depsgraph_get()
        #ob_eval = ob.evaluated_get(dg)
        #me = bpy.data.meshes.new_from_object(ob_eval, preserve_all_data_layers=True, depsgraph=dg)
        me = simple_to_mesh(ob)
        new_ob = bpy.data.objects.new(ob.data.name, me)
        new_ob.location, new_ob.matrix_world = ob.location, ob.matrix_world
        if not apply_modifiers:
            # restore the original modifier visibility
            for m,vis in zip(ob.modifiers,mod_visibility): m.show_viewport = vis
    else:
        if apply_modifiers:
            new_ob = ob.copy()
            new_me = simple_to_mesh(ob)
            new_ob.modifiers.clear()
            new_ob.data = new_me
        else:
            new_ob = ob.copy()
            new_ob.data = ob.data.copy()
            new_ob.modifiers.clear()
    bpy.context.collection.objects.link(new_ob)
    if preserve_status:
        new_ob.select_set(False)
    else:
        for o in bpy.context.view_layer.objects: o.select_set(False)
        new_ob.select_set(True)
        bpy.context.view_layer.objects.active = new_ob
    return new_ob
def simple_to_mesh(ob):
    """Return a new Mesh datablock generated from the evaluated ``ob``.

    Modifiers and object conversion are applied through the current
    depsgraph; all data layers are preserved and normals recalculated.
    The caller owns the returned mesh datablock.
    """
    dg = bpy.context.evaluated_depsgraph_get()
    ob_eval = ob.evaluated_get(dg)
    me = bpy.data.meshes.new_from_object(ob_eval, preserve_all_data_layers=True, depsgraph=dg)
    me.calc_normals()
    return me
def join_objects(objects, link_to_scene=True, make_active=False):
    """Join the evaluated meshes of ``objects`` into one new mesh object.

    Materials from all sources are merged into a single slot list and face
    material indices remapped accordingly. Requires the ``bmesh`` module
    (imported at file top). Returns the new object named 'joined'.

    link_to_scene: link the result to the active collection.
    make_active: deselect everything, select the result and make it active.
    """
    C = bpy.context
    bm = bmesh.new()
    materials = {}          # material -> merged slot index
    faces_materials = []    # per-face merged material index, in bmesh face order
    dg = C.evaluated_depsgraph_get()
    for o in objects:
        bm.from_object(o, dg)
        # add object's materials to the dictionary, keeping first-seen order
        for m in o.data.materials:
            if m not in materials: materials[m] = len(materials)
        for f in o.data.polygons:
            index = f.material_index
            mat = o.material_slots[index].material
            new_index = materials[mat]
            faces_materials.append(new_index)
    bm.verts.ensure_lookup_table()
    bm.edges.ensure_lookup_table()
    bm.faces.ensure_lookup_table()
    # assign the remapped material indexes; relies on bmesh faces being
    # appended in the same order the source polygons were visited
    for index, f in zip(faces_materials, bm.faces): f.material_index = index
    # create the joined mesh/object
    me = bpy.data.meshes.new('joined')
    bm.to_mesh(me)
    me.update()
    ob = bpy.data.objects.new('joined', me)
    if link_to_scene: C.collection.objects.link(ob)
    # make active
    if make_active:
        for o in C.view_layer.objects: o.select_set(False)
        ob.select_set(True)
        C.view_layer.objects.active = ob
    # add the merged materials, in slot order
    for m in materials.keys(): ob.data.materials.append(m)
    return ob
  209. ### MESH FUNCTIONS
  210. def get_vertices_numpy(mesh):
  211. n_verts = len(mesh.vertices)
  212. verts = [0]*n_verts*3
  213. mesh.vertices.foreach_get('co', verts)
  214. verts = np.array(verts).reshape((n_verts,3))
  215. return verts
  216. def get_vertices_and_normals_numpy(mesh):
  217. n_verts = len(mesh.vertices)
  218. verts = [0]*n_verts*3
  219. normals = [0]*n_verts*3
  220. mesh.vertices.foreach_get('co', verts)
  221. mesh.vertices.foreach_get('normal', normals)
  222. verts = np.array(verts).reshape((n_verts,3))
  223. normals = np.array(normals).reshape((n_verts,3))
  224. return verts, normals
  225. def get_edges_numpy(mesh):
  226. n_edges = len(mesh.edges)
  227. edges = [0]*n_edges*2
  228. mesh.edges.foreach_get('vertices', edges)
  229. edges = np.array(edges).reshape((n_edges,2)).astype('int')
  230. return edges
  231. def get_edges_id_numpy(mesh):
  232. n_edges = len(mesh.edges)
  233. edges = [0]*n_edges*2
  234. mesh.edges.foreach_get('vertices', edges)
  235. edges = np.array(edges).reshape((n_edges,2))
  236. indexes = np.arange(n_edges).reshape((n_edges,1))
  237. edges = np.concatenate((edges,indexes), axis=1)
  238. return edges
  239. def get_vertices(mesh):
  240. n_verts = len(mesh.vertices)
  241. verts = [0]*n_verts*3
  242. mesh.vertices.foreach_get('co', verts)
  243. verts = np.array(verts).reshape((n_verts,3))
  244. verts = [Vector(v) for v in verts]
  245. return verts
  246. def get_faces(mesh):
  247. faces = [[v for v in f.vertices] for f in mesh.polygons]
  248. return faces
  249. def get_faces_numpy(mesh):
  250. faces = [[v for v in f.vertices] for f in mesh.polygons]
  251. return np.array(faces)
  252. def get_faces_edges_numpy(mesh):
  253. faces = [v.edge_keys for f in mesh.polygons]
  254. return np.array(faces)
  255. #try:
  256. #from numba import jit, njit
  257. #from numba.typed import List
  258. '''
  259. @jit
  260. def find_curves(edges, n_verts):
  261. #verts_dict = {key:[] for key in range(n_verts)}
  262. verts_dict = {}
  263. for key in range(n_verts): verts_dict[key] = []
  264. for e in edges:
  265. verts_dict[e[0]].append(e[1])
  266. verts_dict[e[1]].append(e[0])
  267. curves = []#List()
  268. loop1 = True
  269. while loop1:
  270. if len(verts_dict) == 0:
  271. loop1 = False
  272. continue
  273. # next starting point
  274. v = list(verts_dict.keys())[0]
  275. # neighbors
  276. v01 = verts_dict[v]
  277. if len(v01) == 0:
  278. verts_dict.pop(v)
  279. continue
  280. curve = []#List()
  281. curve.append(v) # add starting point
  282. curve.append(v01[0]) # add neighbors
  283. verts_dict.pop(v)
  284. loop2 = True
  285. while loop2:
  286. last_point = curve[-1]
  287. #if last_point not in verts_dict: break
  288. v01 = verts_dict[last_point]
  289. # curve end
  290. if len(v01) == 1:
  291. verts_dict.pop(last_point)
  292. loop2 = False
  293. continue
  294. if v01[0] == curve[-2]:
  295. curve.append(v01[1])
  296. verts_dict.pop(last_point)
  297. elif v01[1] == curve[-2]:
  298. curve.append(v01[0])
  299. verts_dict.pop(last_point)
  300. else:
  301. loop2 = False
  302. continue
  303. if curve[0] == curve[-1]:
  304. loop2 = False
  305. continue
  306. curves.append(curve)
  307. return curves
  308. '''
def find_curves(edges, n_verts):
    """Group connected edges into polylines ("curves").

    edges: iterable of (v0, v1) vertex-index pairs.
    n_verts: total vertex count (indices 0..n_verts-1).
    Returns a list of curves, each a list of vertex indices in path order;
    a closed loop repeats its first index as the last element.

    NOTE(review): the walk assumes each vertex has at most two neighbors
    (clean polylines); at branch points with 3+ neighbors 'new_point' can
    stay None — confirm callers only pass edge-loop style input.
    """
    # adjacency map: vertex -> list of neighbor vertices
    verts_dict = {key:[] for key in range(n_verts)}
    for e in edges:
        verts_dict[e[0]].append(e[1])
        verts_dict[e[1]].append(e[0])
    curves = []
    while True:
        # all vertices consumed: done
        if len(verts_dict) == 0: break
        # next starting point
        v = list(verts_dict.keys())[0]
        # neighbors
        v01 = verts_dict[v]
        if len(v01) == 0:
            # isolated vertex: discard it
            verts_dict.pop(v)
            continue
        curve = []
        # seed the curve with the start vertex and its neighbor(s); for a
        # mid-chain start, put one neighbor on each side
        if len(v01) > 1: curve.append(v01[1])    # add neighbors
        curve.append(v)    # add starting point
        curve.append(v01[0])    # add neighbors
        verts_dict.pop(v)
        # start building curve
        while True:
            #last_point = curve[-1]
            #if last_point not in verts_dict: break
            # try to change direction if needed: keep extending whichever
            # end of the curve still has unvisited vertices
            if curve[-1] in verts_dict: pass
            elif curve[0] in verts_dict: curve.reverse()
            else: break
            # neighbors points
            last_point = curve[-1]
            v01 = verts_dict[last_point]
            # curve end
            if len(v01) == 1:
                verts_dict.pop(last_point)
                # the other end may still be extendable
                if curve[0] in verts_dict: continue
                else: break
            # chose next point: the neighbor we did not come from
            new_point = None
            if v01[0] == curve[-2]: new_point = v01[1]
            elif v01[1] == curve[-2]: new_point = v01[0]
            #else: break
            #if new_point != curve[1]:
            curve.append(new_point)
            verts_dict.pop(last_point)
            # closed loop detected: first == last, stop this curve
            if curve[0] == curve[-1]:
                verts_dict.pop(new_point)
                break
        curves.append(curve)
    return curves
  358. def curve_from_points(points, name='Curve'):
  359. curve = bpy.data.curves.new(name,'CURVE')
  360. for c in points:
  361. s = curve.splines.new('POLY')
  362. s.points.add(len(c))
  363. for i,p in enumerate(c): s.points[i].co = p.xyz + [1]
  364. ob_curve = bpy.data.objects.new(name,curve)
  365. return ob_curve
def curve_from_pydata(points, indexes, name='Curve', skip_open=False, merge_distance=1, set_active=True):
    """Build a 3D poly-curve object from raw point data and index loops.

    points: sequence of 3D coordinates, indexable by vertex id.
    indexes: iterable of index lists, one spline per list; a list whose
        first and last index match produces a cyclic spline.
    skip_open: drop non-cyclic loops entirely.
    merge_distance: decimation threshold — walking along the loop, points
        are dropped until the accumulated distance exceeds this value.
    set_active: make the new object the active object.
    Returns the new object, linked to the active collection.
    """
    curve = bpy.data.curves.new(name,'CURVE')
    curve.dimensions = '3D'
    for c in indexes:
        # cleanup: decimate points that are closer than merge_distance
        pts = np.array([points[i] for i in c])
        if merge_distance > 0:
            # distance from each point to its predecessor (wrapping around)
            pts1 = np.roll(pts,1,axis=0)
            dist = np.linalg.norm(pts1-pts, axis=1)
            count = 0
            n = len(dist)
            mask = np.ones(n).astype('bool')
            for i in range(n):
                count += dist[i]
                # keep the point once enough distance has accumulated,
                # otherwise mask it out
                # NOTE(review): the first point can be masked away too —
                # confirm this is intended
                if count > merge_distance: count = 0
                else: mask[i] = False
            pts = pts[mask]
        bool_cyclic = c[0] == c[-1]
        if skip_open and not bool_cyclic: continue
        s = curve.splines.new('POLY')
        n_pts = len(pts)
        # a new POLY spline starts with one point, hence n_pts - 1
        s.points.add(n_pts-1)
        # spline coordinates are 4D: append w = 1 to every point
        w = np.ones(n_pts).reshape((n_pts,1))
        co = np.concatenate((pts,w),axis=1).reshape((n_pts*4))
        s.points.foreach_set('co',co)
        s.use_cyclic_u = bool_cyclic
    ob_curve = bpy.data.objects.new(name,curve)
    bpy.context.collection.objects.link(ob_curve)
    if set_active:
        bpy.context.view_layer.objects.active = ob_curve
    return ob_curve
  397. def curve_from_vertices(indexes, verts, name='Curve'):
  398. curve = bpy.data.curves.new(name,'CURVE')
  399. for c in indexes:
  400. s = curve.splines.new('POLY')
  401. s.points.add(len(c))
  402. for i,p in enumerate(c): s.points[i].co = verts[p].co.xyz + [1]
  403. ob_curve = bpy.data.objects.new(name,curve)
  404. return ob_curve
  405. ### WEIGHT FUNCTIONS ###
  406. def get_weight(vertex_group, n_verts):
  407. weight = [0]*n_verts
  408. for i in range(n_verts):
  409. try: weight[i] = vertex_group.weight(i)
  410. except: pass
  411. return weight
  412. def get_weight_numpy(vertex_group, n_verts):
  413. weight = [0]*n_verts
  414. for i in range(n_verts):
  415. try: weight[i] = vertex_group.weight(i)
  416. except: pass
  417. return np.array(weight)