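"""Maya importer for LEGO Digital Designer (LDD) .lxf archives.

Reads the LXFML scene description stored inside an .lxf zip file, resolves
part geometry and material data through asset_reader, and rebuilds each
brick as a Maya mesh with a matching blinn shader.
"""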
import struct
import os
import logging
import cStringIO
import xml.etree.ElementTree as ET

from zipfile import ZipFile

import maya.api.OpenMaya as OpenMaya
import maya.cmds as mc
import maya.mel as mm

import asset_reader

logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)


class LXFLoader(object):
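    """Load LEGO Digital Designer .lxf files into the current Maya scene."""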

    def __init__(self):
        self.material_index = {}

    def get_material(self):
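        """Read the LDD material palette into self.material_index.

        Each entry maps a MatID string to its RGBA components (0-255).
        Returns True on success, False if the palette could not be read
        or the user cancelled the progress bar.
        """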
        self.material_index = {}

        material_data = asset_reader.asset_extract(material_only=True)
        if not material_data:
            logger.warning("Can't extract material info")
            return False

        root = ET.fromstring(material_data)

        gMainProgressBar = mm.eval('$tmp = $gMainProgressBar')

        mc.progressBar(gMainProgressBar,
                       edit=True,
                       beginProgress=True,
                       isInterruptable=True,
                       status='Reading LDD materials ...',
                       maxValue=len(root))

        logger.info('Reading material data ...')

        for material in root:
            if mc.progressBar(gMainProgressBar, query=True, isCancelled=True):
                mc.progressBar(gMainProgressBar, edit=True, endProgress=True)
                return False

            mat_id = material.get('MatID')
            mat_r = material.get('Red')
            mat_g = material.get('Green')
            mat_b = material.get('Blue')
            mat_a = material.get('Alpha')

            if mat_id not in self.material_index:
                self.material_index[mat_id] = {'r': int(mat_r),
                                               'g': int(mat_g),
                                               'b': int(mat_b),
                                               'a': int(mat_a)}

            mc.progressBar(gMainProgressBar, edit=True, step=1)

        mc.progressBar(gMainProgressBar, edit=True, endProgress=True)

        return True

    @staticmethod
    def get_lxfml_name(zipf):
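        """Return the name of the first .LXFML entry in the archive, or ''."""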
        name_list = zipf.namelist()
        for name in name_list:
            if os.path.splitext(name)[-1].upper() == '.LXFML':
                return name
        return ''

    @staticmethod
    def get_geometry(design_id, part_data_map, output=None, debug=True):
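        """Decode the binary geometry buffer for a part design ID.

        Returns (status, vertices, normals, indices): the three lists are
        flat (x, y, z triples for vertices and normals, triangle vertex
        indices for indices).  If `output` is given, the geometry is also
        written to that path as a Wavefront OBJ file.
        """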

        def convert_to_obj(v_list, n_list, f_ids, output_file):
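            """Write the flat vertex/normal/index lists to a Wavefront OBJ file."""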
            with open(output_file, 'w') as f:
                f.write("# OBJ file\n")
                for vid in range(0, len(v_list), 3):
                    f.write('v %s %s %s\n' % (v_list[vid], v_list[vid + 1], v_list[vid + 2]))
                f.write('\n')

                for nid in range(0, len(n_list), 3):
                    f.write('vn %s %s %s\n' % (n_list[nid], n_list[nid + 1], n_list[nid + 2]))

                f.write('\n')
                for fid in range(0, len(f_ids), 3):
                    f.write('f %s %s %s\n' % (f_ids[fid] + 1, f_ids[fid + 1] + 1, f_ids[fid + 2] + 1))

        if design_id not in part_data_map:
            return False, [], [], []

        vertex = []
        normal = []
        index = []

        g_buffer = part_data_map[design_id]

        f = cStringIO.StringIO(g_buffer)
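
        # Assumed layout of the geometry buffer (inferred from the reads
        # below): four little-endian int32 header fields (the 2nd and 3rd
        # are the vertex and index counts; the 1st and 4th are skipped),
        # then vertex_count * 3 float32 positions, vertex_count * 3
        # float32 normals, and index_count int32 triangle indices.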
        # Read header part
        struct.unpack('<i', f.read(4))
        vertex_count = struct.unpack('<i', f.read(4))[0]
        index_count = struct.unpack('<i', f.read(4))[0]
        struct.unpack('<i', f.read(4))
        if debug:
            logger.debug('-' * 60)
            logger.debug('\tRead Part %s : vertex count : %s' % (design_id, vertex_count))
            logger.debug('\tRead Part %s : index count : %s' % (design_id, index_count))
            logger.debug('-' * 60)

        # Read content part
        for i in range(vertex_count):
            vertex.append(struct.unpack('<f', f.read(4))[0])
            vertex.append(struct.unpack('<f', f.read(4))[0])
            vertex.append(struct.unpack('<f', f.read(4))[0])

        for i in range(vertex_count):
            normal.append(struct.unpack('<f', f.read(4))[0])
            normal.append(struct.unpack('<f', f.read(4))[0])
            normal.append(struct.unpack('<f', f.read(4))[0])

        for i in range(index_count):
            index.append(struct.unpack('<i', f.read(4))[0])

        f.close()

        if output:
            convert_to_obj(vertex, normal, index, output)

        return True, vertex, normal, index

    def read(self, file_path_name):
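        """Import all bricks from the .lxf archive at file_path_name into the scene."""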
        has_material = self.get_material()

        part_cache = {}
        zipf = ZipFile(file_path_name)
        lxfml = self.get_lxfml_name(zipf)
        if not lxfml:
            logger.warning('No LXFML entry found in %s' % file_path_name)
            return
        lxfml_content = zipf.open(lxfml).read()
        root = ET.fromstring(lxfml_content)
        part_data_list = []
        design_ids = []
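
        # LXFML layout as consumed here: <Bricks> -> <Brick> -> <Part>
        # (designID, materials) -> <Bone> carrying a comma-separated,
        # 12-value transform (nine rotation values plus a translation).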
        for element in root:
            if element.tag == 'Bricks':
                for brick in element:
                    for part in brick:
                        design_id = part.get('designID')
                        mat_id = part.get('materials')

                        for bone in part:
                            m = [float(x) for x in bone.get('transformation').split(',')]
                            part_data = dict(designID=design_id, materials=mat_id, transformation=m)
                            part_data_list.append(part_data)
                            if design_id not in design_ids:
                                design_ids.append(design_id)

        part_data_map = asset_reader.asset_extract(design_ids=design_ids)

        if not part_data_map:
            return

        gMainProgressBar = mm.eval('$tmp = $gMainProgressBar')

        mc.progressBar(gMainProgressBar,
                       edit=True,
                       beginProgress=True,
                       isInterruptable=True,
                       status='Importing LDD models ...',
                       maxValue=len(part_data_list))

        for part_index, part_data in enumerate(part_data_list):

            if mc.progressBar(gMainProgressBar, query=True, isCancelled=True):
                logger.warning('Import of LDD models interrupted ...')
                break

            design_id = part_data['designID']
            mat_id = part_data['materials']
            m = part_data['transformation']

            if design_id not in part_cache:
                logger.debug(
                    '[%s/%s] Get new part %s' % (part_index, len(part_data_list), design_id))
                status, v_list, n_list, f_ids = self.get_geometry(design_id, part_data_map)
                if not status:
                    # Skip parts whose geometry buffer is missing instead of
                    # building an empty mesh.
                    logger.warning('No geometry found for part %s, skipping' % design_id)
                    mc.progressBar(gMainProgressBar, edit=True, step=1)
                    continue
                part_cache[design_id] = [v_list, f_ids]
            else:
                logger.debug(
                    '[%s/%s] Get cached part of %s' % (part_index, len(part_data_list), design_id))
                v_list = part_cache[design_id][0]
                f_ids = part_cache[design_id][1]

            vertices = [OpenMaya.MPoint(v_list[x], v_list[x + 1], v_list[x + 2]) for x in
                        range(0, len(v_list), 3)]
            polygon_counts = [3 for i in range(0, len(f_ids), 3)]
            fn_mesh = OpenMaya.MFnMesh()
            fn_mesh.create(vertices, polygon_counts, f_ids)
            mesh_parent = fn_mesh.parent(0)
            mesh_transform = OpenMaya.MFnTransform(mesh_parent)
            mesh_transform.setName('lego_part_%s_mesh' % design_id)
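            # Pack the 12 LXFML transform values into a 4x4 matrix: the
            # first nine form the rotation rows, the last three the
            # translation row, matching Maya's row-vector convention.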
            m_matrix = OpenMaya.MMatrix([m[0], m[1], m[2], 0,
                                         m[3], m[4], m[5], 0,
                                         m[6], m[7], m[8], 0,
                                         m[9], m[10], m[11], 1])
            transform_matrix = OpenMaya.MTransformationMatrix(m_matrix)
            mesh_transform.setTransformation(transform_matrix)

            force_element = 'initialShadingGroup'
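
            # Reuse one blinn shader and shading group per material ID,
            # creating them on first use; parts without material data fall
            # back to the initial shading group.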
            if has_material:
                if mat_id in self.material_index:
                    shd_name = 'shd_%s' % mat_id
                    sg_name = '%sSG' % shd_name
                    if not mc.objExists(shd_name):
                        color_data = self.material_index[mat_id]
                        r = float(color_data.get('r')) / 255.0
                        g = float(color_data.get('g')) / 255.0
                        b = float(color_data.get('b')) / 255.0
                        a = float(color_data.get('a')) / 255.0
                        transparency = 1.0 - a
                        mc.shadingNode('blinn', asShader=True, name=shd_name)
                        mc.setAttr('%s.color' % shd_name, r, g, b, type='double3')
                        mc.setAttr('%s.transparency' % shd_name,
                                   transparency, transparency, transparency,
                                   type='double3')
                        mc.sets(renderable=True, noSurfaceShader=True, empty=True, name=sg_name)
                        mc.connectAttr('%s.outColor' % shd_name, '%s.surfaceShader' % sg_name, f=True)

                    force_element = sg_name

            mc.sets(mesh_transform.fullPathName(), e=True, forceElement=force_element)
            mc.progressBar(gMainProgressBar, edit=True, step=1)

        mc.progressBar(gMainProgressBar, edit=True, endProgress=True)
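

# Minimal usage sketch (assumes this runs inside Maya's Python with
# asset_reader importable; the .lxf path below is a placeholder):
#
#   loader = LXFLoader()
#   loader.read('/path/to/model.lxf')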