2016-12-30 11:36:05 +00:00
|
|
|
#include "..\Header\MshFile.h"
|
2017-01-15 11:26:15 +00:00
|
|
|
#include "..\Header\tga.h"
|
2017-01-29 10:35:43 +00:00
|
|
|
#include "..\Header\OutputDevice.h"
|
2017-02-05 14:25:59 +00:00
|
|
|
#include <QVector3D>
|
2016-12-30 11:36:05 +00:00
|
|
|
|
2017-01-30 15:00:14 +00:00
|
|
|
|
2016-12-30 11:36:05 +00:00
|
|
|
// helper function to save data from file to any variable type
|
|
|
|
#define F2V(variableName) reinterpret_cast<char*>(&variableName)
|
|
|
|
|
|
|
|
|
|
|
|
/////////////////////////////////////////////////////////////////////////
|
|
|
|
// public constructor/destructor
|
|
|
|
|
2017-01-29 10:35:43 +00:00
|
|
|
// Construct a loader for the given .msh file and parse it immediately.
// FileInterface(path) is expected to open m_file; import() then reads the
// whole chunk tree.
MshFile::MshFile(QString path)
    : FileInterface(path)
{
    import();
}
|
|
|
|
|
|
|
|
// No resources owned directly by this class; members clean up themselves.
MshFile::~MshFile()
{
}
|
|
|
|
|
|
|
|
|
|
|
|
/////////////////////////////////////////////////////////////////////////
|
|
|
|
// private functions
|
|
|
|
|
|
|
|
void MshFile::import()
|
|
|
|
{
|
|
|
|
// go to file size information
|
2017-02-02 13:44:48 +00:00
|
|
|
m_file.seek(4);
|
2016-12-30 11:36:05 +00:00
|
|
|
|
2017-02-02 13:44:48 +00:00
|
|
|
quint32 tmp_fileSize;
|
|
|
|
QList<ChunkHeader*> tmp_mainChunks;
|
2016-12-30 11:36:05 +00:00
|
|
|
|
|
|
|
// get all chunks under HEDR
|
|
|
|
m_file.read(F2V(tmp_fileSize), sizeof(tmp_fileSize));
|
2017-02-02 13:44:48 +00:00
|
|
|
loadChunks(tmp_mainChunks, m_file.pos(), tmp_fileSize);
|
2016-12-30 11:36:05 +00:00
|
|
|
|
|
|
|
// evaulate HEDR subchunks (= find MSH2)
|
|
|
|
for (ChunkHeader* it : tmp_mainChunks)
|
|
|
|
{
|
2017-02-02 13:44:48 +00:00
|
|
|
if ("MSH2" == it->name)
|
2016-12-30 11:36:05 +00:00
|
|
|
{
|
|
|
|
// get all subchunks
|
2017-02-02 13:44:48 +00:00
|
|
|
QList<ChunkHeader*> tmp_msh2Chunks;
|
2016-12-30 11:36:05 +00:00
|
|
|
loadChunks(tmp_msh2Chunks, it->position, it->size);
|
|
|
|
|
|
|
|
// evaluate MSH2 subchunks
|
|
|
|
analyseMsh2Chunks(tmp_msh2Chunks);
|
|
|
|
|
|
|
|
// clean up
|
|
|
|
while (!tmp_msh2Chunks.empty())
|
|
|
|
{
|
|
|
|
ChunkHeader* curs = tmp_msh2Chunks.front();
|
|
|
|
tmp_msh2Chunks.pop_front();
|
|
|
|
delete curs;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// clean up
|
|
|
|
while (!tmp_mainChunks.empty())
|
|
|
|
{
|
|
|
|
ChunkHeader* cur = tmp_mainChunks.front();
|
|
|
|
tmp_mainChunks.pop_front();
|
|
|
|
delete cur;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2017-02-02 13:44:48 +00:00
|
|
|
// Enumerate all chunk headers inside the region [start, start + length).
// Each header is a 4-byte tag followed by a quint32 payload size; the payload
// itself is skipped. Allocated ChunkHeader objects are owned by the caller.
void MshFile::loadChunks(QList<ChunkHeader*>& destination, qint64 start, const quint32 length)
{
    // jump to first chunk
    m_file.seek(start);

    do
    {
        // out of file. Maybe a size information is corrupted
        if (m_file.atEnd() || m_file.error() != QFileDevice::NoError)
        {
            OutputDevice::getInstance()->print("WARNING: corrupted file. Trying to continue..", 1);
            m_file.unsetError();
            m_file.seek(0);
            break;
        }

        ChunkHeader* tmp_header = new ChunkHeader();

        // 4-byte chunk tag; buffer is one byte longer and zeroed so it is
        // always null-terminated for the QString conversion
        char tmpName[5] = { 0 };
        m_file.read(F2V(tmpName[0]), sizeof(tmpName) - 1);
        tmp_header->name = QString(tmpName);
        m_file.read(F2V(tmp_header->size), sizeof(tmp_header->size));
        tmp_header->position = m_file.pos();

        // store information
        destination.push_back(tmp_header);

        // jump to next header
        m_file.seek(tmp_header->size + m_file.pos());

        // BUGFIX: was "!= length" — a corrupted chunk size that overshoots the
        // parent region would then never satisfy the condition and the loop
        // relied solely on hitting EOF; "<" terminates as soon as the region
        // is exhausted or overshot
    } while (m_file.pos() - start < length);
}
|
|
|
|
|
2017-02-02 13:44:48 +00:00
|
|
|
// Evaluate the direct children of an MSH2 chunk: scene info (SINF/BBOX),
// the material list (MATL/MATD) and every model (MODL).
void MshFile::analyseMsh2Chunks(QList<ChunkHeader*>& chunkList)
{
    for (auto& it : chunkList)
    {
        // scene information
        if ("SINF" == it->name)
        {
            // get SINF subchunks
            QList<ChunkHeader*> tmp_sinfChunks;
            loadChunks(tmp_sinfChunks, it->position, it->size);

            // evaluate SINF subchunks
            // (renamed loop variable: the original shadowed the outer "it")
            for (auto& sinfIt : tmp_sinfChunks)
            {
                if ("BBOX" == sinfIt->name)
                {
                    m_file.seek(sinfIt->position);

                    // read in the quaternion
                    float tmp_quat[4];
                    for (int i = 0; i < 4; i++)
                        m_file.read(F2V(tmp_quat[i]), sizeof(float));

                    m_sceneBbox.rotation.setX(tmp_quat[0]);
                    m_sceneBbox.rotation.setY(tmp_quat[1]);
                    m_sceneBbox.rotation.setZ(tmp_quat[2]);
                    m_sceneBbox.rotation.setScalar(tmp_quat[3]);

                    // read in the center
                    for (int i = 0; i < 3; i++)
                        m_file.read(F2V(m_sceneBbox.center[i]), sizeof(float));

                    // read in the extents
                    for (int i = 0; i < 3; i++)
                        m_file.read(F2V(m_sceneBbox.extents[i]), sizeof(float));
                }
            }

            // clean up SINF subchunks
            qDeleteAll(tmp_sinfChunks);
        }

        // material list
        else if ("MATL" == it->name)
        {
            OutputDevice::getInstance()->print("loading materials..", 0);

            // "useless" information how many MATD follow, jump over it
            m_file.seek(it->position);
            m_file.seek(sizeof(quint32) + m_file.pos());

            // get all MATL subchunks (minus the 4 count bytes just skipped)
            QList<ChunkHeader*> tmp_matlChunks;
            loadChunks(tmp_matlChunks, m_file.pos(), it->size - 4);

            // evaluate MATL subchunks
            for (auto& matlIt : tmp_matlChunks)
            {
                // This shouldn't be anything else than MATD
                if ("MATD" == matlIt->name)
                {
                    // get all subchunks from MATD
                    QList<ChunkHeader*> tmp_matdChunks;
                    loadChunks(tmp_matdChunks, matlIt->position, matlIt->size);

                    // new material the MATD data will be written into
                    m_materials->push_back(Material());

                    // analyse MATD subchunks
                    analyseMatdChunks(tmp_matdChunks);

                    // clean up MATD subchunks
                    qDeleteAll(tmp_matdChunks);
                    tmp_matdChunks.clear();
                }
            }

            // clean up MATL subchunks
            qDeleteAll(tmp_matlChunks);
            tmp_matlChunks.clear();
        }

        // model
        else if ("MODL" == it->name)
        {
            OutputDevice::getInstance()->print("loading model..", 0);

            Model* new_model = new Model;
            m_currentType = ModelTyp::null;
            m_currentRenderFlag = -1;

            // get all MODL subchunks
            QList<ChunkHeader*> tmp_chunks;
            loadChunks(tmp_chunks, it->position, it->size);

            // evaluate MODL subchunks
            analyseModlChunks(new_model, tmp_chunks);

            // clean up MODL subchunks
            qDeleteAll(tmp_chunks);
            tmp_chunks.clear();

            // save Model data (ownership moves to m_models)
            m_models->push_back(new_model);
        }
    }
}
|
|
|
|
|
2017-02-02 13:44:48 +00:00
|
|
|
// Fill the most recently added material (m_materials->back()) from the
// subchunks of one MATD: name, color data, attribute flags and texture names.
void MshFile::analyseMatdChunks(QList<ChunkHeader*>& chunkList)
{
    // Read a (null-padded) string chunk of "size" bytes at "position".
    // BUGFIX: the previous code allocated size + 1 bytes but only zeroed the
    // FIRST byte (*buffer = {0}), so a fully used buffer was not guaranteed
    // to be null-terminated and QString(buffer) could read past the end.
    // QByteArray::constData() is always null-terminated.
    auto readString = [this](qint64 position, quint32 size) -> QString {
        m_file.seek(position);
        return QString(m_file.read(size).constData());
    };

    for (auto& it : chunkList)
    {
        // name
        if ("NAME" == it->name)
        {
            m_materials->back().name = readString(it->position, it->size);
        }

        // data (colors and shininess)
        else if ("DATA" == it->name)
        {
            m_file.seek(it->position);

            // diffuse
            for (unsigned int i = 0; i < 4; i++)
                m_file.read(F2V(m_materials->back().diffuseColor[i]), sizeof(float));

            // specular
            for (unsigned int i = 0; i < 4; i++)
                m_file.read(F2V(m_materials->back().specularColor[i]), sizeof(float));

            // ambient
            for (unsigned int i = 0; i < 4; i++)
                m_file.read(F2V(m_materials->back().ambientColor[i]), sizeof(float));

            // shininess
            m_file.read(F2V(m_materials->back().shininess), sizeof(float));
        }

        // attributes
        else if ("ATRB" == it->name)
        {
            // get pointer to current material
            Material* curMat = &m_materials->back();

            // read the attributes
            m_file.seek(it->position);
            quint8 flag;
            m_file.read(F2V(flag), sizeof(flag));
            m_file.read(F2V(curMat->rendertype), sizeof(quint8));
            m_file.read(F2V(curMat->dataValues[0]), sizeof(quint8));
            m_file.read(F2V(curMat->dataValues[1]), sizeof(quint8));

            // unpack the flag bits (LSB first):
            // 0: emissive
            // 1: glow
            // 2: single-sided transparency
            // 3: double-sided transparency
            // 4: hard-edged transparency
            // 5: per-pixel lighting
            // 6: additive transparency
            // 7: specular
            for (unsigned int i = 0; i < 8; i++)
                curMat->flags[i] = (quint8)(flag << (7 - i)) >> 7;

            curMat->transparent = curMat->flags[2] || curMat->flags[3] || curMat->flags[4] || curMat->flags[6] || curMat->rendertype == 4;
        }

        // texture 0
        else if ("TX0D" == it->name)
        {
            m_materials->back().tx0d = readString(it->position, it->size);

            // load the texture if the name is not empty
            if (!m_materials->back().tx0d.isEmpty())
                loadTexture(m_materials->back().texture0, m_filepath, m_materials->back().tx0d);
        }

        // texture 1
        else if ("TX1D" == it->name)
        {
            m_materials->back().tx1d = readString(it->position, it->size);

            if (!m_materials->back().tx1d.isEmpty())
                loadTexture(m_materials->back().texture1, m_filepath, m_materials->back().tx1d);
        }

        // texture 2 (name stored only, not loaded)
        else if ("TX2D" == it->name)
        {
            m_materials->back().tx2d = readString(it->position, it->size);
        }

        // texture 3 (name stored only, not loaded)
        else if ("TX3D" == it->name)
        {
            m_materials->back().tx3d = readString(it->position, it->size);
        }
    }
}
|
|
|
|
|
2017-02-02 13:44:48 +00:00
|
|
|
// Fill one Model from the subchunks of a MODL: type, parent/name strings,
// render flags, transform (TRAN) and geometry (GEOM).
void MshFile::analyseModlChunks(Model * dataDestination, QList<ChunkHeader*>& chunkList)
{
    // Read a (null-padded) string chunk.
    // BUGFIX: the old inline buffers zeroed only their first byte, so a fully
    // used buffer was not null-terminated; QByteArray::constData() is.
    auto readString = [this](qint64 position, quint32 size) -> QString {
        m_file.seek(position);
        return QString(m_file.read(size).constData());
    };

    for (auto& it : chunkList)
    {
        // model type
        if ("MTYP" == it->name)
        {
            m_file.seek(it->position);
            quint32 tmp_type;
            m_file.read(F2V(tmp_type), sizeof(tmp_type));
            m_currentType = (ModelTyp)tmp_type;
        }

        // parent name
        else if ("PRNT" == it->name)
        {
            dataDestination->parent = readString(it->position, it->size);
        }

        // model name
        else if ("NAME" == it->name)
        {
            dataDestination->name = readString(it->position, it->size);
        }

        // render flags
        else if ("FLGS" == it->name)
        {
            m_file.seek(it->position);
            m_file.read(F2V(m_currentRenderFlag), sizeof(m_currentRenderFlag));
        }

        // translation
        else if ("TRAN" == it->name)
        {
            float tmp_scale[3];
            float tmp_rotation[4];
            float tmp_trans[3];

            m_file.seek(it->position);

            // read in the data: scale, rotation quaternion, translation
            for (int i = 0; i < 3; i++)
                m_file.read(F2V(tmp_scale[i]), sizeof(float));

            for (int i = 0; i < 4; i++)
                m_file.read(F2V(tmp_rotation[i]), sizeof(float));

            for (int i = 0; i < 3; i++)
                m_file.read(F2V(tmp_trans[i]), sizeof(float));

            // modify the matrix and quaternion
            dataDestination->m4x4Translation.scale(tmp_scale[0], tmp_scale[1], tmp_scale[2]);
            dataDestination->m4x4Translation.translate(tmp_trans[0], tmp_trans[1], tmp_trans[2]);
            dataDestination->quadRotation.setVector(QVector3D(tmp_rotation[0], tmp_rotation[1], tmp_rotation[2]));
            dataDestination->quadRotation.setScalar(tmp_rotation[3]);

            // chain the parent transform in front of the local one
            dataDestination->m4x4Translation = getParentMatrix(dataDestination->parent) * dataDestination->m4x4Translation;
            dataDestination->quadRotation = getParentRotation(dataDestination->parent) * dataDestination->quadRotation;
        }

        // geometry data
        else if ("GEOM" == it->name)
        {
            // don't get null, bone, shadowMesh and hidden mesh indices
            if (m_currentType == null || m_currentType == bone || m_currentType == shadowMesh || m_currentRenderFlag == 1)
                continue;

            // get all GEOM subchunks
            QList<ChunkHeader*> tmp_geomChunks;
            loadChunks(tmp_geomChunks, it->position, it->size);

            // evaluate GEOM subchunks
            analyseGeomChunks(dataDestination, tmp_geomChunks);

            // clean up GEOM subchunks
            qDeleteAll(tmp_geomChunks);
            tmp_geomChunks.clear();
        }
    }
}
|
|
|
|
|
2017-02-02 13:44:48 +00:00
|
|
|
// Dispatch the children of a GEOM chunk: SEGM (regular mesh segment) and
// CLTH (cloth) are each loaded and analysed by their own routine.
void MshFile::analyseGeomChunks(Model * dataDestination, QList<ChunkHeader*>& chunkList)
{
    for (auto& it : chunkList)
    {
        // segment
        if ("SEGM" == it->name)
        {
            // get all SEGM subchunks
            QList<ChunkHeader*> tmp_segmChunks;
            loadChunks(tmp_segmChunks, it->position, it->size);

            // evaluate SEGM subchunks
            analyseSegmChunks(dataDestination, tmp_segmChunks);

            // clean up SEGM subchunks (qDeleteAll replaces the manual loop)
            qDeleteAll(tmp_segmChunks);
            tmp_segmChunks.clear();
        }

        // cloth
        else if ("CLTH" == it->name)
        {
            // get all CLTH subchunks
            QList<ChunkHeader*> tmp_clthChunks;
            loadChunks(tmp_clthChunks, it->position, it->size);

            // evaluate CLTH subchunks
            analyseClthChunks(dataDestination, tmp_clthChunks);

            // clean up CLTH subchunks
            qDeleteAll(tmp_clthChunks);
            tmp_clthChunks.clear();
        }
    }
}
|
|
|
|
|
2017-02-02 13:44:48 +00:00
|
|
|
// Build one Segment from the subchunks of a SEGM: material index, vertex
// positions, normals, UVs and the triangle-strip index data (STRP).
// The strip decoding duplicates vertices whose per-polygon normal was already
// assigned, so each emitted triangle corner carries its own polygon normal,
// tangent and bitangent.
void MshFile::analyseSegmChunks(Model * dataDestination, QList<ChunkHeader*>& chunkList)
{
    Segment* new_segment = new Segment;

    // Compute polygon normal, tangent and bitangent from the triangle spanned
    // by the first three indices in "indices".
    // (This code was copy-pasted three times in the original version.)
    // NOTE(review): f is not guarded against degenerate UVs (division by
    // zero) — behavior kept identical to the original.
    auto computeTbn = [new_segment](const QVector<GLuint>& indices, QVector3D& norm, QVector3D& tan, QVector3D& bi) {
        QVector3D vec1 = new_segment->vertices[indices[0]].position - new_segment->vertices[indices[1]].position;
        QVector3D vec2 = new_segment->vertices[indices[0]].position - new_segment->vertices[indices[2]].position;
        QVector2D uv1 = new_segment->vertices[indices[0]].texCoord - new_segment->vertices[indices[1]].texCoord;
        QVector2D uv2 = new_segment->vertices[indices[0]].texCoord - new_segment->vertices[indices[2]].texCoord;
        float f = 1.0f / (uv1.x() * uv2.y() - uv2.x() * uv1.y());

        norm = QVector3D::crossProduct(vec1, vec2).normalized();

        tan.setX(f * (uv2.y() * vec1.x() - uv1.y() * vec2.x()));
        tan.setY(f * (uv2.y() * vec1.y() - uv1.y() * vec2.y()));
        tan.setZ(f * (uv2.y() * vec1.z() - uv1.y() * vec2.z()));
        tan.normalize();

        bi.setX(f * (-uv2.x() * vec1.x() + uv1.x() * vec2.x()));
        bi.setY(f * (-uv2.x() * vec1.y() + uv1.x() * vec2.y()));
        bi.setZ(f * (-uv2.x() * vec1.z() + uv1.x() * vec2.z()));
        bi.normalize();
    };

    // Assign the polygon TBN to the vertex at "index" and record the index;
    // if the vertex already carries a polygon normal, duplicate it instead.
    // BUGFIX: the vertex is copied BEFORE push_back — the original passed a
    // reference into the very container being grown, which can dangle on
    // reallocation.
    auto emitVertex = [new_segment](int index, const QVector3D& norm, const QVector3D& tan, const QVector3D& bi) {
        if (new_segment->vertices[index].polygonNormal == QVector3D(0, 0, 0))
        {
            // polygon normal wasn't calculated before
            new_segment->vertices[index].polygonNormal = norm;
            new_segment->vertices[index].tangent = tan;
            new_segment->vertices[index].bitangent = bi;
            new_segment->indices.push_back(index);
        }
        else
        {
            // polygon normal already calculated, so duplicate the vertex
            auto duplicate = new_segment->vertices[index];
            new_segment->vertices.push_back(duplicate);
            new_segment->vertices.back().polygonNormal = norm;
            new_segment->vertices.back().tangent = tan;
            new_segment->vertices.back().bitangent = bi;
            new_segment->indices.push_back(new_segment->vertices.size() - 1);
        }
    };

    for (auto& it : chunkList)
    {
        // material index
        if ("MATI" == it->name)
        {
            m_file.seek(it->position);
            m_file.read(F2V(new_segment->textureIndex), sizeof(new_segment->textureIndex));
        }

        // position list (vertex)
        else if ("POSL" == it->name)
        {
            readVertex(new_segment, it->position);
        }

        // normals
        else if ("NRML" == it->name)
        {
            quint32 tmp_size;
            m_file.seek(it->position);
            m_file.read(F2V(tmp_size), sizeof(tmp_size));

            if (tmp_size < (unsigned) new_segment->vertices.size())
            {
                OutputDevice::getInstance()->print("WARNING: too less normals " + QString::number(tmp_size) + " < " + QString::number(new_segment->vertices.size()), 1);

                // zero the normals we have no data for
                for (unsigned int i = new_segment->vertices.size(); i != tmp_size; i--)
                    for (unsigned int j = 0; j < 3; j++)
                        new_segment->vertices[i - 1].vertexNormal[j] = 0;
            }
            else if (tmp_size > (unsigned) new_segment->vertices.size())
            {
                OutputDevice::getInstance()->print("WARNING: too many normals " + QString::number(tmp_size) + " > " + QString::number(new_segment->vertices.size()), 1);

                // read only as many normals as there are vertices
                tmp_size = new_segment->vertices.size();
            }

            for (unsigned int i = 0; i < tmp_size; i++)
                for (unsigned int j = 0; j < 3; j++)
                    m_file.read(F2V(new_segment->vertices[i].vertexNormal[j]), sizeof(float));
        }

        // uv
        else if ("UV0L" == it->name)
        {
            readUV(new_segment, it->position);
        }

        // polygons (indices into vertex/uv list)
        else if ("STRP" == it->name)
        {
            // jump to the data section and read the size
            quint32 tmp_size;
            m_file.seek(it->position);
            m_file.read(F2V(tmp_size), sizeof(tmp_size));

            int highBitCount(0);
            QVector<GLuint> tmp_buffer;

            for (unsigned int i = 0; i < tmp_size; i++)
            {
                // read one strip index
                quint16 tmp_value;
                m_file.read(F2V(tmp_value), sizeof(tmp_value));

                // check if the high bit is set (marks a strip start)
                if (tmp_value >> 15)
                {
                    highBitCount++;
                    // remove the high bit, to get the actual value
                    tmp_value = (quint16(tmp_value << 1) >> 1);
                }

                // save data
                tmp_buffer.push_back((GLuint)tmp_value);

                // if the last 2 high bits are set, it was a new poly
                if (highBitCount == 2)
                {
                    highBitCount = 0;

                    if (tmp_buffer.size() == 5)
                    {
                        // simple triangle: emit its three corners
                        QVector3D norm, tan, bi;
                        computeTbn(tmp_buffer, norm, tan, bi);

                        for (int k = 0; k < 3; k++)
                            emitVertex(tmp_buffer[k], norm, tan, bi);

                        tmp_buffer.remove(0, 3);
                    }
                    else if (tmp_buffer.size() > 5)
                    {
                        // multi polygon (the last 2 values belong to the next one)
                        unsigned int tmp_multiPolySize = tmp_buffer.size() - 2;

                        QVector3D norm, tan, bi;
                        computeTbn(tmp_buffer, norm, tan, bi);

                        // for every triangle of the multi polygon..
                        for (unsigned int tri = 0; tri < tmp_multiPolySize - 2; tri++)
                            // ..calculate the edge indices (strip order flips
                            // on every second triangle)
                            for (int triEdge = 0; triEdge < 3; triEdge++)
                                emitVertex(tmp_buffer[(tri + triEdge - ((tri % 2) * (triEdge - 1) * 2))], norm, tan, bi);

                        tmp_buffer.remove(0, tmp_multiPolySize);
                    }
                } // if 2 high bits are set
            } // for all values

            // save the last polygon (no 2 high bits followed)
            if (tmp_buffer.size() == 3)
            {
                QVector3D norm, tan, bi;
                computeTbn(tmp_buffer, norm, tan, bi);

                for (int k = 0; k < 3; k++)
                    emitVertex(tmp_buffer[k], norm, tan, bi);

                tmp_buffer.remove(0, 3);
            }
            else if (tmp_buffer.size() > 3)
            {
                unsigned int tmp_multiPolySize = tmp_buffer.size();

                QVector3D norm, tan, bi;
                computeTbn(tmp_buffer, norm, tan, bi);

                // for every triangle of the multi polygon..
                for (unsigned int tri = 0; tri < tmp_multiPolySize - 2; tri++)
                    // ..calculate the edge indices
                    for (int triEdge = 0; triEdge < 3; triEdge++)
                        emitVertex(tmp_buffer[(tri + triEdge - ((tri % 2) * (triEdge - 1) * 2))], norm, tan, bi);
            }
        }
    }

    // ownership of the segment moves to the model
    dataDestination->segmList.push_back(new_segment);
}
|
|
|
|
|
2017-02-02 13:44:48 +00:00
|
|
|
// Builds one cloth segment from the CLTH subchunks and appends it to the
// destination model's segment list.
//  - CTEX: texture file name -> creates a new "Cloth Material"
//  - CPOS: vertex positions
//  - CUV0: texture coordinates
//  - CMSH: triangle indices; per-triangle face normals are accumulated
//          into the touched vertices' vertexNormal
void MshFile::analyseClthChunks(Model * dataDestination, QList<ChunkHeader*>& chunkList)
{
    Segment* new_segment = new Segment;

    for (auto& it : chunkList)
    {
        // texture name
        if ("CTEX" == it->name)
        {
            // read the texture name
            m_file.seek(it->position);
            char* buffer = new char[it->size + 1];
            m_file.read(buffer, it->size);
            // BUGFIX: terminate after the read; zeroing buffer[0] beforehand
            // was overwritten by the read, so a chunk without a trailing NUL
            // would make QString(buffer) scan past the data
            buffer[it->size] = '\0';

            m_materials->push_back(Material());
            m_materials->back().name = "Cloth Material";
            m_materials->back().tx0d = QString(buffer);

            m_materials->back().shininess = 10;

            if (!m_materials->back().tx0d.isEmpty())
                loadTexture(m_materials->back().texture0, m_filepath, m_materials->back().tx0d);

            new_segment->textureIndex = m_materials->size() - 1;

            delete[] buffer;
        }

        // position list (vertex)
        else if ("CPOS" == it->name)
        {
            readVertex(new_segment, it->position);
        }

        // uv
        else if ("CUV0" == it->name)
        {
            readUV(new_segment, it->position);
        }

        // triangles (indices into vertex/uv list)
        else if ("CMSH" == it->name)
        {
            // jump to the data section and read the size;
            quint32 tmp_size;
            m_file.seek(it->position);
            m_file.read(F2V(tmp_size), sizeof(tmp_size));

            // for every triangle..
            for (unsigned int i = 0; i < tmp_size; i++)
            {
                quint32 tmp_value[3];
                for (unsigned int j = 0; j < 3; j++)
                {
                    m_file.read(F2V(tmp_value[j]), sizeof(quint32));
                    new_segment->indices.push_back((GLuint)tmp_value[j]);
                }

                // face normal from the triangle's edge vectors
                QVector3D vec1, vec2, norm;

                vec1 = new_segment->vertices[new_segment->indices[i * 3]].position - new_segment->vertices[new_segment->indices[i * 3 + 1]].position;
                vec2 = new_segment->vertices[new_segment->indices[i * 3]].position - new_segment->vertices[new_segment->indices[i * 3 + 2]].position;
                norm = QVector3D::crossProduct(vec1, vec2);

                // accumulate the face normal into each corner vertex
                // NOTE(review): normalizing after every addition weights earlier
                // faces differently than a normalize-at-the-end pass would;
                // kept as-is to preserve the original shading result
                for (int k = 0; k < 3; k++)
                {
                    new_segment->vertices[new_segment->indices[i * 3 + k]].vertexNormal += norm;
                    new_segment->vertices[new_segment->indices[i * 3 + k]].vertexNormal.normalize();
                }
            }
        }
    }

    dataDestination->segmList.push_back(new_segment);
}
|
|
|
|
|
2017-02-02 13:44:48 +00:00
|
|
|
// Reads a position list from the file (count followed by count * 3 floats)
// starting at the given offset and appends one vertex per position to the
// destination segment.
void MshFile::readVertex(Segment * dataDestination, qint64 position)
{
    quint32 vertexCount;
    m_file.seek(position);
    m_file.read(F2V(vertexCount), sizeof(vertexCount));

    for (quint32 index = 0; index < vertexCount; index++)
    {
        // one float per axis (x, y, z)
        float coords[3];
        for (unsigned int axis = 0; axis < 3; axis++)
            m_file.read(F2V(coords[axis]), sizeof(float));

        VertexData newVertex;
        newVertex.position = QVector3D(coords[0], coords[1], coords[2]);

        dataDestination->vertices.push_back(newVertex);
    }
}
|
|
|
|
|
2017-02-02 13:44:48 +00:00
|
|
|
// Reads a UV list (count followed by count * 2 floats) starting at the given
// offset into the texCoord fields of the segment's already-loaded vertices.
// A mismatch between UV count and vertex count is reported as a warning:
// missing UVs are zero-filled, surplus UVs are discarded.
void MshFile::readUV(Segment * dataDestination, qint64 position)
{
    quint32 uvCount;
    m_file.seek(position);
    m_file.read(F2V(uvCount), sizeof(uvCount));

    const unsigned int vertexCount = (unsigned) dataDestination->vertices.size();

    if (uvCount < vertexCount)
    {
        OutputDevice::getInstance()->print("WARNING: too less UVs " + QString::number(uvCount) + " < " + QString::number(dataDestination->vertices.size()),1);

        // zero out the texture coordinates of the vertices without a UV
        for (unsigned int i = vertexCount; i != uvCount; i--)
            for (unsigned int j = 0; j < 2; j++)
                dataDestination->vertices[i - 1].texCoord[j] = 0;
    }
    else if (uvCount > vertexCount)
    {
        OutputDevice::getInstance()->print("WARNING: too many UVs " + QString::number(uvCount) + " > " + QString::number(dataDestination->vertices.size()), 1);
        // read only as many UVs as there are vertices
        uvCount = vertexCount;
    }

    for (unsigned int i = 0; i < uvCount; i++)
        for (unsigned int j = 0; j < 2; j++)
            m_file.read(F2V(dataDestination->vertices[i].texCoord[j]), sizeof(float));
}
|
|
|
|
|
2017-01-23 11:29:10 +00:00
|
|
|
// Loads a TGA image from filepath/filename into a new QOpenGLTexture and
// stores it in destination. If loading fails, a 1x1 fallback image filled with
// the current material's diffuse color is used instead and the filename is
// marked with a trailing " *".
void MshFile::loadTexture(QOpenGLTexture *& destination, QString filepath, QString& filename)
{
    bool loadedOk(false);

    QImage image = loadTga(filepath + "/" + filename, loadedOk);

    if (!loadedOk)
    {
        OutputDevice::getInstance()->print("WARNING: texture not found or corrupted: " + filename, 1);

        //TODO: cloth use the wrong diffuse color. should be null
        image = QImage(1, 1, QImage::Format_RGB32);
        image.fill(QColor(m_materials->back().diffuseColor[0] * 255, m_materials->back().diffuseColor[1] * 255, m_materials->back().diffuseColor[2] * 255));
        filename += " *";
    }

    // Load image to OglTexture (flipped vertically for OpenGL's origin)
    QOpenGLTexture* oglTexture = new QOpenGLTexture(image.mirrored());

    // Set nearest filtering mode for texture minification
    oglTexture->setMinificationFilter(QOpenGLTexture::Nearest);

    // Set bilinear filtering mode for texture magnification
    oglTexture->setMagnificationFilter(QOpenGLTexture::Linear);

    // Wrap texture coordinates by repeating
    // f.ex. texture coordinate (1.1, 1.2) is same as (0.1, 0.2)
    oglTexture->setWrapMode(QOpenGLTexture::Repeat);

    destination = oglTexture;
}
|
|
|
|
|
2017-02-02 13:44:48 +00:00
|
|
|
// Returns the accumulated transformation of the named parent model by walking
// up the parent chain recursively. An unknown/empty parent name yields the
// identity matrix (recursion anchor).
QMatrix4x4 MshFile::getParentMatrix(QString parent) const
{
    QMatrix4x4 accumulated;

    for (auto& model : *m_models)
    {
        if (model->name == parent)
        {
            // grandparent transform first, then this model's own translation
            accumulated = getParentMatrix(model->parent) * model->m4x4Translation;
            break;
        }
    }

    return accumulated;
}
|
2017-01-03 13:18:46 +00:00
|
|
|
|
2017-02-02 13:44:48 +00:00
|
|
|
// Returns the accumulated rotation of the named parent model by walking up
// the parent chain recursively. An unknown/empty parent name yields the
// identity quaternion (recursion anchor).
QQuaternion MshFile::getParentRotation(QString parent) const
{
    QQuaternion accumulated;

    for (auto& model : *m_models)
    {
        if (model->name == parent)
        {
            // grandparent rotation first, then this model's own rotation
            accumulated = getParentRotation(model->parent) * model->quadRotation;
            break;
        }
    }

    return accumulated;
}
|