first commit

This commit is contained in:
2026-02-21 17:11:31 +09:00
commit 18b4338361
4001 changed files with 365464 additions and 0 deletions

View File

@@ -0,0 +1,373 @@
#include "an6000_decode.h"
#if (RM_MODEL == RM_MODEL_TYPE_AN6000)
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
/* On-disk MP4/MOV box header: 32-bit size (stored big-endian in the file)
 * followed by a four-character type tag.
 * NOTE(review): layout matches the file only where `unsigned long` is
 * 4 bytes (Windows / ILP32); on LP64 platforms the struct would be padded
 * and misread the stream -- confirm the target platform. */
typedef struct {
    volatile unsigned long size;  /* total box size incl. this header (big-endian on disk) */
    volatile char type[4];        /* FourCC type tag -- NOT NUL-terminated */
} BoxHeader_t;
/* Location of one box inside the file: start position + total size. */
typedef struct _BoxAddress_t {
    fpos_t addr;          /* file position of the box header */
    unsigned long size;   /* total box size in bytes */
} BoxAddress_t;
/**
 * @brief Reverse the byte order of a 32-bit value.
 *
 * Implemented with shifts and masks so the result is independent of the
 * host byte order and of sizeof(unsigned long): only the low 32 bits of
 * @p val are used (the original pointer-based copy relied on a
 * little-endian host and on unsigned long being exactly 4 bytes).
 *
 * @param[in] val value whose low 32 bits are byte-swapped
 *
 * @return byte-swapped 32-bit value
 */
unsigned long convertEndian_32(unsigned long val)
{
    return ((val & 0x000000FFUL) << 24) |
           ((val & 0x0000FF00UL) << 8)  |
           ((val & 0x00FF0000UL) >> 8)  |
           ((val & 0xFF000000UL) >> 24);
}
/**
 * @brief [DEBUG] Recursively dump the MP4 box tree to stdout.
 *
 * Walks sibling boxes from the current file position. When the bytes right
 * after a box header look like a plausible child header (lowercase ASCII
 * type, smaller size) it recurses one level deeper, otherwise it skips the
 * payload.
 *
 * @param[in] fp target file pointer, positioned at the first box
 * @param[in] level analysis depth (used for indentation)
 * @param[in] parentSize remaining bytes of the parent box, or (size_t)-1
 *                       at the top level (then fp is rewound when done)
 */
void BoxAnalysis(FILE *fp, int level, size_t parentSize)
{
    BoxHeader_t boxHead, childBoxHead;
    char typeDesc[8] = {0};
    size_t readSize;
    int i;
    int isNotAscii;
    fpos_t pos;
    while (1) {
        // read box header
        readSize = fread(&boxHead, 1, sizeof(BoxHeader_t), fp);
        if ((feof(fp) != 0) || (readSize == 0)) {
            break;
        }
        boxHead.size = convertEndian_32(boxHead.size);
        // typeDesc[4..7] stay 0 from the initializer, so it is terminated
        strncpy(typeDesc, (const char *)&boxHead.type[0], 4);
        if (boxHead.size == 0) {
            break;
        }
        fgetpos(fp, &pos);
        // NOTE(review): fpos_t arithmetic is non-portable (fpos_t may be a
        // struct); works where fpos_t is an integer type (e.g. MSVC).
        // Format fixed: argument is unsigned long, so %08lX / %lu.
        printf("[%08lX] ", (unsigned long)(pos - sizeof(BoxHeader_t)));
        for (i = 0; i < level; i++) {
            printf(" ");
        }
        printf("%s[%lu]\n", typeDesc, (unsigned long)boxHead.size);
        if (parentSize != (size_t)-1) {
            // NOTE(review): wraps around if a box overstates its size; the
            // loop then terminates via the FWVR tag or EOF checks.
            parentSize -= boxHead.size;
        }
        // peek the next header to decide whether a child box follows
        readSize = fread(&childBoxHead, 1, sizeof(BoxHeader_t), fp);
        if (readSize < sizeof(BoxHeader_t)) {
            // truncated file: previously stale childBoxHead data from the
            // last iteration was compared here -- stop instead
            break;
        }
        childBoxHead.size = convertEndian_32(childBoxHead.size);
        isNotAscii = 0;
        for (i = 0; i < 4; i++) {
            if ((childBoxHead.type[i] < 'a') || (childBoxHead.type[i] > 'z')) {
                ++isNotAscii;
                break;
            }
        }
        fseek(fp, -(long)sizeof(BoxHeader_t), SEEK_CUR);
        if ((boxHead.size > childBoxHead.size) && (isNotAscii == 0)) {
            // child box exists
            BoxAnalysis(fp, level + 1, boxHead.size - sizeof(BoxHeader_t));
        }
        else {
            // leaf box: skip the payload (the original malloc'ed a buffer of
            // the full, untrusted box size without checking the result just
            // to discard the data -- a seek does the same without allocating)
            fseek(fp, (long)(boxHead.size - sizeof(BoxHeader_t)), SEEK_CUR);
        }
        // check whether the sibling loop should continue
        if (parentSize == 0) {
            break;
        }
        // terminal box
        if (strncmp((const char *)boxHead.type, "FWVR", 4) == 0) {
            break;
        }
    }
    if (parentSize == (size_t)-1) {
        fseek(fp, 0, SEEK_SET);
    }
}
/**
 * @brief Get the crypt target box information.
 *
 * Recursively scans the box tree and records the location of the first
 * "stsz" and the first "udat" box (tracked via the file-scope flags below,
 * which callers reset before a fresh scan).
 *
 * @param[in] fp target file pointer
 * @param[out] buf target box information
 * @param[in] bufSize number of \a buf entries
 * @param[in] level analysis depth
 * @param[in] parentSize parent box size, or (size_t)-1 at top level
 *
 * @return number of available target box information entries
 */
static bool gFound_stsz = false;  // first "stsz" box already recorded
static bool gFount_udat = false;  // first "udat" box already recorded
                                  // (sic: historical typo kept -- other
                                  // translation units rely on this name)
size_t GetCryptTarget(FILE *fp, BoxAddress_t *buf, size_t bufSize, int level, size_t parentSize)
{
    size_t buf_num = 0;
    BoxHeader_t boxHead = {0,};
    BoxHeader_t childBoxHead = {0,};
    size_t readSize = 0;
    int isNotAscii = 0;
    int i = 0;
    fpos_t pos = 0;
    if (bufSize == 0) {
        return 0;
    }
    while (1) {
        // read box header
        readSize = fread(&boxHead, 1, sizeof(BoxHeader_t), fp);
        if (readSize == 0) {
            break;
        }
        boxHead.size = convertEndian_32(boxHead.size);
        if (boxHead.size == 0) {
            break;
        }
        if (parentSize != (size_t)-1) {
            parentSize -= boxHead.size;
        }
        // crypt target box: only the first "stsz" and first "udat"
        // (parentheses added around each &&-pair; same evaluation as before,
        // now explicit instead of relying on operator precedence)
        if (((strncmp((const char *)boxHead.type, "stsz", 4) == 0) && (gFound_stsz == false))
            || ((strncmp((const char *)boxHead.type, "udat", 4) == 0) && (gFount_udat == false))) {
            fgetpos(fp, &pos);
            // NOTE(review): fpos_t arithmetic is non-portable; valid where
            // fpos_t is an integer type.
            buf[buf_num].addr = pos - sizeof(BoxHeader_t);
            buf[buf_num].size = boxHead.size;
            fseek(fp, boxHead.size - sizeof(BoxHeader_t), SEEK_CUR);
            if (strncmp((const char *)boxHead.type, "stsz", 4) == 0) {
                gFound_stsz = true;
            }
            else if (strncmp((const char *)boxHead.type, "udat", 4) == 0) {
                gFount_udat = true;
            }
            if (++buf_num == bufSize) {
                return buf_num;
            }
        }
        else {
            // peek ahead: does a child box exist?
            readSize = fread(&childBoxHead, 1, sizeof(BoxHeader_t), fp);
            fseek(fp, -(long)sizeof(BoxHeader_t), SEEK_CUR);
            if (readSize < sizeof(BoxHeader_t)) {
                // truncated file -- previously stale data was inspected here
                break;
            }
            childBoxHead.size = convertEndian_32(childBoxHead.size);
            isNotAscii = 0;
            for (i = 0; i < 4; i++) {
                if ((childBoxHead.type[i] < 'a') || (childBoxHead.type[i] > 'z')) {
                    ++isNotAscii;
                    break;
                }
            }
            if ((boxHead.size > childBoxHead.size) && (isNotAscii == 0)) {
                // child box exists -> recurse into it
                buf_num += GetCryptTarget(fp, &buf[buf_num], bufSize - buf_num, level + 1, boxHead.size - sizeof(BoxHeader_t));
                if (buf_num == bufSize) {
                    return buf_num;
                }
            }
            else {
                // leaf box -> skip the payload
                fseek(fp, boxHead.size - sizeof(BoxHeader_t), SEEK_CUR);
            }
        }
        // check whether the sibling loop should continue
        if (parentSize == 0) {
            break;
        }
        // terminal box
        if (strncmp((const char *)boxHead.type, "FWVR", 4) == 0) {
            break;
        }
    }
    return buf_num;
}
/**
* @brief Encrypt MP4 File
*
* @param[in] filename target file name
*
* @retval 0 successful
* @retval non-zero failed
*/
int encrypt_an6000(const wchar_t *filename)
{
FILE *fp = NULL;
BoxAddress_t tgtInfo[8];
size_t infoSize, i, x;
char *pbuf;
char key = 0x19;
char preData, enc;
memset(tgtInfo, 0, sizeof(tgtInfo));
infoSize = sizeof(tgtInfo) / sizeof(BoxAddress_t);
fp = _wfopen(filename,L"r+b");
//fp = fopen(filename, "r+b");
if (fp == NULL) {
return -1;
}
fseek(fp, 0, SEEK_SET);
// Analysis box information
//BoxAnalysis(fp, 0, (size_t)-1);
infoSize = GetCryptTarget(fp, tgtInfo, infoSize, 0, -1);
for (i = 0; i < infoSize; i++) {
pbuf = (char*)malloc(tgtInfo[i].size);
// read the target data
fsetpos(fp, &tgtInfo[i].addr);
fread(pbuf, 1, tgtInfo[i].size, fp);
// data encrypt
preData = 0;
for (x = 8; x < tgtInfo[i].size; x++) {
enc = ((~pbuf[x]) ^ preData) ^ key;
preData = pbuf[x];
pbuf[x] = enc;
}
// over-write the target data
fsetpos(fp, &tgtInfo[i].addr);
fwrite(pbuf, 1, tgtInfo[i].size, fp);
free(pbuf);
}
fclose(fp);
return 0;
}
/**
 * @brief Decrypt MP4 File (in place).
 *
 * Inverse of encrypt_an6000(): plain[x] = ~(enc[x] ^ key ^ plain[x-1]).
 * The first 8 bytes of each target box (size + type) are not touched.
 *
 * @param[in] filename target file name
 *
 * @retval 0 successful
 * @retval non-zero failed (open / allocation / read failure)
 */
int decrypt_an6000(const wchar_t *filename)
{
    // reset the "first box found" flags so a fresh scan sees both boxes
    gFound_stsz = false;
    gFount_udat = false;
    FILE *fp = NULL;
    BoxAddress_t tgtInfo[8] = {0,};
    size_t infoSize, i, x;
    char *pbuf = NULL;
    char key = 0x19;
    char preData = 0;
    // fix: sizeof(_BoxAddress_t) used the struct tag without `struct`,
    // which only compiles as C++ -- sizeof(tgtInfo) is both correct and
    // stays in sync with the array declaration
    memset(tgtInfo, 0, sizeof(tgtInfo));
    infoSize = sizeof(tgtInfo) / sizeof(tgtInfo[0]);
    fp = _wfopen(filename, L"r+b");
    if (fp == NULL) {
        return -1;
    }
    fseek(fp, 0, SEEK_SET);
    // locate the crypt target boxes
    infoSize = GetCryptTarget(fp, tgtInfo, infoSize, 0, -1);
    for (i = 0; i < infoSize; i++) {
        pbuf = (char *)malloc(tgtInfo[i].size);
        if (pbuf == NULL) {
            fclose(fp);
            return -1;
        }
        // read the target data
        fsetpos(fp, &tgtInfo[i].addr);
        if (fread(pbuf, 1, tgtInfo[i].size, fp) != tgtInfo[i].size) {
            // truncated read: abort rather than writing garbage back
            free(pbuf);
            fclose(fp);
            return -1;
        }
        // data decrypt (feedback uses the decrypted previous byte)
        preData = 0;
        for (x = 8; x < tgtInfo[i].size; x++) {
            pbuf[x] = ~((pbuf[x] ^ key) ^ preData);
            preData = pbuf[x];
        }
        // over-write the target data
        fsetpos(fp, &tgtInfo[i].addr);
        fwrite(pbuf, 1, tgtInfo[i].size, fp);
        free(pbuf);
    }
    fclose(fp);
    return 0;
}
/**
 * @brief Heuristically decide whether a file is encrypted.
 *
 * Inspects the bytes right after the "stsz" type tag: in a plaintext file
 * the version/flags/sample_size fields there are normally zero, so any
 * non-zero byte means the box payload was scrambled.
 *
 * @param[in] filename target file name
 * @return true if the file appears encrypted (also when the file cannot be
 *         opened... no: open failure returns false; when no readable "stsz"
 *         box is found the file is treated as encrypted -- original behavior)
 */
bool is_encrypted_an6000(const wchar_t *filename)
{
    gFound_stsz = false;
    gFount_udat = false;
    FILE *fp = NULL;
    BoxAddress_t tgtInfo[8] = {0,};
    size_t infoSize, i;           // unused local `x` removed
    // fix: sizeof(_BoxAddress_t) used the struct tag without `struct` (C++
    // only); sizeof(tgtInfo) is equivalent and always correct
    memset(tgtInfo, 0, sizeof(tgtInfo));
    infoSize = sizeof(tgtInfo) / sizeof(tgtInfo[0]);
    fp = _wfopen(filename, L"r+b");
    if (fp == NULL) {
        return false;
    }
    fseek(fp, 0, SEEK_SET);
    infoSize = GetCryptTarget(fp, tgtInfo, infoSize, 0, -1);
    char buffer[16] = {0,};
    for (i = 0; i < infoSize; i++) {
        // GetCryptTarget records the box start; skip the 4-byte size field so
        // buffer[0..3] holds the type tag and buffer[4..] the payload start
        tgtInfo[i].addr += 4;
        fsetpos(fp, &tgtInfo[i].addr);
        if (fread(buffer, 1, 16, fp) != 16) {
            continue; // truncated box: cannot judge from it
        }
        if (buffer[0] == 's' && buffer[1] == 't' && buffer[2] == 's' && buffer[3] == 'z') {
            bool enc = (buffer[4] != 0 || buffer[5] != 0 || buffer[6] != 0 || buffer[7] != 0);
            fclose(fp);
            return enc;
        }
    }
    fclose(fp);
    // NOTE(review): no readable "stsz" box found -- treated as encrypted by
    // default (keeps the original behavior).
    return true;
}
#endif // #if (RM_MODEL == RM_MODEL_TYPE_AN6000)

View File

@@ -0,0 +1,12 @@
#ifndef AN6000_DECODE_H
#define AN6000_DECODE_H
#include "../rm_include.h"
#if (RM_MODEL == RM_MODEL_TYPE_AN6000)
/* In-place MP4 payload scrambler for the AN6000 model: a byte-feedback XOR
 * cipher applied to the first "stsz" and "udat" boxes of a recording. */
int decrypt_an6000(const wchar_t *filename);        /* 0 on success, non-zero on failure */
int encrypt_an6000(const wchar_t *filename);        /* 0 on success, non-zero on failure */
bool is_encrypted_an6000(const wchar_t *filename);  /* heuristic: inspects the "stsz" box payload */
#endif // #if (RM_MODEL == RM_MODEL_TYPE_AN6000)
#endif // AN6000_DECODE_H

View File

@@ -0,0 +1,37 @@
#include "fileio.h"
/**
 * @brief Read a 32-bit little-endian integer from a stream.
 *
 * Accumulates in unsigned arithmetic: the original `getc(in)<<24` left-shifts
 * a signed int, which is undefined behavior for any final byte >= 0x80.
 *
 * NOTE(review): like the original, EOF is not reported -- reading past
 * end-of-file folds getc()'s EOF value into the result; callers must bound
 * their reads themselves.
 *
 * @param[in] in input stream
 * @return the 32-bit value, reinterpreted as int
 */
int read_long(FILE *in)
{
    unsigned int c;
    c = (unsigned int)getc(in);
    c += (unsigned int)getc(in) << 8;
    c += (unsigned int)getc(in) << 16;
    c += (unsigned int)getc(in) << 24;
    return (int)c;
}
/* Read a 16-bit little-endian integer from a stream.
 * Note: EOF is not detected; the caller must bound its reads. */
int read_word(FILE *in)
{
    int lo = getc(in);
    int hi = getc(in);
    return lo + (hi << 8);
}
/* Read exactly `count` bytes into `s` and NUL-terminate the result.
 * IMPORTANT: `s` must have room for count + 1 bytes (terminator included).
 * Always returns 0; EOF is not detected. */
int read_chars(FILE *in, char *s, int count)
{
    int i;
    for (i = 0; i < count; i++) {
        s[i] = (char)getc(in);
    }
    s[i] = '\0';
    return 0;
}

View File

@@ -0,0 +1,22 @@
#ifndef FILEIO_H
#define FILEIO_H
// #include "rm_constants.h"
#include <stdio.h>
/* Thin indirection layer over stdio so the AVI parser can be retargeted to
 * another file-I/O backend by swapping this typedef and these macros. */
typedef FILE* RMfile;
#define RMfread(__BUFFER,__SIZE,__COUNT,__FILE) fread(__BUFFER,__SIZE,__COUNT,__FILE)
#define RMftell(__FILE) ftell(__FILE)
#define RMfseek(__FILE,__OFSET,__SEEK_TYPE) fseek(__FILE,__OFSET,__SEEK_TYPE)
#define RMgetc(__FILE) getc(__FILE)
#define RMfeof(__FILE) feof(__FILE)
/* Little-endian readers; EOF is not reported (see fileio.c). */
int read_long(RMfile in);                       /* 32-bit LE */
int read_word(RMfile in);                       /* 16-bit LE */
int read_chars(RMfile in, char *s, int count);  /* s needs count+1 bytes */
#endif

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,196 @@
#ifndef FM_ADDRESS_H
#define FM_ADDRESS_H
#if (USE_JP_ADDRESS)
#include <QString>
#include <inttypes.h>
#include <QMap>
#define USE_ADDR_DB_TYPE 1
// // _maxLenPref: 4 _maxLenCity: 8 _maxLenChome: 12
#define MAX_JA_AREA_NAME_LEN 30
#define MAX_PREF_COUNT 50 // 도도부현 개수
#define MAX_CITY_COUNT 200 // 도도부현당 최대 시/구 개수, "北海道" 52, "大阪府" 64
#if (USE_ADDR_DB_TYPE == 0)
#define MAX_JIBUN_COUNT 4000 // 3381
#define MAX_CHOME_COUNT 1500 // 시/구당 최대 마을 개수 旭川市 1475
#elif (USE_ADDR_DB_TYPE == 1)
#define MAX_JIBUN_COUNT 20000 // 20000
#define MAX_CHOME_COUNT 5000 // 福島県, 시/구당 최대 마을 개수 旭川市 1475
#endif
// Binary file header of the Japanese address database.
// NOTE(review): the trailing offset comments predate the removal of
// prefOffset and may no longer match the real field offsets -- verify.
typedef struct _JA_HEADER {
    char header[128];     // 0
    char date[24];        // 128
    char version[24];     // 152
    uint32_t stringSize;  // 176 size of the string table
    //uint32_t prefOffset; // 180 start offset of prefectures (=sizeof header + stringSize) + ....
    uint32_t pref_count;  // 184 number of prefectures
    uint32_t city_count;  // 188 total number of cities
    uint32_t town_count;  // 192 total number of towns
    uint32_t jibun_count; // 196 total number of lot numbers (jibun)
    int32_t xmin;         // overall bounding box
    int32_t ymin;
    int32_t xmax;
    int32_t ymax;
} JA_HEADER;
// On-disk (storage) record for one area (prefecture / city / town).
typedef struct _JA_AREA_PACKET
{
    uint32_t nameOffset;  // offset into the string table
    uint32_t sub_count;   // number of sub-areas (or jibun entries)
    uint32_t sub_offset;  // file offset of the sub-area records
    int32_t xmin;         // bounding box of everything under this area
    int32_t ymin;
    int32_t xmax;
    int32_t ymax;
    uint8_t bits[4]; // used by CHOME level only (bit widths for X,Y,A0,A1)
} JA_AREA_PACKET;
#if (USE_JP_ADDRESS_TOOL)
// Lot number (jibun) -> in-memory processing structure.
typedef struct _JA_JIBUN {
    uint16_t a0; // major lot number; observed max 150 -> fits in 8 bits
    uint16_t a1; // minor lot number; observed max 732 -> fits in 10 bits
    int32_t x;
    int32_t y;
} JA_JIBUN; // 12 bytes
// Prefecture / city / town (chome) -> in-memory processing structure.
typedef struct _JA_AREA {
    ushort name[MAX_JA_AREA_NAME_LEN]; // UTF-16 name, fixed-size buffer
    int32_t count; // sub area (or jibun) count
    _JA_AREA* subAreas;
    _JA_JIBUN* subJibuns;
    uint32_t offset; // sub area offset // or pointer
    int32_t xmin;    // bounding box of everything under this area
    int32_t ymin;
    int32_t xmax;
    int32_t ymax;
    uint32_t nameOffset;
    uint16_t a0min; // used to compute the BYTE/bit PACK widths
    uint16_t a0max;
    uint16_t a1min;
    uint16_t a1max;
    uint8_t bits[4]; // for CHOME: bit pack of the contained jibun data
} JA_AREA;
#endif // #if (USE_JP_ADDRESS_TOOL)
// Sample records / packing notes:
// 北海道 深川市 一条 (3066,1474)
// 神奈川県 横浜市鶴見区 大黒町 (879,2625)
// XMAX: 12bit, YMAX: 12bit
// a0: observed max 150 -> 8bit
// a1: observed max 732 -> 10bit
//
// Japanese address database: reverse-geocodes a lon/lat coordinate into
// prefecture / city / town / lot number from a packed binary DB file.
class FMAddress
{
private:
    // search result of the last successful lookup
    int _found_a0;
    int _found_a1;
    int _found_dist_sq; // squared distance to the matched lot
    int _found_x;
    int _found_y;
    JA_HEADER _header;      // file header
    JA_AREA_PACKET* _areas; // prefectures, cities, towns...
    uint8_t* _strings;      // string table
    FILE* _file;
    // Find the nearest lot within `town` for (x,y); fills the _found_*
    // fields (distance, a0 = major lot, a1 = minor lot).
    bool _searchTown(JA_AREA_PACKET* town,int x, int y,int tolerance);
public:
    FMAddress();
    ~FMAddress();
    // Process-wide singleton (lazily created; never deleted).
    static FMAddress* instance()
    {
        static FMAddress * _instance = 0;
        if ( _instance == 0 ) {
            _instance = new FMAddress();
        }
        return _instance;
    }
    // True once the DB file, area table and string table are all loaded.
    bool isOpened() {
        return (_areas != NULL && _strings != NULL && _file != NULL);
    }
    bool open();             // open the DB (executable path or test path)
    bool open(QString path); // open the DB at an explicit path
    // Lookup (lon/lat -> prefecture, city, town, major/minor lot, distance;
    // `tolerance` is the allowed error in meters).
    bool search(double lon,double lat,wchar_t** pref,wchar_t** city,wchar_t** town,int* a0, int*a1, int* pdist, int tolerance = 200);
    // Same lookup, results as a string list: prefecture, city, town, lot, distance.
    bool search(double lon, double lat,QStringList& result, int tolerance = 200);
#if (USE_JP_ADDRESS_TOOL)
    void debugList();                      // debug dump
    void _debugTown(JA_AREA_PACKET* town); // debug dump of one town
public:
    bool convert(QString src, QString target); // build the binary DB from text
    bool verify(QString src);                  // compare against the TXT source data
    // Source CSV columns: CODE,PREF,CITY,CHOME,A0,A1,LON,LAT,CDATE
    static QMap<QString,uint32_t> _stringTable;
private:
    static void _updateBound(_JA_AREA* dest, _JA_AREA* src);
    static void _updateBoundP(_JA_AREA* dest, _JA_JIBUN* src);
    static void _initBound(_JA_AREA* dest);
    static void _calculateBitPack(_JA_AREA * src); // , uint8_t* byte
    static void _copyPacket(JA_AREA_PACKET* dest, JA_AREA* src);
    void _freeConvertData();
    void _resizeChome(JA_AREA* chome); // shrink MAX-sized lot allocation to the actual count
    void _createStatCSV();             // generate statistics
    void _saveData();
    void _saveStringTable();
    void _savePref();                  // write prefectures
    void _sortJIBUN(JA_AREA* chome);   // sort lots in XY order
    bool _packJibun(JA_AREA* town,uint8_t* buffer,uint32_t* offset); // bit-pack lots
    uint32_t _totalJibunBytes;         // total bytes allocated for lot data
    uint32_t _writeOffset;             // current output file offset
    FILE* _outFile;                    // output file
    uint32_t _totalStringBufferLength; // total string buffer size (deduplicated)
    uint32_t _totalStringLength;       // total string size
    uint32_t _currentStringOffset;     // current string buffer offset
    void _setStringOffset(JA_AREA* area,QString name); // intern string + set offset
    // Names of the pref/city/chome currently being converted.
    QString _currentPrefName;
    QString _currentCityName;
    QString _currentChomeName;
    JA_AREA* _prefs;        // prefecture array
    JA_AREA* _currentPref;  // current prefecture
    JA_AREA* _currentCity;  // current city
    JA_AREA* _currentChome; // current town
    JA_JIBUN* _currentJibun;// current lot
    int _maxLenPref;        // statistics only
    int _maxLenCity;        // "
    int _maxLenChome;       // "
    int _maxA0;             // " max major lot number
    int _maxA1;             // " max minor lot number
#endif // USE_JP_ADDRESS_TOOL
};
#endif // USE_JP_ADDRESS
#endif // FM_ADDRESS_H

View File

@@ -0,0 +1,151 @@
#include "fm_parse_gps.h"
// Parse an NMEA $xxRMC sentence into `data` (date/time, fix status,
// lat/lon in decimal degrees, speed in km/h, course).
// Always returns true; fields that fail to parse are left as atof/strtol
// defaults. The sentence checksum is intentionally not verified.
bool FMParseGPS::ParseRMC(char szSentence[], NMEA_INFO* data, int nPacketSize)
{
    Q_UNUSED(nPacketSize);
    char szItem[NMEA_TOKEN_SIZE]={0,};
    long nTemp;
    // field 0: "$xxRMC" header -- skipped
    szSentence = _GetNextToken(szSentence, szItem);
    // field 1: UTC time hhmmss(.sss)
    char szUtc[11] = {0,};
    szSentence = _GetNextToken(szSentence, szItem);
    // BUGFIX: copy at most sizeof-1 bytes; strncpy with the full size left
    // szUtc unterminated for long tokens and strlen() below read past it
    strncpy(szUtc, szItem, sizeof(szUtc) - 1);
    // field 2: status, 'A' = valid fix, 'V' = void
    szSentence = _GetNextToken(szSentence, szItem);
    char cStatus = szItem[0];
    // field 3: latitude, ddmm.mmmm
    szSentence = _GetNextToken(szSentence, szItem);
    data->Latitude = atof(szItem);
    // field 4: latitude hemisphere, 'N' / 'S'
    szSentence = _GetNextToken(szSentence, szItem);
    unsigned char cLatitudeDir = szItem[0];
    // field 5: longitude, dddmm.mmmm
    szSentence = _GetNextToken(szSentence, szItem);
    data->Longitude = atof(szItem);
    // field 6: longitude hemisphere, 'E' / 'W'
    szSentence = _GetNextToken(szSentence, szItem);
    unsigned char cLongitudeDir = szItem[0];
    // field 7: speed over ground, knots
    szSentence = _GetNextToken(szSentence, szItem);
    double speed = atof(szItem);
    // field 8: track angle, degrees true
    szSentence = _GetNextToken(szSentence, szItem);
    double track = atof(szItem);
    // field 9: date, ddmmyy
    char szDate[7] = {0,};
    szSentence = _GetNextToken(szSentence, szItem);
    strncpy(szDate, szItem, sizeof(szDate) - 1); // BUGFIX: keep the terminator
    // field 10: magnetic variation -- unused
    szSentence = _GetNextToken(szSentence, szItem);
    // field 11: magnetic variation direction -- unused
    szSentence = _GetNextToken(szSentence, szItem);
    // decode UTC time ('0' == 48)
    if (strlen(szUtc) >= 6)
    {
        data->nHour = (szUtc[0] - 48) * 10 + (szUtc[1] - 48);
        data->nMin = (szUtc[2] - 48) * 10 + (szUtc[3] - 48);
        data->nSec = (szUtc[4] - 48) * 10 + (szUtc[5] - 48);
    }
    // decode date; two-digit years > 80 are mapped to the 1900s
    if (strlen(szDate) >= 6)
    {
        data->nDay = (szDate[0] - 48) * 10 + (szDate[1] - 48);
        data->nMonth = (szDate[2] - 48) * 10 + (szDate[3] - 48);
        data->nYear = (szDate[4] - 48) * 10 + (szDate[5] - 48);
        if (data->nYear > 80)
        {
            data->nYear += 1900;
        }
        else
        {
            data->nYear += 2000;
        }
    }
    data->nStatus = (cStatus == 'A') ? 1 : 0;
    // ddmm.mmmm -> decimal degrees: split off the degree part, convert the
    // minute part (minutes/60)
    data->Latitude = data->Latitude / 100.0;
    nTemp = (long)data->Latitude;
    data->Latitude = (double)nTemp + (double)(data->Latitude - nTemp) * 100.0 / 60.0;
    if (cLatitudeDir == 'S')
    {
        data->Latitude *= -1;
    }
    data->Longitude = data->Longitude / 100.0;
    nTemp = (long)data->Longitude;
    data->Longitude = (double)nTemp + (double)(data->Longitude - nTemp) * 100.0 / 60.0;
    if (cLongitudeDir == 'W')
    {
        data->Longitude *= -1;
    }
    data->Speed = speed * 1.852; // knots -> km/h (1 knot = 1852 m/h)
    // NOTE(review): truncating the course into what appears to be an 8-bit
    // field loses headings above 255 degrees -- confirm nAngle's type/units.
    data->nAngle = (uint8_t) track;
    return true;
}
// Copy the next comma-separated NMEA field from `lpSentence` into `lpToken`
// (at most iTokenSize-1 chars, always NUL-terminated) and return a pointer
// to the start of the following field, or NULL at end of input.
//
// BUGFIX: the original unconditionally advanced past the stopping character,
// so stopping on '\0' returned a pointer past the terminator (subsequent
// calls read out of bounds), and an oversized token resumed mid-field.
// Now only a ',' delimiter is consumed; '\0' and '*' are left in place
// (the NULL/empty check at the top then ends the chain safely), and an
// oversized field is skipped to its delimiter instead of being split.
char* FMParseGPS::_GetNextToken(char* lpSentence, char* lpToken, int iTokenSize)
{
    lpToken[0] = '\0';
    if (lpSentence == NULL || lpSentence[0] == '\0')
    {
        return NULL;
    }
    if (lpSentence[0] == ',')
    {
        return lpSentence + 1; // empty field
    }
    iTokenSize--; // reserve room for the terminator
    while (*lpSentence != ',' && *lpSentence != '\0' && *lpSentence != '*')
    {
        if (iTokenSize > 0)
        {
            *lpToken = *lpSentence;
            lpToken++;
            iTokenSize--;
        }
        lpSentence++;
    }
    *lpToken = '\0';
    if (*lpSentence == ',')
    {
        lpSentence++; // consume the delimiter only
    }
    return lpSentence;
}

View File

@@ -0,0 +1,15 @@
#ifndef FM_PARSE_GPS_H
#define FM_PARSE_GPS_H
#include "rm_sensordata.h"
// Maximum length of one NMEA field buffer (including the terminator).
#define NMEA_TOKEN_SIZE 51
// Stateless NMEA sentence parser (currently RMC only).
class FMParseGPS
{
public:
    // Parse a $xxRMC sentence into `data`; see fm_parse_gps.cpp for the
    // field-by-field contract. nPacketSize is currently unused.
    static bool ParseRMC(char szSentence[], NMEA_INFO* data, int nPacketSize);
private:
    // Extract the next comma-separated field; returns the position after it.
    static char* _GetNextToken(char* lpSentence, char* lpToken, int iTokenSize = NMEA_TOKEN_SIZE);
};
#endif // FM_PARSE_GPS_H

View File

@@ -0,0 +1,461 @@
#include "fm_video_edit.h"
#if (FM_VIDEO_EDIT)
#if (USE_LIB_MP4V2)
#include "mp4v2.h"
#else
#ifdef __cplusplus
extern "C"
{
#include <libavformat/avformat.h>
#include <libavcodec/avcodec.h>
}
#endif // extern C
#endif //USE_LIB_MP4V2
#define MAX_VIDEO_STREAM_COUNT 10
// Construct an edit job: source paths, destination paths, mode, and whether
// the source files are removed after a successful run.
FMVideoEdit::FMVideoEdit(QStringList files, QStringList dests, EditMode mode, bool deleteSrcDone)
    : _files(files)
    , _dests(dests)
    , _mode(mode)
    , _deleteSrcAfter(deleteSrcDone)
{
}
// QRunnable entry point.
// NOTE(review): only the track-split operation is implemented here; _mode is
// currently ignored -- confirm whether FileJoin is handled elsewhere.
void FMVideoEdit::run()
{
    _splitMP4VideoTracks();
}
// Map an AVFormatContext stream id to its slot in the recorded video-stream
// table; returns -1 when the id was not recorded as a video stream.
int _find_video_stream_index(int stream_id, int *streams) {
    int slot = 0;
    while (slot < MAX_VIDEO_STREAM_COUNT) {
        if (streams[slot] == stream_id) {
            return slot;
        }
        ++slot;
    }
    return -1;
}
// Free every allocated output context in the fixed-size table (entries may
// be NULL; each context is released independently).
void _clean_output_contexts(AVFormatContext** ctxs) {
    for (int slot = MAX_VIDEO_STREAM_COUNT - 1; slot >= 0; --slot) {
        AVFormatContext* ctx = ctxs[slot];
        if (ctx != NULL) {
            avformat_free_context(ctx);
        }
    }
}
// Split a multi-video-track MP4 into one single-video-track file per valid
// video stream, duplicating the audio and subtitle streams into every output.
// Emits progress(0..100) while copying and done(ErrorCode) exactly once.
void FMVideoEdit::_splitMP4VideoTracks()
{
    QString src = _files.first();
    AVFormatContext *input_ctx = NULL;
    int video_stream_index[MAX_VIDEO_STREAM_COUNT] = {-1,};
    // Per-VIDEO-SLOT "stream actually has frames" flag (indexed 0..video_stream_count-1).
    bool video_stream_validation[MAX_VIDEO_STREAM_COUNT] = {true,};
    int video_stream_count = 0;
    int real_video_stream_count = 0; // number of video streams that carry frames
    int audio_stream_index = -1;     // lets subtitles use out-index 1 instead of 2 when no audio exists
    int video_frame_count = -1;      // frame total of the first video stream, for progress reporting
    // open the input file
    if (avformat_open_input(&input_ctx, src.toUtf8().constData(), NULL, NULL) < 0) {
        emit done(ErrorFileOpen);
        return;
    }
    // read stream information
    if (avformat_find_stream_info(input_ctx, NULL) < 0) {
        emit done(ErrorOpenStream);
        avformat_close_input(&input_ctx);
        return;
    }
    // record each video stream's index plus the audio stream index
    for (unsigned int i = 0; i < input_ctx->nb_streams; i++) {
        AVMediaType type = input_ctx->streams[i]->codecpar->codec_type;
        if (type == AVMEDIA_TYPE_VIDEO) {
            // BUGFIX: the validation table is read everywhere below with the
            // video slot number (s / vindex), so it must be written at
            // [video_stream_count], not at the raw stream index [i] -- the
            // two diverge whenever a non-video stream precedes a video one.
            video_stream_validation[video_stream_count] = (input_ctx->streams[i]->nb_frames > 0);
            if (video_stream_validation[video_stream_count])
            {
                real_video_stream_count += 1;
            }
            // remember the first video stream's frame count for progress
            if (video_frame_count < 0) {
                video_frame_count = input_ctx->streams[i]->nb_frames;
            }
            video_stream_index[video_stream_count] = i;
            video_stream_count++;
        } else if (type == AVMEDIA_TYPE_AUDIO) {
            audio_stream_index = i;
        }
    }
    // only proceed when the destination list can hold every real video stream
    // NOTE(review): destinations are indexed by video slot below, so an
    // invalid leading stream shifts valid streams to higher _dests indices.
    if (real_video_stream_count > _dests.size()) {
        emit done(ErrorWrongTrackCount);
        avformat_close_input(&input_ctx);
        return;
    }
    // create all output files up front so every packet can be fanned out in
    // a single pass over the input
    AVFormatContext *output_ctx[MAX_VIDEO_STREAM_COUNT] = {NULL,};
    AVStream *out_video_stream[MAX_VIDEO_STREAM_COUNT] = {NULL,};
    AVStream *out_audio_stream[MAX_VIDEO_STREAM_COUNT] = {NULL,};
    AVStream *out_subtitle_stream[MAX_VIDEO_STREAM_COUNT] = {NULL,};
    for (int s = 0; s < video_stream_count; s++) {
        // skip video streams without frames
        if (!video_stream_validation[s]) {
            continue;
        }
        QString destString = _dests.at(s);
        char dest[4096] = {0,};
        memcpy(dest, destString.toUtf8().constData(), destString.toUtf8().length());
        // MP4 cannot carry PCM audio, so the container is forced to MOV even
        // though the file extension is .mp4
        avformat_alloc_output_context2(&output_ctx[s], NULL, "mov", dest);
        if (!output_ctx[s]) {
            emit done(ErrorFileCreate);
            avformat_close_input(&input_ctx);
            _clean_output_contexts(output_ctx);
            return;
        }
        // create the (single) video stream -- always output index 0
        out_video_stream[s] = avformat_new_stream(output_ctx[s], NULL);
        if (!out_video_stream[s]) {
            emit done(ErrorCreateStream);
            _clean_output_contexts(output_ctx);
            avformat_close_input(&input_ctx);
            return;
        }
        // copy the codec parameters of this slot's input video stream
        int ret = avcodec_parameters_copy(out_video_stream[s]->codecpar, input_ctx->streams[video_stream_index[s]]->codecpar);
        if (ret < 0) {
            emit done(ErrorCopyStreamParameters);
            _clean_output_contexts(output_ctx);
            avformat_close_input(&input_ctx);
            return;
        }
        out_video_stream[s]->codecpar->codec_tag = 0;
        // duplicate the remaining (audio / subtitle) streams into this output
        for (unsigned int i = 0; i < input_ctx->nb_streams; i++) {
            AVMediaType type = input_ctx->streams[i]->codecpar->codec_type;
            if (type == AVMEDIA_TYPE_AUDIO) {
                out_audio_stream[s] = avformat_new_stream(output_ctx[s], NULL);
                if (!out_audio_stream[s]) {
                    emit done(ErrorCreateStream);
                    _clean_output_contexts(output_ctx);
                    avformat_close_input(&input_ctx);
                    return;
                }
                ret = avcodec_parameters_copy(out_audio_stream[s]->codecpar, input_ctx->streams[i]->codecpar);
                if (ret < 0) {
                    emit done(ErrorCopyStreamParameters);
                    _clean_output_contexts(output_ctx);
                    avformat_close_input(&input_ctx);
                    return;
                }
                out_audio_stream[s]->codecpar->codec_tag = 0;
            }
            // copy the subtitle stream
            // NOTE(review): the packet loop assumes audio lands at output
            // index 1 and subtitles after it -- true as long as no subtitle
            // stream precedes the audio stream in the input.
            if (type == AVMEDIA_TYPE_SUBTITLE) {
                out_subtitle_stream[s] = avformat_new_stream(output_ctx[s], NULL);
                if (!out_subtitle_stream[s]) {
                    emit done(ErrorCreateStream);
                    _clean_output_contexts(output_ctx);
                    avformat_close_input(&input_ctx);
                    return;
                }
                ret = avcodec_parameters_copy(out_subtitle_stream[s]->codecpar, input_ctx->streams[i]->codecpar);
                if (ret < 0) {
                    emit done(ErrorCopyStreamParameters);
                    _clean_output_contexts(output_ctx);
                    avformat_close_input(&input_ctx);
                    return;
                }
                out_subtitle_stream[s]->codecpar->codec_tag = 0;
            }
        } // remaining streams copied
        // write the output file header
        av_dump_format(output_ctx[s], 0, dest, 1);
        if (!(output_ctx[s]->oformat->flags & AVFMT_NOFILE)) {
            if (avio_open(&output_ctx[s]->pb, dest, AVIO_FLAG_WRITE) < 0) {
                emit done(ErrorFileWrite);
                _clean_output_contexts(output_ctx);
                avformat_close_input(&input_ctx);
                return;
            }
        }
        int res = avformat_write_header(output_ctx[s], NULL);
        if (res < 0)
        {
            qInfo() << "VIDEO STREAM:" << s << " HEADER WRITE ERROR" << __FUNCTION__;
            emit done(ErrorFileWrite);
            _clean_output_contexts(output_ctx);
            avformat_close_input(&input_ctx);
            return;
        }
    } // output files created, stream info copied
    // copy packets in one pass
    AVPacket packet;
    int frame_processed = 0; // for progress reporting
    while (av_read_frame(input_ctx, &packet) >= 0) {
        AVMediaType type = input_ctx->streams[packet.stream_index]->codecpar->codec_type;
        // time base of the input stream this packet belongs to
        const AVRational& in_time_base = input_ctx->streams[packet.stream_index]->time_base;
        if (type == AVMEDIA_TYPE_VIDEO) {
            // which video slot does this packet belong to?
            int vindex = _find_video_stream_index(packet.stream_index, video_stream_index);
            // BUGFIX: guard vindex < 0 (unknown stream would have indexed the
            // validation array out of bounds) and unref skipped packets (the
            // bare `continue` leaked every packet of an invalid stream)
            if (vindex < 0 || !video_stream_validation[vindex]) {
                av_packet_unref(&packet);
                continue;
            }
            // the output video stream index is always 0
            const AVRational& out_time_base = output_ctx[vindex]->streams[0]->time_base;
            // every packet must be rescaled or the playback duration shrinks
            packet.pts = av_rescale_q(packet.pts, in_time_base, out_time_base);
            packet.dts = av_rescale_q(packet.dts, in_time_base, out_time_base);
            packet.duration = av_rescale_q(packet.duration, in_time_base, out_time_base);
            packet.stream_index = 0; // output video stream index
            av_interleaved_write_frame(output_ctx[vindex], &packet);
            if (vindex == 0 && video_frame_count > 0) {
                frame_processed++;
                int percent = int(((double)frame_processed) / ((double)video_frame_count) * 100.0);
                emit progress(percent);
            }
        } else if (type == AVMEDIA_TYPE_AUDIO) {
            // the packet is written to several files, so each write uses a
            // clone (av_interleaved_write_frame consumes the packet's data)
            for (int s = 0; s < video_stream_count; s++) {
                if (!video_stream_validation[s]) { // skip invalid video slots
                    continue;
                }
                // audio is always output stream index 1
                const AVRational& out_time_base = output_ctx[s]->streams[1]->time_base;
                AVPacket* cp = av_packet_clone(&packet);
                cp->pts = av_rescale_q(cp->pts, in_time_base, out_time_base);
                cp->dts = av_rescale_q(cp->dts, in_time_base, out_time_base);
                cp->duration = av_rescale_q(cp->duration, in_time_base, out_time_base);
                cp->stream_index = 1; // output audio stream index
                av_interleaved_write_frame(output_ctx[s], cp);
                av_packet_free(&cp);
            }
        } else if (type == AVMEDIA_TYPE_SUBTITLE) {
            // output index 1 when there is no audio stream, otherwise 2
            const int subtitle_stream_index = audio_stream_index >= 0 ? 2 : 1;
            for (int s = 0; s < video_stream_count; s++) {
                if (!video_stream_validation[s]) { // skip invalid video slots
                    continue;
                }
                const AVRational& out_time_base = output_ctx[s]->streams[subtitle_stream_index]->time_base;
                AVPacket* cp = av_packet_clone(&packet);
                // rescale the clone only -- rescaling `packet` itself would
                // accumulate across the fan-out loop
                cp->pts = av_rescale_q(cp->pts, in_time_base, out_time_base);
                cp->dts = av_rescale_q(cp->dts, in_time_base, out_time_base);
                cp->duration = av_rescale_q(cp->duration, in_time_base, out_time_base);
                cp->stream_index = subtitle_stream_index;
                av_interleaved_write_frame(output_ctx[s], cp);
                av_packet_free(&cp);
            }
        }
        av_packet_unref(&packet);
    }
    // finalize and close every output
    for (int s = 0; s < video_stream_count; s++) {
        if (!video_stream_validation[s]) { // skip invalid video slots
            continue;
        }
        av_write_trailer(output_ctx[s]);
        if (!(output_ctx[s]->oformat->flags & AVFMT_NOFILE)) {
            avio_closep(&output_ctx[s]->pb);
        }
        avformat_free_context(output_ctx[s]);
    }
    avformat_close_input(&input_ctx);
    _afterProcess();
    emit done(ErrorNone);
}
void FMVideoEdit::_afterProcess()
{
if(_deleteSrcAfter) {
for(int i=0;i<_files.size();i++) {
QFile(_files.at(i)).remove();
qInfo() << "DELETE:" << _files.at(i) << __FUNCTION__;
}
}
}
#if (USE_LIB_MP4V2)
// Legacy mp4v2-based track splitter: clones each video track (plus the
// non-audio auxiliary tracks) into its own output file, copying samples
// one by one. Emits done(ErrorCode) on failure.
// NOTE(review): unlike _splitMP4VideoTracks this path does not emit
// done(ErrorNone) on success and does not run _afterProcess -- kept as-is
// apart from the bug fixes below; confirm before re-enabling USE_LIB_MP4V2.
void FMVideoEdit::_splitVideoTracks()
{
    QString src = _files.first();
    MP4FileHandle inputFile = MP4Read(src.toLocal8Bit().data());
    if (MP4_INVALID_FILE_HANDLE == inputFile) {
        emit done(ErrorFileOpen);
        return;
    }
    uint32_t numTrack = MP4GetNumberOfTracks(inputFile, NULL, 0);
    qInfo() << "numTrack" << numTrack << __FUNCTION__;
    QList<MP4TrackId> videoTracks = QList<MP4TrackId>();
    QList<MP4TrackId> otherTracks = QList<MP4TrackId>(); // every non-video track except audio
    for (uint32_t i = 0; i < numTrack; i++) {
        MP4TrackId tid = MP4FindTrackId(inputFile, i, NULL, 0);
        const char* type = MP4GetTrackType(inputFile, tid);
        if (strcmp(type, MP4_VIDEO_TRACK_TYPE) == 0) {
            videoTracks.append(tid);
            qInfo() << "TRACK:" << i << " ID:" << tid << " TYPE:" << type << __FUNCTION__;
        } else if (strcmp(type, MP4_AUDIO_TRACK_TYPE) != 0) {
            // audio is excluded: copying only the audio track produced
            // atom-size errors (historical observation)
            otherTracks.append(tid);
            qInfo() << "OTHER TRACK:" << i << " ID:" << tid << " TYPE:" << type << __FUNCTION__;
        }
    }
    if (videoTracks.size() != _dests.size()) {
        emit done(ErrorWrongTrackCount);
        MP4Close(inputFile);
        return; // BUGFIX: previously fell through and used the closed handle
    }
    for (int i = 0; i < _dests.size(); i++) {
        QString dest = _dests.at(i);
        MP4FileHandle outputFile = MP4Create(dest.toLocal8Bit().data());
        if (outputFile == MP4_INVALID_FILE_HANDLE) {
            emit done(ErrorFileCreate);
            MP4Close(inputFile);
            return;
        }
        // copy the general container info into the output file
        MP4SetTimeScale(outputFile, MP4GetTimeScale(inputFile));
        QList<MP4TrackId> cloneTracks = QList<MP4TrackId>();
        cloneTracks.append(videoTracks.at(i)); // this output's video track
        cloneTracks.append(otherTracks);       // remaining tracks (subtitle/meta)
        qInfo() << "cloneTracks:" << cloneTracks << __FUNCTION__;
        // clone each track and copy its samples
        for (int j = 0; j < cloneTracks.size(); j++) {
            MP4TrackId srcID = cloneTracks.at(j);
            MP4TrackId outputVideoTrackId = MP4CloneTrack(inputFile, srcID, outputFile);
            qInfo() << "CLONE TRACK SRC:" << srcID << " OUT:" << outputVideoTrackId << __FUNCTION__;
            if (outputVideoTrackId == MP4_INVALID_TRACK_ID) {
                MP4Close(inputFile);
                MP4Close(outputFile);
                emit done(ErrorTrackClone);
                return;
            }
            // copy samples one by one
            uint32_t sampleId = 1;
            MP4SampleId sampleCount = MP4GetTrackNumberOfSamples(inputFile, srcID);
            while (sampleId <= sampleCount) {
                uint8_t* sampleData = nullptr;
                uint32_t sampleSize = 0;
                MP4Timestamp startTime = 0;
                MP4Duration sampleDuration = 0;
                if (MP4ReadSample(inputFile, srcID, sampleId, &sampleData, &sampleSize, &startTime, &sampleDuration)) {
                    if (!MP4WriteSample(outputFile, outputVideoTrackId, sampleData, sampleSize, sampleDuration)) {
                        emit done(ErrorWriteSample);
                        MP4Free(sampleData);
                        // BUGFIX: previously only broke out of the sample
                        // loop and kept cloning further tracks/files after
                        // reporting the error -- abort cleanly instead
                        MP4Close(outputFile);
                        MP4Close(inputFile);
                        return;
                    }
                    MP4Free(sampleData);
                } else {
                    emit done(ErrorReadSample);
                    MP4Close(outputFile);
                    MP4Close(inputFile);
                    return;
                }
                sampleId++;
            }
        } // each track copied
        MP4Close(outputFile);
    }
    MP4Close(inputFile);
}
#endif // #if (USE_LIB_MP4V2)
#endif // #if (FM_VIDEO_EDIT)

View File

@@ -0,0 +1,75 @@
#ifndef FM_VIDEO_EDIT_H
#define FM_VIDEO_EDIT_H
#if (FM_VIDEO_EDIT)
#include <QtCore>
/**
 * @brief Video (MP4/AVI) editing: file join and track split, run as a
 *        QRunnable on a thread pool.
 * @example
 *   FMVideoEdit* ed = new FMVideoEdit(...,...);
 *   connect(sp, SIGNAL(done(int)), SLOT(onSplitVideoDone(int)));
 *   QThreadPool::globalInstance()->start(sp, LOADER_THREAD_PRIORITY);
 */
class FMVideoEdit : public QObject, public QRunnable
{
    Q_OBJECT
public:
    enum EditMode {
        FileJoin,
        VideoTrackSplit,
    };
    enum ErrorCode {
        ErrorNone = 0,
        ErrorWrongTrackCount,
        ErrorFileOpen,
        ErrorOpenStream,
        ErrorFileCreate,
        ErrorCreateStream,
        ErrorCopyStreamParameters,
        ErrorFileWrite,
        // Added: the mp4v2 path in fm_video_edit.cpp emits these three but
        // they were missing from the enum (appended so existing values keep
        // their numeric codes).
        ErrorTrackClone,
        ErrorReadSample,
        ErrorWriteSample,
    };
    /**
     * @brief Construct an edit job.
     * @param files: source file path(s) -- must share format/codec/etc.
     * @param dests: destination file path(s)
     * @param mode: edit operation to perform
     * @param deleteSrcDone: delete the sources after a successful run
     */
    FMVideoEdit(QStringList files, QStringList dests, EditMode mode, bool deleteSrcDone = true);
    virtual void run();
private:
    QStringList _files; //! source file(s)
    QStringList _dests; //! destination path(s)
    EditMode _mode;     //! edit operation
    bool _deleteSrcAfter; //! delete sources after processing
    /**
     * @brief Save a multi-track video as N single-track video+audio files.
     */
    void _splitMP4VideoTracks();
    /**
     * @brief Post-processing after completion, e.g. source file deletion.
     */
    void _afterProcess();
signals:
    /**
     * @brief Processing finished (or failed).
     * @param error: error code, 0 = success
     */
    void done(int error); //! completion result
    /**
     * @brief Processing progress.
     * @param progress 0~100%
     */
    void progress(int progress); //! progress percentage
};
#endif // FM_VIDEO_EDIT
#endif // FM_VIDEO_EDIT_H

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,32 @@
#ifndef RM_AVIREPAIR_H
#define RM_AVIREPAIR_H
#include <QObject>
//#include "rm_constants.h"
// Static helpers for repairing broken/truncated AVI recordings.
class RMAVIRepair : public QObject
{
    Q_OBJECT
public:
    explicit RMAVIRepair(QObject *parent = 0);
    // Repair the AVI at filePath and write the result to destPath.
    static bool repair(QString& filePath, QString& destPath);
#if (SUPPORT_AVI_FIX_DURL)
    // V50 variant of the repair routine (DURL fix support).
    static bool repairV50(QString& filePath, QString& destPath);
#endif
    static bool clipToPair(QString& filePath, QString& pairfilePath);
#if (REPAIR_CHECK_SUBTITLE)
    // Compare the subtitle timestamp against the file name; skip processing
    // when they differ by 30 minutes or more.
    static bool compare_subtitle_time(QString& filePath, char* subtitle);
#endif
    // Append "_f" to the file name (marks a failed repair).
#if (REPAIR_FAILED_TAG)
    static void rename_failed(QString& filePath);
#endif
signals:
public slots:
};
#endif // RM_AVIREPAIR_H

View File

@@ -0,0 +1,29 @@
#ifndef RM_FORMAT_H
#define RM_FORMAT_H
#if !defined(BBEXTRACT)
#include "../rm_include.h"
#endif
// Summary information gathered by the lightweight video pre-parsers.
typedef struct _VideoPreInfo {
    bool bDuration;        // true once 'duration' has been filled in
    unsigned int duration; // playback duration in milliseconds
#if (CHECK_VIDEO_BITRATE)
    bool bMOVSize;         // true once 'movSize' has been filled in
    unsigned int movSize;  // size of the media-data ('movi') chunk in bytes
#endif
    unsigned int width;    // video width in pixels
    unsigned int height;   // video height in pixels
} VideoPreInfo;
// What the parser should extract from the file.
typedef enum {
    VideoReadSensor = 0,   // read the embedded sensor data
    VideoReadDuration = 1, // read only the playback duration
#if (CHECK_VIDEO_BITRATE)
    VideoReadMOVSize = 2,  // duration + 'mov' size (used to detect a disconnected rear camera)
#endif
} VideoReadMode;
#endif // RM_FORMAT_H

View File

@@ -0,0 +1,883 @@
#if (FILE_FORMAT_AVI)
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#if !defined(BBEXTRACT)
#include <QtCore>
#include "rm_constants.h"
#if (APPLICATION_PROFILE)
#include <QElapsedTimer>
#include <QDebug>
#endif
#endif //#if !defined(BBEXTRACT)
#include "rm_format_avi.h"
extern "C" {
#include "fileio.h"
}
// 참조: https://github.com/masayukig/jpegtoavi/blob/master/aviformat.txt
// http://telnet.or.kr/directx/htm/avirifffilereference.htm
// http://blog.naver.com/PostView.nhn?blogId=shlee7708&logNo=120121689464 <- 제일 편함!!
//static int skip_chunk(FILE *in)
//{
//char chunk_id[5];
//int chunk_size;
//int end_of_chunk;
// read_chars(_file,chunk_id,4);
// chunk_size=read_long(_file);
// printf("Uknown Chunk at %d\n",(int)ftell(_file));
// printf("-------------------------------\n");q
// printf(" chunk_id: %s\n",chunk_id);
// printf(" chunk_size: %d\n",chunk_size);
// printf("\n");
// end_of_chunk=ftell(_file)+chunk_size;
// if ((end_of_chunk%4)!=0)
// {
// end_of_chunk=end_of_chunk+(4-(end_of_chunk%4));
// }
// RMfseek(_file,end_of_chunk,SEEK_SET);
// return 0;
//}
//AVIRiff(RMfile in,bool durationOnly = false);
/**
 * @brief Open an AVI RIFF stream and parse it according to the requested mode.
 * @param in    already-opened file handle (not owned by this object)
 * @param mode  what to extract (sensor data, duration, ...)
 * @param info  optional summary output; cleared here before parsing
 */
AVIRiff::AVIRiff(RMfile in, VideoReadMode mode, VideoPreInfo* info)
{
    _file = in;
    _readMode = mode;
    _preInfo = info;
    _isValid = false;
    _videoStreamCount = 0;
#if (SENSOR_AVI_SUBTITLE)
    subtitles = NULL;
#endif
#if (AVI_CHUNHO_SENSOR_FORMAT_1)
    _gps_buffer = NULL;
    _gps_buffer_size = 0;
    _sensor_buffer = NULL;
    _sensor_buffer_size = 0;
#endif
    // Reset the caller-supplied summary so the parser starts from a clean slate.
    if (_preInfo != NULL) {
        _preInfo->bDuration = false;
        _preInfo->duration = 0;
#if (CHECK_VIDEO_BITRATE)
        _preInfo->bMOVSize = false;
        _preInfo->movSize = 0;
#endif
        _preInfo->width = 0;
        _preInfo->height = 0;
    }
    parse_riff();
}
/**
 * @brief Release every buffer owned by the parser (palettes, sensor buffers,
 * collected subtitle strings).
 */
AVIRiff::~AVIRiff()
{
    // free(NULL) is a no-op, so no NULL guards are required here.
    for (int idx = 0; idx < _videoStreamCount; ++idx) {
        free(_videoStreamFormat[idx].palette);
    }
#if (AVI_CHUNHO_SENSOR_FORMAT_1)
    free(_gps_buffer);
    free(_sensor_buffer);
#endif
#if (SENSOR_AVI_SUBTITLE)
    if (subtitles != NULL) {
        // Each entry was malloc'ed by add_subtitle().
        for (int idx = 0; idx < subtitles->size(); ++idx) {
            free(subtitles->at(idx));
        }
        delete subtitles;
        subtitles = NULL;
    }
#endif
}
/**
 * @brief [DEBUG] Hex-dump chunk_len bytes from the current file position,
 * 16 bytes per line with an ASCII column on the right.
 * @param chunk_len number of bytes to dump
 * @return always 0
 */
int AVIRiff::hex_dump_chunk(int chunk_len)
{
#if (USE_AVI_DUMP)
    char chars[17];
    int ch, n;
    int line = 0;
    // Fully initialize the ASCII column up front: the original code only set
    // chars[16] and printed uninitialized memory when chunk_len == 0.
    memset(chars, ' ', 16);
    chars[16] = 0;
    for (n = 0; n < chunk_len; n++)
    {
        if ((n % 16) == 0)
        {
            if (n != 0)
            {
                printf("%s\n", chars);
            }
            printf("%04d ", line++);
            memset(chars, ' ', 16);
        }
        // RMgetc matches the RMfile handle type used everywhere else in this
        // file (the original used bare getc()).
        ch = RMgetc(_file);
        if (ch == EOF)
        {
            break;
        }
        printf("%02x ", ch);
        // Printable ASCII goes to the right-hand column, everything else '.'.
        if (ch >= ' ' && ch <= 126)
        {
            chars[n % 16] = ch;
        }
        else
        {
            chars[n % 16] = '.';
        }
    }
    // Pad the last partial hex line so the ASCII column stays aligned.
    if ((n % 16) != 0)
    {
        for (ch = n % 16; ch < 16; ch++)
        {
            printf("   ");
        }
    }
    printf("%s\n", chars);
#else
#if !defined(BBEXTRACT)
    Q_UNUSED(chunk_len);
#endif // #if !defined(BBEXTRACT)
#endif
    return 0;
}
#if (DEBUG_AVI_FORMAT)
/**
 * @brief [DEBUG] Dump every 'idx1' index entry (16 bytes each: fourcc, flags,
 * chunk offset, chunk length).
 * @param chunk_len total size of the idx1 chunk in bytes
 * @return always 0
 */
int AVIRiff::parse_idx1(int chunk_len)
{
    index_entry_t index_entry;
    int t;
    printf(" IDX1\n");
    printf(" -------------------------------\n");
    printf(" ckid dwFlags dwChunkOffset dwChunkLength\n");
    // 16 bytes per entry, hence chunk_len/16 entries.
    for (t=0; t<chunk_len/16; t++)
    {
        read_chars(_file,index_entry.ckid,4);
        index_entry.dwFlags=read_long(_file);
        index_entry.dwChunkOffset=read_long(_file);
        index_entry.dwChunkLength=read_long(_file);
        printf(" %s 0x%08x 0x%08x 0x%08x\n",
               index_entry.ckid,
               index_entry.dwFlags,
               index_entry.dwChunkOffset,
               index_entry.dwChunkLength);
    }
    printf("\n");
    return 0;
}
#endif
/**
 * @brief Read the main AVI header ('avih') into _avi_header and publish
 * width/height through _preInfo when a summary struct was supplied.
 * @return always 0
 */
int AVIRiff::read_avi_header()
{
    // Disconnected recording bit rate: 121 kb/s; connected: 4 493 kb/s.
#if (DEBUG_AVI_HEADER)
    // BUGFIX: the debug dump below references 'offset', but its declaration
    // had been commented out - DEBUG_AVI_HEADER builds could not compile.
    long offset=RMftell(_file);
#endif
    AVIHeader* avi_header = &_avi_header;
    avi_header->TimeBetweenFrames=read_long(_file); // AVI TYPE 1 = 0
    avi_header->MaximumDataRate=read_long(_file);
    avi_header->PaddingGranularity=read_long(_file);
    avi_header->Flags=read_long(_file);
    avi_header->TotalNumberOfFrames=read_long(_file);
    avi_header->NumberOfInitialFrames=read_long(_file);
    avi_header->NumberOfStreams=read_long(_file);
    avi_header->SuggestedBufferSize=read_long(_file);
    avi_header->Width=read_long(_file);
    avi_header->Height=read_long(_file);
    avi_header->TimeScale=read_long(_file);
    avi_header->DataRate=read_long(_file);
    avi_header->StartTime=read_long(_file);
    avi_header->DataLength=read_long(_file);
    if(_preInfo != NULL)
    {
        _preInfo->width = avi_header->Width;
        _preInfo->height = avi_header->Height;
    }
#if (DEBUG_AVI_HEADER)
    printf(" offset=0x%lx\n",offset);
    printf(" TimeBetweenFrames: %d\n",avi_header->TimeBetweenFrames);
    printf(" MaximumDataRate: %d\n",avi_header->MaximumDataRate);
    printf(" PaddingGranularity: %d\n",avi_header->PaddingGranularity);
    printf(" Flags: %d\n",avi_header->Flags);
    printf(" TotalNumberOfFrames: %d\n",avi_header->TotalNumberOfFrames);
    printf(" NumberOfInitialFrames: %d\n",avi_header->NumberOfInitialFrames);
    printf(" NumberOfStreams: %d\n",avi_header->NumberOfStreams);
    printf(" SuggestedBufferSize: %d\n",avi_header->SuggestedBufferSize);
    printf(" Width: %d\n",avi_header->Width);
    printf(" Height: %d\n",avi_header->Height);
    printf(" TimeScale: %d\n",avi_header->TimeScale);
    printf(" DataRate: %d\n",avi_header->DataRate);
    printf(" StartTime: %d\n",avi_header->StartTime);
    printf(" DataLength: %d\n",avi_header->DataLength);
    fflush(stdout);
#endif
    return 0;
}
/**
 * @brief [DEBUG] Print a 4-byte handler fourcc; alphanumeric bytes are
 * printed verbatim, everything else as [0xNN].
 */
void AVIRiff::print_data_handler(unsigned char *handler)
{
    for (int i = 0; i < 4; i++)
    {
        unsigned char c = handler[i];
        bool printable = (c >= 'a' && c <= 'z')
                      || (c >= 'A' && c <= 'Z')
                      || (c >= '0' && c <= '9');
        if (printable)
        {
            printf("%c", c);
        }
        else
        {
            printf("[0x%02x]", c);
        }
    }
}
/**
 * @brief Read one stream header ('strh') and, for the first stream seen,
 * derive the playback duration for _preInfo.
 * @param stream_header destination struct
 * @return always 0
 */
int AVIRiff::read_stream_header(stream_header_t *stream_header)
{
#if (DEBUG_AVI_STREAM_HEADER)
    long offset=RMftell(_file);
#endif
    read_chars(_file,stream_header->DataType,4);
    read_chars(_file,stream_header->DataHandler,4);
    stream_header->Flags=read_long(_file);
    stream_header->Priority=read_long(_file);
    stream_header->InitialFrames=read_long(_file);
    stream_header->TimeScale=read_long(_file);
    stream_header->DataRate=read_long(_file);
    stream_header->StartTime=read_long(_file);
    stream_header->DataLength=read_long(_file);
    stream_header->SuggestedBufferSize=read_long(_file);
    stream_header->Quality=read_long(_file);
    stream_header->SampleSize=read_long(_file);
    if(_preInfo != NULL) {
        // Duration (ms) = DataLength frames * (TimeScale / DataRate) * 1000.
        // BUGFIX: the expression divides by DataRate, so guard DataRate != 0
        // as well (the original only checked TimeScale and could divide by
        // zero on a corrupt header).
        if (_preInfo->duration == 0 && stream_header->TimeScale != 0 &&
            stream_header->DataRate != 0)
        {
            _preInfo->bDuration = true;
            _preInfo->duration = (unsigned int)(((double)(stream_header->TimeScale * 1000) / (double)stream_header->DataRate) * (double)stream_header->DataLength);
        }
    }
#if (DEBUG_AVI_STREAM_HEADER)
    printf("------------------------------------------------------------\n");
    printf(" offset=0x%lx\n",offset);
    printf(" DataType: %s\n",stream_header->DataType);
    printf(" DataHandler: ");
    print_data_handler((unsigned char *)stream_header->DataHandler);
    printf("\n");
    printf(" Flags: %d\n",stream_header->Flags);
    printf(" Priority: %d\n",stream_header->Priority);
    printf(" InitialFrames: %d\n",stream_header->InitialFrames);
    printf(" TimeScale: %d\n",stream_header->TimeScale);
    printf(" DataRate: %d\n",stream_header->DataRate);
    printf(" StartTime: %d\n",stream_header->StartTime);
    printf(" DataLength: %d\n",stream_header->DataLength);
    printf(" SuggestedBufferSize: %d\n",stream_header->SuggestedBufferSize);
    printf(" Quality: %d\n",stream_header->Quality);
    printf(" SampleSize: %d\n",stream_header->SampleSize);
    fflush(stdout);
#endif
    return 0;
}
/**
 * @brief Read a video stream format ('strf', BITMAPINFOHEADER layout) plus
 * the optional BGR palette that follows it.
 * @param stream_format destination struct; palette is malloc'ed here and
 *        freed in ~AVIRiff()
 * @return always 0
 */
int AVIRiff::read_stream_format(stream_format_t *stream_format)
{
    int t,r,g,b;
#if (DEBUG_AVI_FORMAT)
    long offset=RMftell(_file);
#endif
    stream_format->header_size=read_long(_file);
    stream_format->image_width=read_long(_file);
    stream_format->image_height=read_long(_file);
    stream_format->number_of_planes=read_word(_file);
    stream_format->bits_per_pixel=read_word(_file);
    stream_format->compression_type=read_long(_file);
    stream_format->image_size_in_bytes=read_long(_file);
    stream_format->x_pels_per_meter=read_long(_file);
    stream_format->y_pels_per_meter=read_long(_file);
    stream_format->colors_used=read_long(_file);
    stream_format->colors_important=read_long(_file);
    stream_format->palette=0;
    if (stream_format->colors_important!=0)
    {
        stream_format->palette= (int*) malloc(stream_format->colors_important*sizeof(int));
        for (t=0; t<stream_format->colors_important; t++)
        {
            // Palette entries are stored B,G,R.
            b=RMgetc(_file);
            g=RMgetc(_file);
            r=RMgetc(_file);
            // BUGFIX: the original dereferenced an unchecked malloc. Keep
            // consuming the palette bytes even on allocation failure so the
            // file position stays consistent for the caller.
            if (stream_format->palette != NULL)
            {
                stream_format->palette[t]=(r<<16)+(g<<8)+b;
            }
        }
    }
#if (DEBUG_AVI_FORMAT)
    printf(" offset=0x%lx\n",offset);
    printf(" header_size: %d\n",stream_format->header_size);
    printf(" image_width: %d\n",stream_format->image_width);
    printf(" image_height: %d\n",stream_format->image_height);
    printf(" number_of_planes: %d\n",stream_format->number_of_planes);
    printf(" bits_per_pixel: %d\n",stream_format->bits_per_pixel);
    printf(" compression_type: %04x (%c%c%c%c)\n",stream_format->compression_type,
           ((stream_format->compression_type)&255),
           ((stream_format->compression_type>>8)&255),
           ((stream_format->compression_type>>16)&255),
           ((stream_format->compression_type>>24)&255));
    printf(" image_size_in_bytes: %d\n",stream_format->image_size_in_bytes);
    printf(" x_pels_per_meter: %d\n",stream_format->x_pels_per_meter);
    printf(" y_pels_per_meter: %d\n",stream_format->y_pels_per_meter);
    printf(" colors_used: %d\n",stream_format->colors_used);
    printf(" colors_important: %d\n",stream_format->colors_important);
    fflush(stdout);
#endif
    return 0;
}
/**
 * @brief Read an audio stream format ('strf', WAVEFORMATEX-like layout).
 * @param stream_format destination struct
 * @return always 0
 */
int AVIRiff::read_stream_format_auds(stream_format_auds_t *stream_format)
{
#if (DEBUG_AVI_FORMAT)
    long offset=RMftell(_file);
#endif
    stream_format->format=read_word(_file);
    stream_format->channels=read_word(_file);
    stream_format->samples_per_second=read_long(_file);
    stream_format->bytes_per_second=read_long(_file);
    // WAVEFORMATEX order: nBlockAlign (WORD) then wBitsPerSample (WORD).
    // BUGFIX: an extra read_word() for block_align was previously compiled
    // in only when DEBUG_AVI_FORMAT was on, shifting every following field
    // by two bytes in debug builds. The read sequence is now identical in
    // both configurations.
    stream_format->block_size_of_data=read_word(_file);
    stream_format->bits_per_sample=read_word(_file);
    //stream_format->extended_size=read_word(_file);
#if (DEBUG_AVI_FORMAT)
    printf(" offset=0x%lx\n",offset);
    printf(" format: %d\n",stream_format->format);
    printf(" channels: %d\n",stream_format->channels);
    printf(" samples_per_second: %d\n",stream_format->samples_per_second);
    printf(" bytes_per_second: %d\n",stream_format->bytes_per_second);
    printf(" block_size_of_data: %d\n",stream_format->block_size_of_data);
    printf(" bits_per_sample: %d\n",stream_format->bits_per_sample);
#endif
    return 0;
}
/**
 * @brief Parse one 'strl' LIST (or JUNK) inside the 'hdrl' header list.
 *
 * Peeks at the 'strh' fourcc to classify the stream (vids/auds/txts), reads
 * the matching stream header, then the 'strf' format chunk. Video streams
 * accumulate in _videoStreamHeaders/_videoStreamFormat; audio and subtitle
 * streams go to their dedicated members.
 *
 * @return 0 on success, -1 on an unknown stream type
 */
int AVIRiff::parse_hdrl_list()
{
    char chunk_id[5];
    int chunk_size;
    char chunk_type[5];
    int end_of_chunk;
    int next_chunk;
    AVI_STREAM_TYPE stream_type = UNDEFINED_STREAM;
    read_chars(_file,chunk_id,4);
    chunk_size=read_long(_file);
    read_chars(_file,chunk_type,4);
#if (DEBUG_AVI_FORMAT)
    // BUGFIX: 'offset' was referenced by the debug printf while its
    // declaration was commented out; declare it under the same guard.
    long offset=RMftell(_file);
    printf(" AVI Header LIST (id=%s size=%d type=%s offset=0x%lx)\n",chunk_id,chunk_size,chunk_type,offset);
    printf(" {\n");
#endif
    end_of_chunk=RMftell(_file)+chunk_size-4;
    if ((end_of_chunk%4)!=0)
    {
        // intentionally no 4-byte realignment here (see original notes)
    }
    if (strcmp(chunk_id,"JUNK")==0)
    {
        RMfseek(_file,end_of_chunk,SEEK_SET);
#if (DEBUG_AVI_FORMAT)
        printf(" }\n");
#endif
        return 0;
    }
    while (RMftell(_file)<end_of_chunk)
    {
        read_chars(_file,chunk_type,4);
        chunk_size=read_long(_file);
        next_chunk=RMftell(_file)+chunk_size;
#if (DEBUG_AVI_FORMAT)
        printf(" %.4s (size=%d)\n",chunk_type,chunk_size);
        printf(" {\n");
#endif
        if (strcasecmp("strh",chunk_type)==0)
        {
            // Peek at the stream fourcc, then rewind so read_stream_header()
            // sees the complete header.
            long marker=RMftell(_file);
            char buffer[5];
            read_chars(_file,buffer,4);
            RMfseek(_file,marker,SEEK_SET);
            if (strcmp(buffer, "vids")==0)
            {
                stream_type = VIDEO_STREAM;
                read_stream_header(&_videoStreamHeaders[_videoStreamCount]); // count advances after 'strf'
            }
            else if (strcmp(buffer, "auds")==0)
            {
                stream_type = AUDIO_STREAM;
                read_stream_header(&_audioStreamHeader);
            }
            else if (strcmp(buffer, "txts")==0)
            {
                stream_type = TXT_STREAM;
                read_stream_header(&_txtStreamHeader);
            }
            else
            {
#if (DEBUG_AVI_FORMAT)
                printf("Unknown stream type %s\n", buffer);
#endif
                return -1;
            }
        }
        else if (strcasecmp("strf",chunk_type)==0)
        {
            if (stream_type == VIDEO_STREAM)
            {
                read_stream_format(&_videoStreamFormat[_videoStreamCount]);
                _videoStreamCount++;
            }
            // BUGFIX: these branches compared stream_type against the
            // literals 1 and 2. Since VIDEO_STREAM == 1, the audio branch was
            // unreachable and _audioStreamFormat was never populated, while
            // audio (AUDIO_STREAM == 2) fell into the subtitle no-op branch.
            else if (stream_type == AUDIO_STREAM)
            {
                read_stream_format_auds(&_audioStreamFormat);
            }
            else if (stream_type == TXT_STREAM) // subtitle
            {
                // The subtitle 'strf' carries no information; nothing to read.
            }
        }
        else if (strcasecmp("strd",chunk_type)==0)
        {
            // codec-private data; not used
        }
        else
        {
#if (DEBUG_AVI_FORMAT)
            printf(" Unknown chunk type: %s\n",chunk_type);
#endif
        }
#if (DEBUG_AVI_FORMAT)
        printf(" }\n");
#endif
        RMfseek(_file,next_chunk,SEEK_SET);
    }
#if (DEBUG_AVI_FORMAT)
    printf(" }\n");
#endif
    RMfseek(_file,end_of_chunk,SEEK_SET);
    return 0;
}
#if (SENSOR_AVI_SUBTITLE)
// Subtitle data, AVI TYPE2.
/**
 * @brief Collect one subtitle ('..tx') chunk payload as a NUL-terminated
 * string. Ownership of the buffer passes to 'subtitles' and is released in
 * the destructor.
 * @param chunkSize payload size in bytes
 * @return 0 on success, -1 when allocation fails (chunk is skipped; the
 *         caller repositions the file afterwards)
 */
int AVIRiff::add_subtitle(long chunkSize)
{
    if(subtitles == NULL) {
        subtitles = new QList<char*>();
    }
    // +1 for the NUL terminator.
    char* buffer = (char*)malloc(chunkSize + 1);
    if (buffer == NULL) {
        // BUGFIX: the original dereferenced an unchecked malloc.
        return -1;
    }
    memset(buffer,0,chunkSize + 1);
    RMfread(buffer,chunkSize,1,_file);
    subtitles->append(buffer);
    return 0;
}
#endif
/**
 * @brief Walk the 'movi' LIST, skipping media chunks and collecting subtitle
 * ('NNtx') chunks when SENSOR_AVI_SUBTITLE is enabled.
 * @param size total size of the movi LIST in bytes
 * @return always 0
 */
int AVIRiff::parse_movi_list(unsigned int size)
{
#if (APPLICATION_PROFILE)
    QElapsedTimer timer;
    timer.start();
#endif
    char chunk_id[5] = {0,};
    long chunk_size;
    long end_of_chunk;
    long offset=RMftell(_file);
#if (DEBUG_AVI_MOVI) || (APPLICATION_PROFILE)
    // BUGFIX: txtCount was commented out while still referenced by the
    // DEBUG_AVI_MOVI printf and the APPLICATION_PROFILE qInfo below, so those
    // builds could not compile. Declared under a matching guard.
    int txtCount = 0;
#endif
#if (DEBUG_AVI_MOVI)
    // BUGFIX: the original printed chunk_id/chunk_size here before they were
    // ever read (uninitialized); only the offset is meaningful at this point.
    printf(" AVI MOVI Chunk (offset=0x%lx)\n",offset);
    printf(" {\n");
    fflush(stdout);
#endif
    while(RMftell(_file)< offset+(long)size-4)
    {
        read_chars(_file,chunk_id,4); // 4 bytes: chunk fourcc
        chunk_size=read_long(_file);  // 4 bytes: payload size
        // Advance past the payload only; the next chunk's id+size follow.
        end_of_chunk= RMftell(_file) + chunk_size;
        // H265 AVI pads chunks to 2-byte boundaries.
        if ((end_of_chunk % 2) == 1)
        {
            end_of_chunk += 1;
        }
        // Subtitle chunks are 'NNtx' (e.g. "00tx").
        if(chunk_id[2] == 't' && chunk_id[3] == 'x')
        {
#if (DEBUG_AVI_MOVI) || (APPLICATION_PROFILE)
            txtCount++;
#endif
#if (SENSOR_AVI_SUBTITLE)
            add_subtitle(chunk_size);
#endif
#if (DEBUG_AVI_MOVI)
            printf(" MOVI TXT (id(%04d)=%s size=%ld offset=%ld)\n",txtCount,chunk_id,chunk_size,RMftell(_file));
            fflush(stdout);
#endif
        }
        else
        {
            // NAL SEI handling may be added here later.
        }
        RMfseek(_file,end_of_chunk,SEEK_SET);
    }
#if (DEBUG_AVI_MOVI)
    printf(" }\n");
    fflush(stdout);
#endif
#if (APPLICATION_PROFILE)
    qInfo () << "Elapsed Time(parse_movi_list):" << timer.elapsed() << " msec" << " count:" << txtCount;
#endif
    return 0;
}
//int AVIRiff::parse_hdrl(stream_format_t *stream_format, unsigned int size)
/**
 * @brief Parse the 'hdrl' LIST: the main AVI header ('avih') followed by one
 * 'strl' LIST per stream.
 * @param size total size of the hdrl chunk in bytes
 * @return always 0
 */
int AVIRiff::parse_hdrl(unsigned int size)
{
    char chunk_id[5];
    long chunk_size;
    long end_of_chunk;
    long offset=RMftell(_file);
    read_chars(_file,chunk_id,4);
    chunk_size=read_long(_file);
#if (DEBUG_AVI_FORMAT)
    printf(" AVI Header Chunk (id=%s size=%d offset=0x%lx)\n",chunk_id,chunk_size,offset);
    printf(" {\n");
#endif
    end_of_chunk=RMftell(_file)+chunk_size;
    if ((end_of_chunk%4)!=0) // 4-byte packing
    {
        end_of_chunk=end_of_chunk+(4-(end_of_chunk%4));
    }
    read_avi_header(); // read the main header
#if (DEBUG_AVI_FORMAT)
    printf(" }\n");
#endif
    // One parse_hdrl_list() call per 'strl' LIST until the hdrl chunk ends.
    while(RMftell(_file)<offset+(long)size-4)
    {
        parse_hdrl_list();
    }
    return 0;
}
/**
 * @brief Top-level RIFF parser.
 *
 * Validates the 'RIFF'/'AVI ' signature, then walks the top-level chunks
 * (hdrl, movi, idx1, JUNK/PAD, INFO). Depending on _readMode the walk stops
 * early once the requested information (duration and/or movi size) has been
 * collected. Sets _isValid when parsing succeeds.
 *
 * @return 0 on success (including early-out), 1 when the file is not RIFF/AVI
 */
int AVIRiff::parse_riff()
{
    _isValid = false; // stays false only on a tag/signature problem
    // per-chunk parsing state
    char chunk_id[5] = {0,};
    int chunk_size;
    char chunk_type[5] = {0,};
    int end_of_chunk, end_of_subchunk;
    //struct avi_header_t avi_header;
    // stream_header_t stream_header;
    // stream_format_t stream_format={0};
    read_chars(_file,chunk_id,4);
    chunk_size=read_long(_file);
    read_chars(_file,chunk_type,4);
#if (DEBUG_AVI_FORMAT)
    long offset=RMftell(_file);
    printf("RIFF Chunk (id=%s size=%d type=%s offset=0x%lx)\n",chunk_id,chunk_size,chunk_type, offset);
    printf("{\n");
#endif
    if (strcasecmp("RIFF",chunk_id)!=0)
    {
        //printf("Not a RIFF file.\n");
        return 1;
    }
    else if (strcasecmp("AVI ",chunk_type)!=0)
    {
        //printf("Not an AVI file.\n");
        return 1;
    }
    end_of_chunk=RMftell(_file)+chunk_size-4;
#if (AVI_CHUNHO_SENSOR_FORMAT_1)
    // Like STRD, the sensor data sits after end_of_chunk (past the RIFF body).
    if(_preInfo == NULL)
    {
        RMfseek(_file,end_of_chunk,SEEK_SET);
        bool b_gps_read = false;
        bool b_sensor_read = false;
        // scan towards the end of the file (bounded by tryCount)
        int tryCount = 0;
        while (RMfeof(_file) == 0 && tryCount++ < 100)
        {
            int offset = RMftell(_file);
#if !defined(BBEXTRACT)
            QString offsetStr;
            offsetStr.sprintf("%08X",offset);
#endif
            read_chars(_file,chunk_id,4); // chunk ID
            chunk_size=read_long(_file); // chunk size
            // Some files carry corrupted sensor data (e.g. short chunk offsets).
            // qInfo() << "chunk_id:" << chunk_id << " chunk_size:" << chunk_size << " offset:" << offsetStr;
            //end_of_subchunk=RMftell(_file)+chunk_size; // chunk extent
            //IDIT : 9355386 = 20
            //gpsa : 9355398 = 4
            //gps0 : 9357326 = 1920
            //gsea : 9357354 = 20
            //gsen : 9361576 = 4214
            // 14 BYTE
            if (strcasecmp("gps0",chunk_id)==0)
            {
                _gps_buffer_size = chunk_size;
                _gps_buffer = (uint8_t*)malloc(chunk_size);
                RMfread(_gps_buffer,_gps_buffer_size,1,_file);
                b_gps_read = true;
            }
            else if (strcasecmp("gsen",chunk_id)==0)
            {
                if(chunk_size > 0) {
                    //qInfo() << "gsen offset:" << offsetStr;
                    _sensor_buffer_size = chunk_size;
                    _sensor_buffer = (uint8_t*)malloc(chunk_size);
                    RMfread(_sensor_buffer,_sensor_buffer_size,1,_file);
                    b_sensor_read = true;
                }
            }
            // Corrupted data: fall back to a byte-by-byte search
            // (-7 plus the 8 header bytes below advances by exactly 1 byte).
            else if(chunk_size < 0 ||
                    (strcasecmp("IDIT",chunk_id) != 0 &&
                     strcasecmp("gpsa",chunk_id) != 0 &&
                     strcasecmp("gsea",chunk_id) != 0)) {
                chunk_size = -7;
            }
            end_of_subchunk = offset + 8 + chunk_size;
            if(b_sensor_read && b_gps_read)
            {
                break;
            }
            //qInfo() << chunk_id << ":" << end_of_subchunk << " =" << chunk_size;
            RMfseek(_file,end_of_subchunk,SEEK_SET);
        }
        _isValid = true;
        return 0;
    }
#endif
    while (RMftell(_file)<end_of_chunk)
    {
        read_chars(_file,chunk_id,4); // chunk ID
        chunk_size=read_long(_file); // chunk size
        end_of_subchunk=RMftell(_file)+chunk_size; // chunk extent
        if (strcasecmp("JUNK",chunk_id)==0 || strcasecmp("PAD ",chunk_id)==0) // JUNK/PAD carry no type field
        {
            chunk_type[0]=0;
        }
        else
        {
            read_chars(_file,chunk_type,4); // a type field is present
        }
#if (DEBUG_AVI_FORMAT)
        long offset=RMftell(_file); // current offset
        printf(" New Chunk (id=%s size=%d type=%s offset=0x%lx)\n",chunk_id,chunk_size,chunk_type,offset);
        printf(" {\n");
        fflush(stdout);
#endif
        if (strcasecmp("JUNK",chunk_id)==0 || strcasecmp("PAD ",chunk_id)==0) // padding - unused
        {
            if ((chunk_size%4)!=0)
            {
                chunk_size=chunk_size+(4-(chunk_size%4));
            }
#if (DEBUG_AVI_JUNK)
            hex_dump_chunk(chunk_size);
#endif
        }
        else if (strcasecmp("INFO",chunk_type)==0) // metadata - unused
        {
            if ((chunk_size%4)!=0)
            {
                chunk_size=chunk_size+(4-(chunk_size%4));
            }
#if (DEBUG_AVI_INFO)
            hex_dump_chunk(chunk_size);
#endif
        }
        else if (strcasecmp("hdrl",chunk_type)==0) // header list
        {
            parse_hdrl(chunk_size);
            // In non-default modes processing could stop after the header,
            // but the subtitle model check still needs the subtitles below.
        }
        else if (strcasecmp("movi",chunk_type)==0) // 'movi' data list
        {
#if (CHECK_VIDEO_BITRATE)
            // NOTE(review): 'AVIReadMOVSize' is not a member of the
            // VideoReadMode enum (which declares VideoReadMOVSize) - confirm
            // this compiles when CHECK_VIDEO_BITRATE is enabled.
            if(_readMode == AVIReadMOVSize && _preInfo != NULL) {
                _preInfo->bMOVSize = true;
                _preInfo->movSize = chunk_size;
            }
#endif
            // no further processing needed here
#if (SENSOR_AVI_SUBTITLE)
            if(_readMode == VideoReadSensor) {
                parse_movi_list(chunk_size);
            }
#endif
        }
        else if (strcasecmp("idx1",chunk_id)==0) // index
        {
            // idx1 has no type field: rewind the 4 bytes read as chunk_type.
            RMfseek(_file,RMftell(_file)-4,SEEK_SET);
#if (DEBUG_AVI_FORMAT)
            parse_idx1(chunk_size);
#endif
        }
        else
        {
#if (DEBUG_AVI_FORMAT)
            printf(" Unknown chunk at %d (%4s)\n",(int)RMftell(_file)-8,chunk_type);
#endif
            if (chunk_size==0)
            {
                break;
            }
        }
        // Stop early once everything requested has been collected.
        if(_readMode == VideoReadDuration && _preInfo != NULL && _preInfo->bDuration == true) {
            _isValid = true;
            return 0;
        }
#if (CHECK_VIDEO_BITRATE)
        else if(_readMode == VideoReadMOVSize && _preInfo != NULL && _preInfo->bDuration == true && _preInfo->bMOVSize)
        {
            _isValid = true;
            return 0;
        }
#endif
        RMfseek(_file,end_of_subchunk,SEEK_SET);
#if (DEBUG_AVI_FORMAT)
        printf(" }\n");
#endif
    }
#if (DEBUG_AVI_FORMAT)
    printf("}\n");
#endif
    _isValid = true;
    return 0;
}
/**
 * @brief Convenience helper: parse only the duration info out of an AVI file.
 * @param in   open file handle
 * @param info receives the duration (and width/height)
 * @return true when parsing succeeded
 */
bool AVIRiff::duration(RMfile in, VideoPreInfo* info)
{
    AVIRiff parser(in, VideoReadDuration, info);
    return parser.isValid();
}
#if (CHECK_VIDEO_BITRATE)
/**
 * @brief Convenience helper: parse duration plus 'movi' chunk size.
 * @param in   open file handle
 * @param info receives duration and movSize
 * @return true when parsing succeeded
 */
bool AVIRiff::movSize(RMfile in, VideoPreInfo* info)
{
    AVIRiff parser(in, VideoReadMOVSize, info);
    return parser.isValid();
}
#endif
#endif // #if (FILE_FORMAT_AVI)

View File

@@ -0,0 +1,263 @@
#ifndef RM_FORMAT_AVI_H
#define RM_FORMAT_AVI_H
#if (FILE_FORMAT_AVI)
#if !defined(BBEXTRACT)
#include "../rm_include.h"
#include <QString>
#endif
#include "rm_format.h"
#pragma once
#include <stdio.h>
#include <vector>
#include <stdint.h>
#ifdef _MSC_VER
#define strncasecmp _strnicmp
#define strcasecmp _stricmp
#endif
extern "C" {
#include "fileio.h"
}
// DEBUG 용
#define DEBUG_AVI_FORMAT 0
#define USE_AVI_DUMP 0
#define DEBUG_AVI_JUNK 0
#define DEBUG_AVI_INFO 0
#define DEBUG_AVI_HEADER 0 // 메인헤더
#define DEBUG_AVI_STREAM_HEADER 0 // 스트림 헤더
#define DEBUG_AVI_MOVI 0 // 스트림 데이터 리스트
// http://www.econote.co.kr/main/view_post.asp?post_seq_no=49407 정리 잘 되어 있음
// AVI RIFF 구조는
// RIFF
// - hdrl(LIST)
// - avih : AVI 파일 전체 기본 정보
// - strl (LIST)
// - strh (vids) : 영상 스트림 헤더
// - strf " 포멧
// - strl (LIST)
// - strh (auds) : 음성 스트림 헤더
// - strf " 포멧
// - strl (LIST)
// - strh (txts) : 자막 스트림 헤더
// - strf " 포멧
// - IDIT
// - INFO(LIST) : 정보
// -ISFT : 업체정보
// - movi(LIST) : 실제 데이터
// - 00dc/b : 음성패킷 (c:압축,b:비압축)
// - 00wb/c : 영상패킷 (")
// - 00tx : 자막
// - idx1: 프레임의 위치 (binary 로 01wb,flag,offset,size ..... 00dc,flat,offset,size..
#define MAX_VIDEO_STREAM_COUNT 4
// NOTE(review): some recordings carry TimeScale etc. as 0, which breaks the
// playback-duration computation - guard before dividing.
// AVI main header (AVI Header, 'avih')
typedef struct _AVIHeader
{
    int TimeBetweenFrames;
    int MaximumDataRate;
    int PaddingGranularity;
    int Flags;
    int TotalNumberOfFrames;
    int NumberOfInitialFrames;
    int NumberOfStreams;
    int SuggestedBufferSize;
    int Width;
    int Height;
    int TimeScale;
    int DataRate;
    int StartTime;
    int DataLength;
} AVIHeader;
// Stream header (Stream Header, 'strh') / Video ('vids')
typedef struct _STRHeader
{
    char DataType[5];
    char DataHandler[5];
    int Flags;
    int Priority;
    int InitialFrames;
    int TimeScale;
    int DataRate;
    int StartTime;
    int DataLength;
    int SuggestedBufferSize;
    int Quality;
    int SampleSize;
} stream_header_t;
// Stream format (Stream Format, 'strf') / Video ('vids')
typedef struct _STRFormat
{
    int header_size;
    int image_width;
    int image_height;
    int number_of_planes;
    int bits_per_pixel;
    int compression_type;
    int image_size_in_bytes;
    int x_pels_per_meter;
    int y_pels_per_meter;
    int colors_used;
    int colors_important;
    int *palette; // optional palette; malloc'ed by the parser, freed in ~AVIRiff()
} stream_format_t;
// Stream header (Stream Header, 'strh') / Audio ('auds')
typedef struct _STRHeaderAudio
{
    int format_type;
    int number_of_channels;
    int sample_rate;
    int bytes_per_second;
    int block_size_of_data;
    int bits_per_sample;
    int byte_count_extended;
}stream_header_auds_t;
// Stream format (Stream Format, 'strf') / Audio ('auds')
typedef struct _STRFormatAudio
{
    int header_size;
    int format;
    int channels;
    int samples_per_second;
    int bytes_per_second;
    int block_size_of_data;
    int bits_per_sample;
    int extended_size;
} stream_format_auds_t;
// Index entry structure ('idx1')
typedef struct _AVIDX
{
    char ckid[5];
    int dwFlags;
    int dwChunkOffset;
    int dwChunkLength;
} index_entry_t;
/**
 * @brief AVI RIFF container parser: extracts duration, 'movi' size, stream
 * headers/formats and embedded sensor/subtitle data from AVI recordings.
 */
class AVIRiff
{
private:
    VideoReadMode _readMode; // current parse mode
    VideoPreInfo* _preInfo;  // basic info output (when present, ReadMode is a pre-info mode)
    bool _isValid;           // true when the requested parse completed
public:
#if (SENSOR_AVI_SUBTITLE)
    QList<char*>* subtitles; // collected subtitle/sensor strings (owned; freed in dtor)
#endif
    AVIRiff(RMfile in,VideoReadMode mode = VideoReadSensor, VideoPreInfo* info = NULL);
    static bool duration(RMfile in,VideoPreInfo* info);
#if (CHECK_VIDEO_BITRATE)
    static bool movSize(RMfile in,VideoPreInfo* info);
#endif
    ~AVIRiff();
    bool isValid()
    {
        return _isValid;
    }
    // Compute the video bitrate (kb/s) from the movi size, the duration and
    // the audio bitrate (kb/s).
#if (CHECK_VIDEO_BITRATE)
    static int videoBitrate(VideoPreInfo* info, double audioBitrate = 256) {
        const double kb = 1000.0; // 1024?
        const double durationInSec = (double)(info->duration) / 1000.0;
        double audioSize = durationInSec * audioBitrate * kb / 8.0; // audio bytes over the whole clip
        double videoSize = (double)(info->movSize) - audioSize;
        return videoSize * 8.0 / durationInSec / kb; // NOTE(review): implicit double->int truncation
    }
#endif
#if (AVI_CHUNHO_SENSOR_FORMAT_1)
    uint8_t* _gps_buffer;        // raw 'gps0' chunk payload (owned)
    size_t _gps_buffer_size;
    uint8_t* _sensor_buffer;     // raw 'gsen' chunk payload (owned)
    size_t _sensor_buffer_size;
    // Hand out the raw GPS / G-sensor chunk buffers (ownership stays here).
    void getChunckData(uint8_t** gps,size_t* gps_size,uint8_t** sensor,size_t* sensor_size)
    {
        *gps = _gps_buffer;
        *sensor = _sensor_buffer;
        *gps_size = _gps_buffer_size;
        *sensor_size = _sensor_buffer_size;
    }
#endif
    typedef enum
    {
        UNDEFINED_STREAM = 0,
        VIDEO_STREAM,
        AUDIO_STREAM,
        TXT_STREAM,
    } AVI_STREAM_TYPE;
    RMfile _file;
    AVIHeader _avi_header; // main file header
    stream_header_t _audioStreamHeader;
    stream_format_auds_t _audioStreamFormat;
    int _videoStreamCount; // number of video streams loaded so far
    stream_header_t _videoStreamHeaders[MAX_VIDEO_STREAM_COUNT]; // one entry per video stream
    stream_format_t _videoStreamFormat[MAX_VIDEO_STREAM_COUNT];
    stream_header_t _txtStreamHeader; // subtitle (TXT) stream header
    int parse_riff();
    int hex_dump_chunk(int chunk_len);
#if (DEBUG_AVI_FORMAT)
    int parse_idx1(int chunk_len);
#endif
    int read_avi_header(); // main header
#if (SENSOR_AVI_SUBTITLE)
    int add_subtitle(long chunkSize); // subtitle handling
#endif
    void print_data_handler(unsigned char *handler); // print a handler fourcc (DEBUG)
    int read_stream_header(stream_header_t *stream_header);
    int read_stream_format(stream_format_t *stream_format);
    int read_stream_format_auds(stream_format_auds_t *stream_format);
    //int read_strd_format(long strdSize);
    //int add_subtitle(long chunkSize);
    int parse_hdrl(unsigned int size);
    int parse_hdrl_list();
    int parse_movi_list(unsigned int size);
};
#endif // #if (FILE_FORMAT_AVI)
#endif // RM_FORMAT_AVI_H

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,308 @@
#ifndef RM_FORMAT_MOV_H
#define RM_FORMAT_MOV_H
#if (FILE_FORMAT_MOV)
// 기본정보(duration + bitrate) 읽기 + 센서 파서
// 기존 mov_reader.h/cpp + rm_mov_format.h/cpp 통합 버전
#if !defined(BBEXTRACT)
#include "../rm_include.h"
#include <QString>
#endif
#include "rm_format.h"
extern "C" {
#include "fileio.h"
}
#include <stdint.h>
// 최대 tag 탐색 깊이
#define RM_MOV_MAX_DEPTH 10
// 마지막 탐색된 tag 의 최대 크기 (eg. 3개 trak 에서 mdia->stbl->stsd stsd 가 text 인 tag 탐색시
#if(RM_MODEL == RM_MODEL_TYPE_TB4000 || RM_MODEL == RM_MODEL_TYPE_MH9000)
#define RM_MOV_MAX_TAG_VALUE_SIZE 100
#else // 4000
#define RM_MOV_MAX_TAG_VALUE_SIZE 10
#endif // 4000
#if (RM_MODEL == RM_MODEL_TYPE_NX_DRW22)
// GPS fix record embedded in the stream; trailing numbers are the byte
// offsets of each field within the record.
typedef struct _GPS0
{
    double lat; // 0
    double lon; // 8
    uint32_t alt; // 16
    uint16_t speed; // 20
    uint8_t year; // 22
    uint8_t month; // 23
    uint8_t day; // 24
    uint8_t hour; // 25
    uint8_t min; // 26
    uint8_t sec; // 27
    uint8_t degree; // 28
    uint8_t status; // 29
    uint8_t version; // 30
    uint8_t reserved;
} GPS0;
#elif (RM_MODEL == RM_MODEL_TYPE_KEIYO1 || RM_MODEL == RM_MODEL_TYPE_MBJ5010 || RM_MODEL == RM_MODEL_TYPE_FC_DR232W || RM_MODEL == RM_MODEL_TYPE_BV2000)
// #define SENSOR_INTERVAL 1 // 2초 간격으로 저장
// #define SENSOR_FPS 40 // " 40개씩 저장됨
#define MAX_SENSOR_FPS 40
#endif // RM_MODEL_TYPE_NX_DRW22
#if (RM_MODEL == RM_MODEL_TYPE_TB4000)
//typedef struct GPSINFOCHUCKTIME_s
//{
// unsigned char byYear; // < Years since 1900
// unsigned char byMon; // < Months since January - [0,11]
// unsigned char byDay; // < Day of the month - [1,31]
// unsigned char byHour; // < Hours since midnight - [0,23]
// unsigned char byMin; // < Minutes after the hour - [0,59]
// unsigned char bySec; // < Seconds after the minute - [0,59]
//} GPSINFOCHUCKTIME;
// GPS info chunk (Telebit variant).
typedef struct _GPSINFOCHUCK_TELEBIT
{
    double dwLat; //< Latitude in NDEG - +/-[degree][min].[sec/60]
    double dwLon; //< Longitude in NDEG - +/-[degree][min].[sec/60]
    long lAlt; //< Altitude in meter +-:under/below sea level
    unsigned short usSpeed; //< Speed unit: km/h
    unsigned char datetime[6]; // byYear(from 1900), byMon, byDay, byHour, byMin, bySec
    //GPSINFOCHUCKTIME sUTC; //< UTC of position
    unsigned char ubDirection; //< Clockwise degree from the North.
    unsigned char ubFlag; //< Check if the GPS data is valid;
    unsigned char ubVersion; //< Structure Version
    unsigned char ubReserved;
} GPSINFOCHUCK_TELEBIT;
#endif // 4000
#if (RM_MODEL == RM_MODEL_TYPE_MH9000)
#pragma pack(push, 1)
// Combined GPS / G-sensor / vehicle-state record ("DVAL"), byte-packed
// (see the surrounding #pragma pack(push, 1)).
typedef struct _gps_chunk_t
{
    char header[4]; // id = "DVAL"
    unsigned short year;
    unsigned short mon;
    unsigned short mday;
    unsigned short hour;
    unsigned short min;
    unsigned short sec;
    char vehicle_id[16];
    char driver_id[16];
    char rec_type; // 0:normal, 1:event
    char event_gsensor;
    char event_smoke;
    char event_dsm;
    char flag_seat_b;
    char flag_side_b;
    char flag_wink_l;
    char flag_wink_r;
    char flag_foot_b;
    char flag_gron_b;
    char blank_a[2];
    char flag_smoke;
    char flag_dsm;
    char blank_b[2];
    unsigned short speed;
    unsigned short rpm;
    unsigned short gps_speed;
    char blank_c[2];
    unsigned int latitude; // lat * 100000
    unsigned int longitude; // long * 100000
    unsigned short gx[10]; // (g x 100) 100ms x 10 = 1sec
    unsigned short gy[10]; // (g x 100) 100ms x 10 = 1sec
    unsigned short gz[10]; // (g x 100) 100ms x 10 = 1sec
    char fw_version[16];
    long lAlt; /**< Altitude in meter +-:under/below sea level*/
    unsigned char ubDirection; /**< Clockwise degree from the North.*/
} gps_chunk_t;
#pragma pack(pop)
#endif // MH9000
// Parser for MOV/MP4 recordings: playback duration, bitrate, and the
// model-specific GPS / G-sensor tracks embedded in the file.
class MOVFormat
{
private:
    VideoReadMode _readMode;    // current read mode
    VideoPreInfo* _preInfo;     // basic info (present when _readMode == PreInfo)
    bool _isValid;              // sensor-related validity flag? -- TODO confirm
    unsigned int _offset;
    RMfile _file;
private:
    // stop parsing
    bool _stop_parse;
    // list of tags to search for (max depth = 10)
    int _tag_count;
    long _tag_list[RM_MOV_MAX_DEPTH];
    // offset of the tag found at each search depth
    unsigned int _tag_offset_list[RM_MOV_MAX_DEPTH];
    // size of the tag found at each search depth
    long _tag_size_list[RM_MOV_MAX_DEPTH];
    // value to verify while searching (for subtitles the track value must be "txt")
    char _tag_search_value[RM_MOV_MAX_TAG_VALUE_SIZE];
    // reset the parser state
    void init_parser();
    void set_tags(const char* tag,...);
    // check whether the current tag value matches _tag_search_value
    bool check_tag_value(long atom_type, unsigned int offset, long size);
    // parse the whole file starting from depth 0
    void parse_all();
    // parse sensor data only
    //void parse_sensor();
    // parse bitrate only
#if !defined(BBEXTRACT)
#if (CHECK_VIDEO_BITRATE)
    bool parse_bitrate();
#endif // CHECK_VIDEO_BITRATE
    void parse_avc1(long offset, long size);    // H264 Video
#endif // #if !defined(BBEXTRACT)
    bool parse_duration();
    bool parse_sensor();
#if (RM_MODEL == RM_MODEL_TYPE_XLDR_88 || SUB_MODEL_CARROT_EMT)
    bool parse_buffer(uint8_t* buffer,long size);
#endif
    // get the offset of the tag selected via set_tags()
    void parse(unsigned int offset, unsigned int length, int depth = 0);
    unsigned int get_tag_list_offset();
#if (RM_MODEL == RM_MODEL_TYPE_NX_DRW22)
    GPS0* _gps0;
    int16_t *_zyx;      // 20201005 changed from XYZ to ZYX on request
#elif (RM_MODEL == RM_MODEL_TYPE_ADT_CAPS || \
       RM_MODEL == RM_MODEL_TYPE_BV2000 || \
       RM_MODEL == RM_MODEL_TYPE_XLDR_88 || \
       RM_MODEL == RM_MODEL_TYPE_KEIYO1 || \
       RM_MODEL == RM_MODEL_TYPE_MBJ5010 || \
       RM_MODEL == RM_MODEL_TYPE_FC_DR232W)
    void* _nmea;        // NMEA INFO
    void* _sens;        // _SEN
#elif (RM_MODEL == RM_MODEL_TYPE_MH9000)
    gps_chunk_t* _sens;
#elif (RM_MODEL_EMT_KR)
    void* _nmea;        // NMEA INFO
#endif
#if (RM_MODEL != RM_MODEL_TYPE_MH9000 && !RM_MODEL_EMT_KR)
    int _gps0Count;
#endif // merged into _sens
    int _gsenCount;
#if (RM_MODEL == RM_MODEL_TYPE_BV2000 ||\
    RM_MODEL == RM_MODEL_TYPE_KEIYO1 ||\
    RM_MODEL == RM_MODEL_TYPE_MBJ5010 ||\
    RM_MODEL == RM_MODEL_TYPE_MH9000 || \
    RM_MODEL == RM_MODEL_TYPE_FC_DR232W)
    int _sensorFPS;
    void process_subtitle(const char* subtitle);
#elif (RM_MODEL_EMT_KR)
    void process_subtitle(const char* subtitle);
#endif
public:
    MOVFormat(RMfile in,VideoReadMode mode = VideoReadSensor, VideoPreInfo* info = NULL);
    static bool duration(RMfile in,VideoPreInfo* info);
#if !defined(BBEXTRACT)
#if (CHECK_VIDEO_BITRATE)
    static bool movSize(RMfile in,VideoPreInfo* info);
#endif
#endif // #if !defined(BBEXTRACT)
    ~MOVFormat();
#if (RM_MODEL_EMT_KR)
    QString modelName;
#endif
    bool isValid()
    {
        return _isValid;
    }
    // Compute the mov bitrate from the MOV size, the playback duration,
    // and the audio bitrate (kb/sec).
#if (CHECK_VIDEO_BITRATE)
    static int videoBitrate(VideoPreInfo* info) {
        const double kb = 1000.0;   // 1024?
        const double durationInSec = (double)(info->duration) / 1000.0;
        double videoSize = (double)(info->movSize);
        return videoSize * 8.0 / durationInSec / kb;
    }
#endif
#if (RM_MODEL == RM_MODEL_TYPE_NX_DRW22)
    int getGPS(GPS0** gps)
    {
        *gps = _gps0;
        return _gps0Count;
    }
    int getSensor(int16_t** sensor)
    {
        *sensor = _zyx;
        return _gsenCount;
    }
#elif (RM_MODEL_EMT_KR)
    int getNMEA(void** nmea)
    {
        *nmea = _nmea;
        return _gsenCount;
    }
// fix: the MH9000 line below originally used a single '|' (bitwise) instead
// of '||'; harmless with 0/1 operands but inconsistent with every sibling.
#elif (RM_MODEL == RM_MODEL_TYPE_ADT_CAPS || \
       RM_MODEL == RM_MODEL_TYPE_XLDR_88 || \
       RM_MODEL == RM_MODEL_TYPE_KEIYO1 || \
       RM_MODEL == RM_MODEL_TYPE_MBJ5010 || \
       RM_MODEL == RM_MODEL_TYPE_BV2000 || \
       RM_MODEL == RM_MODEL_TYPE_MH9000 || \
       RM_MODEL == RM_MODEL_TYPE_FC_DR232W)
    // MH9000 merges GPS into the sensor data
#if (RM_MODEL != RM_MODEL_TYPE_MH9000)
    int getGPS(void** nmea)
    {
        *nmea = _nmea;
        return _gps0Count;
    }
#endif //
    int getSensor(void** sensor)
    {
        *sensor = _sens;
        return _gsenCount;
    }
#if (RM_MODEL == RM_MODEL_TYPE_KEIYO1 || \
    RM_MODEL == RM_MODEL_TYPE_MBJ5010 || \
    RM_MODEL == RM_MODEL_TYPE_FC_DR232W || \
    RM_MODEL == RM_MODEL_TYPE_BV2000 || \
    RM_MODEL == RM_MODEL_TYPE_MH9000)
    int getSensorFPS() {
        return _sensorFPS;
    }
#endif
#elif (RM_MODEL == RM_MODEL_TYPE_TB4000)
    GPSINFOCHUCK_TELEBIT* _nmea;    // NMEA INFO
    int getGPS(GPSINFOCHUCK_TELEBIT** nmea)
    {
        *nmea = _nmea;
        return _gps0Count;
    }
#endif
};
#endif // #if (FILE_FORMAT_MOV)
#endif // RM_FORMAT_MOV_H

View File

@@ -0,0 +1,179 @@
#include "rm_format_mov.h"
#include "fm_parse_gps.h"
#if (RM_MODEL == RM_MODEL_TYPE_XLDR_88)
/* Fourcc chunk flags as stored little-endian in the stream ('GPSR', 'SENS'). */
#define GPSR_FLAG ('G' | 'P' << 8 | 'S' << 16 | 'R' << 24)
#define SENS_FLAG ('S' | 'E' << 8 | 'N' << 16 | 'S' << 24)
/* Reverse the byte order of a 32-bit value.
 * fix: the argument is now fully parenthesized so expression arguments are
 * safe, and the reserved identifier '__num' is renamed. Note the argument
 * is still evaluated four times -- do not pass expressions with side effects. */
#define BYTE_SWAP_32(x) ((((x) >> 24) & 0xff) | (((x) << 8) & 0xff0000) | (((x) >> 8) & 0xff00) | (((x) << 24) & 0xff000000))
//#define GPRS_SENTENCE_SIZE 128
/**
 * @brief Parse the GPSR (GPS) and SENS (G-sensor) chunks found in a raw
 *        'udat' payload buffer.
 *
 * Chunk layout: [int32 big-endian end-offset][fourcc flag][payload].
 * GPS payload: repeated [uint8 packetSize]["$GPRMC..." sentence].
 * Sensor payload: packed float triplets (SENS_PACKET_SIZE each).
 *
 * @param strd     payload bytes (not owned; caller frees)
 * @param strdSize number of valid bytes in @p strd
 * @return true when a chunk header was located and processing succeeded
 */
bool MOVFormat::parse_buffer(uint8_t* strd,long strdSize)
{
    // Upper bound on parsed GPS sentences; must match the allocation below.
    const long maxNmeaCount = 1000;
    long check_offset = 0;
    // The chunk header position varies with the event type, so scan for the
    // fourcc. Stop 4 bytes early so the 32-bit load stays inside the buffer
    // (fix: the original read up to 3 bytes past the end).
    for(long i = 0; i + 4 <= strdSize; i++)
    {
        int32_t probe;
        memcpy(&probe, &strd[i], sizeof(probe)); // alignment/aliasing-safe 32-bit read
        if(probe == GPSR_FLAG || probe == SENS_FLAG)
        {
            check_offset = i;
            break;
        }
    }
    // A 4-byte size field precedes the fourcc, so a match in the first 4
    // bytes is invalid; this also rejects "not found" (check_offset == 0)
    // and prevents the offset underflow the original allowed.
    if(check_offset < 4)
    {
        return false;
    }
    long offset = check_offset - 4; // rewind to the size field in front of the fourcc
    bool gpsDone = false;
    bool sensorDone = false;
    while(offset + 8 <= strdSize)   // need room for size(4) + fourcc(4)
    {
        // The data size (an end offset per the original format notes) is
        // stored big-endian directly in front of each chunk.
        int32_t dataSize;
        memcpy(&dataSize, &strd[offset], sizeof(dataSize));
        offset += 4;
        dataSize = BYTE_SWAP_32(dataSize);
        long nextOffset = (long)dataSize + (check_offset - 4); // start of the following chunk
        int32_t flag;
        memcpy(&flag, &strd[offset], sizeof(flag)); // chunk type fourcc
        offset += 4;
        int index = 0;
        if(flag == GPSR_FLAG) // GPS data
        {
            if(_nmea != NULL)
            {
                free(_nmea);
                _nmea = NULL;
            }
            NMEA_INFO* nmea = (NMEA_INFO*)malloc(sizeof(NMEA_INFO) * maxNmeaCount);
            if(nmea == NULL)
            {
                return false; // out of memory
            }
            _nmea = nmea;
            // Walk the "$GPRMC" sentences; fix: cap at the allocation size
            // (the original could overrun the 1000-entry buffer) and bound
            // every read against strdSize.
            while(offset < dataSize && index < maxNmeaCount)
            {
                if(offset + 1 > strdSize)
                {
                    break; // truncated packet-size byte
                }
                int32_t packetSize = strd[offset];
                offset += 1;
                if(packetSize < 6 || offset + packetSize > strdSize)
                {
                    break; // sentence would run past the buffer
                }
                char* buffer = (char*)&strd[offset];
                if(strncmp(buffer,"$GPRMC",6) != 0)
                {
                    break; // end of the sentence stream
                }
                FMParseGPS::ParseRMC(buffer, &nmea[index], packetSize);
                offset += packetSize;
                index++;
            }
            gpsDone = true;
            _gps0Count = index;
        }
        else if(flag == SENS_FLAG) // G-sensor data
        {
            dataSize -= 8; // exclude the size(4) + fourcc(4) header
            if(dataSize < 0)
            {
                dataSize = 0; // corrupt header
            }
            _gsenCount = dataSize / SENS_PACKET_SIZE;
            if(_sens != NULL)
            {
                free(_sens);
                _sens = NULL;
            }
            float* sens = (float*)malloc(SENS_PACKET_SIZE * _gsenCount);
            if(sens == NULL)
            {
                _gsenCount = 0;
                return false; // out of memory
            }
            _sens = sens;
#if (DEBUG_SENSOR_DATA)
            // fix: originally referenced nonexistent member _sensorCount
            printf("sensor data size:%d count:%d\n", dataSize, _gsenCount);
#endif
            while(index < _gsenCount && offset + (long)SENS_PACKET_SIZE <= strdSize)
            {
                memcpy(&sens[index * NUM_SENSOR_DATA],&strd[offset],SENS_PACKET_SIZE);
                offset += SENS_PACKET_SIZE;
#if (DEBUG_SENSOR_DATA)
                // fix: originally referenced nonexistent member _sensorData
                printf("idx:%d x:%.5f y:%.5f z:%.5f\n", index,
                       sens[(index * NUM_SENSOR_DATA)+0],
                       sens[(index * NUM_SENSOR_DATA)+1],
                       sens[(index * NUM_SENSOR_DATA)+2]);
#endif
                index++;
            }
            sensorDone = true;
        }
        else
        {
            break; // unknown chunk type: stop parsing
        }
        if(gpsDone == true && sensorDone == true)
        {
            break; // both chunk types handled
        }
        offset = nextOffset; // jump to the next chunk header
    }
    return true;
}
/**
 * @brief Locate the 'udat' atom and parse the GPS/G-sensor payload it carries.
 * @return true when the atom was found and its payload was handed to
 *         parse_buffer()
 */
bool MOVFormat::parse_sensor()
{
    init_parser();
    // 1. 'udat': the user-data atom carrying the GPS/sensor chunks
    set_tags("udat",NULL);
    // start searching
    parse_all();
    if(_stop_parse == false)
    {
        return false; // tag was not found
    }
    unsigned int udat_offset = _tag_offset_list[0];
    long udat_size = _tag_size_list[0];
    // need more than the 8-byte atom header for any payload to exist
    // (fix: the original only checked udat_size != 0)
    if(udat_offset != 0 && udat_size > 8)
    {
        long payloadSize = udat_size - 8; // skip the atom size + type header
        RMfseek(_file,udat_offset+8,SEEK_SET);
        uint8_t *buffer = (uint8_t*)malloc(payloadSize);
        if(buffer == NULL)
        {
            return false; // out of memory
        }
        RMfread(buffer,payloadSize,1,_file);
        parse_buffer(buffer,payloadSize);
        free(buffer); // fix: the original leaked this temporary buffer
        return true;
    }
    return false;
}
#endif

View File

@@ -0,0 +1,10 @@
#include "rm_overwrite.h"
// Static storage for RMOverwrite's shared overwrite-prompt state
// (see RMOverwrite in rm_overwrite.h).
QMutex RMOverwrite::_lock;
QWaitCondition RMOverwrite::_wait;
int RMOverwrite::gCurrent = OVERWRITE_OPTION_ASK | OVERWRITE_OPTION_ALL;    // current dialog choice
QString RMOverwrite::currentFileName;   // name of the duplicated file being prompted for
int RMOverwrite::currentCount = 0;      // number of files remaining in the backup

View File

@@ -0,0 +1,56 @@
#ifndef RM_OVERWRITE_H
#define RM_OVERWRITE_H
#include <QWaitCondition>
#include <QObject>
#include <QMutex>
// How to handle a file that already exists at the destination during backup.
typedef enum
{
    OVERWRITE_OPTION_ASK = 1 << 0,      // ask the user
    OVERWRITE_OPTION_SKIP = 1 << 1,     // skip this file
    OVERWRITE_OPTION_WRITE = 1 << 2,    // overwrite it
    OVERWRITE_OPTION_CANCEL = 1 << 3,   // cancel the backup
    OVERWRITE_OPTION_ALL = 1 << 4,      // apply the choice to all remaining files
} OVERWRITE_OPTION;
// Coordinates the "file already exists" prompt between the backup worker and
// the UI: the worker blocks in wait() until the dialog stores the user's
// choice in gCurrent and calls unwait().
class RMOverwrite
{
private:
    static QMutex _lock;            // serializes the prompt state during backup (duplicate file, etc.)
    static QWaitCondition _wait;    // blocks the backup thread until the user answers
public:
    static void lock () {
        RMOverwrite::_lock.lock();
    }
    static void unlock () {
        RMOverwrite::_lock.unlock();
    }
    // Block until unwait() is called; must hold lock() first.
    static void wait() {
        RMOverwrite::_wait.wait(&RMOverwrite::_lock);
    }
    // Wake every thread blocked in wait().
    static void unwait() {
        RMOverwrite::_wait.wakeAll();
    }
    // NOTE(review): the initial value of gCurrent (in the .cpp) is
    // OVERWRITE_OPTION_ASK | OVERWRITE_OPTION_ALL, but reset() drops the
    // ALL bit -- confirm this asymmetry is intentional.
    static void reset() {
        RMOverwrite::gCurrent = OVERWRITE_OPTION_ASK;
        RMOverwrite::currentFileName = "";
        RMOverwrite::currentCount = 0;
    }
    static QString currentFileName; // name of the duplicated file
    static int currentCount;        // number of files remaining
    //static OVERWRITE_OPTION gGlobal;  // global option (eg. skip all, etc)
    static int gCurrent;            // current option (dialog selection)
    // True when every bit of 'with' is set in the current option.
    static bool check(OVERWRITE_OPTION with) {
        return ((RMOverwrite::gCurrent & with) == with);
    }
};
#endif // RM_OVERWRITE_H

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,328 @@
#ifndef RM_SENSORDATA_H
#define RM_SENSORDATA_H
#include <stdio.h>
#include <stdint.h>
#include <vector>
#if !defined(BBEXTRACT)
#include "../rm_include.h"
#endif // #if !defined(BBEXTRACT)
#if !defined(MIN)
#define MIN(a,b) (((a)<(b))?(a):(b))
#endif
#if !defined(MAX)
#define MAX(a,b) (((a)>(b))?(a):(b))
#endif
#include "rm_format_avi.h"
#define DEBUG_SENSOR_DATA 0
#define GPRMC_VALID_CHECK 1
#define NUM_SENSOR_DATA 3 // XYZ
#define SENS_PACKET_SIZE (NUM_SENSOR_DATA * sizeof(float))
#if (RM_MODEL_EMT_KR)
// GPS + sensor values are present in every subtitle entry, in equal numbers.
typedef struct _NMEA_INFO
{
    uint16_t nYear;         // year (UTC; "+9 hours" per original note -- TODO confirm local-time handling)
    uint8_t nMonth;         // month (UTC)
    uint8_t nDay;           // day (UTC)
    uint8_t nHour;          // hour (UTC)
    uint8_t nMin;           // minute (UTC)
    uint8_t nSec;           // second (UTC)
    uint8_t reserved;       // padding ... (64-bit pack)
    double Latitude;        // latitude
    double Longitude;       // longitude
    float Speed;            // speed (Km/h)
    float x;
    float y;
    float z;
    float Voltage;          // voltage
    float Temperature;      // temperature
    uint8_t nStatus;        // fix status (0,1)
    uint8_t nAngle;         // heading angle
#if (USE_TRIGGER)
    uint8_t eTrigger;       // trigger E (the index whose value is not 255 marks the event position)
    uint8_t mTrigger;       // trigger M (the index whose value is not 255 marks the event position)
    uint8_t reserved2[4];   // 64-bit pack
#else // USE_TRIGGER
    uint8_t reserved2[6];   // 64-bit pack
#endif // USE_TRIGGER
} NMEA_INFO;
#else // #if (RM_MODEL_EMT_KR)
// GPS-only record variant used by the non-EMT_KR models.
typedef struct _NMEA_INFO
{
    uint16_t nYear;         // year (UTC; "+9 hours" per original note -- TODO confirm local-time handling)
    uint8_t nMonth;         // month (UTC)
    uint8_t nDay;           // day (UTC)
    uint8_t nHour;          // hour (UTC)
    uint8_t nMin;           // minute (UTC)
    uint8_t nSec;           // second (UTC)
    uint8_t reserved;       // padding ... (64-bit pack)
    double Latitude;        // latitude
    double Longitude;       // longitude
    double Speed;           // speed (Km/h)
    uint8_t nStatus;        // fix status (0,1)
    uint8_t nAngle;         // heading angle
    uint8_t reserved2[6];   // 64-bit pack
} NMEA_INFO;
#endif //
#if (RM_MODEL == RM_MODEL_TYPE_ADT_CAPS && !SUB_MODEL_CARROT_EMT)
// One G-sensor record for the ADT_CAPS model.
typedef struct _SEN {
    float x;            // acceleration axes -- presumably in g units, TODO confirm
    float y;
    float z;
    float gcal;         // presumably a G calibration value -- TODO confirm
    uint32_t drive;     // driving-mode field -- semantics not visible here, TODO confirm
    uint32_t parking;   // parking-mode field -- semantics not visible here, TODO confirm
} SEN;
#endif // RM_MODEL_TYPE_ADT_CAPS
// Loads and exposes the GPS and G-sensor tracks of a recording (AVI or MOV).
class RMSensorData
{
public:
    // Source container type of the recording.
    typedef enum
    {
        FILE_TYPE_AVI = 0,
        FILE_TYPE_MOV = 1,  // MP4
    } FILE_TYPE;
    RMSensorData(FILE* f,FILE_TYPE type);
    ~RMSensorData();
    QString modelName;
    uint32_t getSensorCount()
    {
        return _sensorCount;
    }
#if (RM_MODEL_EMT_KR)
    const NMEA_INFO* getSensor()
    {
        return _sensorData;
    }
    uint32_t getGPSCount()
    {
        return _bGPSExist ? _sensorCount : 0;
    }
    const NMEA_INFO* getGPS()
    {
        return _bGPSExist ? _sensorData : NULL;
    }
    // Coordinate at a playback position (ratio 0..1); false when unavailable.
    const bool getGPSCoord(double ratio,double* lat, double* lon)
    {
        // fix: count guard prevents _sensorCount-1 wrapping when the
        // (unsigned) count is 0, which read out of bounds
        if(_bGPSExist && _sensorCount > 0)
        {
            unsigned int index = (int)((double)_sensorCount * ratio);
            if(index <= _sensorCount) // the final second is within tolerance
            {
                index = MIN(index,_sensorCount-1);
                *lat = _sensorData[index].Latitude;
                *lon = _sensorData[index].Longitude;
                return true;
            }
        }
        return false;
    }
    // Speed (Km/h) at a playback position, or -1 when no valid fix.
    const double getGPSSpeed(double ratio)
    {
        // fix: count guard prevents _sensorCount-1 wrapping when count is 0
        if(_bGPSExist && _sensorCount > 0)
        {
            unsigned int index = (int)((double)_sensorCount * ratio);
            if(index <= _sensorCount) // the final second is within tolerance
            {
                index = MIN(index,_sensorCount-1);
                return _sensorData[index].nStatus == 1 ? _sensorData[index].Speed : -1;
            }
        }
        return -1;
    }
    // Position and speed at a playback position; true only on a valid fix.
    bool getGPSPosition(double ratio,double* lonX, double* latY, double* speed)
    {
        if(_bGPSExist)
        {
            unsigned int index = (int)((double)_sensorCount * ratio);
            if(index < _sensorCount && _sensorData[index].nStatus == 1)
            {
                *lonX = _sensorData[index].Longitude;
                *latY = _sensorData[index].Latitude;
                *speed = _sensorData[index].Speed;
                return true;
            }
        }
        return false;
    }
#else // RM_MODEL_EMT_KR
    const float* getSensor()
    {
        return _sensorData;
    }
    uint32_t getGPSCount()
    {
        return _bGPSExist ? _gpsCount : 0;
    }
    const NMEA_INFO* getGPS()
    {
        return _bGPSExist ? _gpsData : NULL;
    }
    // Coordinate at a playback position (ratio 0..1); false when unavailable.
    const bool getGPSCoord(double ratio,double* lat, double* lon)
    {
        // fix: count guard prevents _gpsCount-1 wrapping when the
        // (unsigned) count is 0, which read out of bounds
        if(_bGPSExist && _gpsCount > 0)
        {
            unsigned int index = (int)((double)_gpsCount * ratio);
            if(index <= _gpsCount) // the final second is within tolerance
            {
                index = MIN(index,_gpsCount-1);
                *lat = _gpsData[index].Latitude;
                *lon = _gpsData[index].Longitude;
                return true;
            }
        }
        return false;
    }
    // Speed (Km/h) at a playback position, or -1 when unavailable.
    const double getGPSSpeed(double ratio)
    {
#if !(SPEED_ALWAYS_EXITS)
        if(_bGPSExist)
#endif
        {
            // fix: count guard prevents _gpsCount-1 wrapping when count is 0
            if(_gpsCount > 0)
            {
                unsigned int index = (int)((double)_gpsCount * ratio);
                if(index <= _gpsCount) // the final second is within tolerance
                {
                    index = MIN(index,_gpsCount-1);
#if (SPEED_ALWAYS_EXITS)
                    return _gpsData[index].Speed;
#else
                    return _gpsData[index].nStatus == 1 ? _gpsData[index].Speed : -1;
#endif
                }
            }
        }
        return -1;
    }
    // Position and speed at a playback position; true only on a valid fix.
    bool getGPSPosition(double ratio,double* lonX, double* latY, double* speed)
    {
        if(_bGPSExist)
        {
            unsigned int index = (int)((double)_gpsCount * ratio);
            if(index < _gpsCount && _gpsData[index].nStatus == 1)
            {
                *lonX = _gpsData[index].Longitude;
                *latY = _gpsData[index].Latitude;
                *speed = _gpsData[index].Speed;
                return true;
            }
        }
        return false;
    }
#if (CLIP_SENSOR_DATA)
    void clipWithDuration(qint64 duration);
    void clipSensorCount(int number);
    void clipGPSCount(int number);
    void fixGPS();
#endif
#endif // RM_MODEL_EMT_KR
#if (USE_TRIGGER)
    float triggerE; // trigger position (ratio 0~1.0)
    float triggerM;
#endif // USE_TRIGGER
#if defined(MODEL_BBVIEWER) && (!MODEL_WATEX)
    // OBD speed at a playback position, or -1 when unavailable.
    const double getOBDSpeed(double ratio)
    {
        if(_sensorData != NULL && _sensorCount > 0)
        {
            unsigned int index = (int)(((double)_sensorCount) * ratio);
            if(index < _sensorCount)
            {
                return _OBDSpeed[index];
            }
        }
        // fix: originally 'return true;' (i.e. 1.0) from a double-returning
        // function; use the same -1 "no data" sentinel as getGPSSpeed().
        return -1;
    }
#endif // BB
    // Synchronize the sensor data with the playback duration
    // (pad when too short, clip when too long).
    void processWithDuration(qint64 ms);
private:
#if (CLIP_SENSOR_DATA)
    int _skipSensorCount;
#endif
    bool _bGPSExist;
    long _nLastAngle[2];    // scratch state for RMC data processing
#if(RM_MODEL_EMT_KR)
    uint32_t _sensorCount;
    NMEA_INFO* _sensorData;
#else // RM_MODEL_EMT_KR
#if (RM_MODEL == RM_MODEL_TYPE_KEIYO1 || \
    RM_MODEL == RM_MODEL_TYPE_MBJ5010 || \
    RM_MODEL == RM_MODEL_TYPE_FC_DR232W || \
    RM_MODEL == RM_MODEL_TYPE_BV2000 || \
    RM_MODEL == RM_MODEL_TYPE_MH9000 )
    int _sensorFPS;
#endif
    uint32_t _gpsCount;
    uint32_t _sensorCount;
#if defined(BBEXTRACT)
    GSENSOR* _sensorData;
#else
    float* _sensorData;
#endif
    NMEA_INFO* _gpsData;
#if (MODEL_BBVIEWER && !MODEL_WATEX)
    float* _OBDSpeed;   // original note: check whether this is GPS or OBD speed -- TODO confirm
#endif
#endif // !RM_MODEL_EMT_KR
#if (FILE_FORMAT_MOV)
    bool loadMOV(FILE* f);
#if !(RM_MODEL_EMT_KR)
#if(RM_MODEL == RM_MODEL_TYPE_MH9000)
    bool loadMOVGPSNR(void* p,uint32_t count);
#else // RM_MODEL_TYPE_MH9000
    bool loadMOVGPSNR(void* p,uint32_t count, void* ps, uint32_t sensorCount);
    //bool loadMOVGPSNR(void* p,uint32_t count);
#endif // RM_MODEL_TYPE_MH9000
#endif // #if !(RM_MODEL_EMT_KR)
#endif // FILE_FORMAT_MOV
#if (FILE_FORMAT_AVI)
    bool loadAVIRiff(FILE* f);  // TYPE 1
#if (AVI_CHUNHO_SENSOR_FORMAT_1)
    bool loadAVIChunck(uint8_t* gps, size_t gps_size,uint8_t* sensor, size_t sensor_size);
#endif
#if (SENSOR_AVI_SUBTITLE)
    bool loadAVISubTitle(QList<char*>* subTitles);  // load AVI TYPE 2
#endif
#endif // FILE_FORMAT_AVI
};
#endif // RM_SENSORDATA_H

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,236 @@
#ifndef RM_VIDEOITEM_2CH_H
#define RM_VIDEOITEM_2CH_H
// 각 채널별 파일이 구분되어 있는 단말기의 경우 RMVideoItem 을 별도로 사용함
#include "../rm_include.h"
#include "rm_video_list.h"
#include <QObject>
#include <QString>
#include <QRunnable>
#include <QDateTime>
#include <QThread>
// 분리 채널 파일 포멧만 사용
#if (RM_USE_SEPARATED_CH_FILE)
class RMSensorData;
// Video list item for devices that store each channel in a separate file.
class RMVideoItem : public QObject
{
    Q_OBJECT
    friend class RMVideoListLoader;
    friend class RMVideoItemLoader;
    friend class RMPlayerBase;
public:
#ifdef _DEBUG
    void print();
#endif
#if (USE_DEBUG_FUNCTIONS)
    static void make_many_files(QString path);
#endif
    RMVideoFileList::GROUP_TYPE type;
    bool checked;       ///! selection flag used for backup
    bool dropItem;      // dropitem = single path
    bool added;         // cleared when added so newly-added items can be identified
    // bool forceSwap;  // when only the rear channel exists => use the player's RearSwap
    int width;          // resolution (1CH; currently implemented for AVI only)
    int height;         // "
    QString filePath;
    QString fileName;   // file name excluding the directory path
#if (RM_MODEL == RM_MODEL_TYPE_AN6000)
    QString decodedPath;
    void removeTemp();
#endif // RM_MODEL_TYPE_AN6000
#if (DUAL_CH_FILE && DUAL_CH_1CH_EXIST)
    bool only1CH;
#endif
#if (RM_MODEL_EMT_KR)
    QString modelName;  //! video from a WIDE-capable model
#endif
#if (TRI_CHANNEL)
    RMVideoItem(QString path,int CH,QDateTime* pDateTime);  // CH 1,2,3
#else
    RMVideoItem(QString path,bool CH2,QDateTime* pDateTime);
#endif
#if (RM_MODEL_EMT_KR)
    bool isWideMode() {
        return (modelName == "360X" && realCHCount() > 2);
    }
#endif
    // MP4 CH2 file
    QString filePathCH2;
#if (TRI_CHANNEL)
    QString filePathCH3;
    bool isCH3Exist() {
        return !filePathCH3.isEmpty();
    }
#elif (PENTA_CHANNEL)
    QString filePathCH3;
    QString filePathCH4;
    QString filePathCH5;
#endif // PENTA_CHANNEL
    // Returns whichever channel path is available (front preferred).
    QString& anyFilePath()
    {
        return filePath.length() > 0 ? filePath : filePathCH2;
    }
    bool isRearOnly() {
        return (filePath.isEmpty() && (filePathCH2.isEmpty() == false));
    }
    bool isFrontOnly() {
        return (filePathCH2.isEmpty() && (filePath.isEmpty() == false));
    }
    bool isSingleChannel()
    {
#if (DUAL_CH_FILE && DUAL_CH_1CH_EXIST) // 2CH files exist, but so do 1CH files without a rear channel
        return only1CH;
#elif (DUAL_CH_FILE || SINGLE_CH_VIEWER)
        return false;
#else
        return (filePath.length() == 0 || filePathCH2.length() == 0);
#endif
    }
#if (TRI_CHANNEL2)
    int _realCHCount;
    //! \brief Number of channels, excluding channels with no frames.
    //! Determined by AVDemux; the initial value is 0.
    //! \return
    int realCHCount() {
        return _realCHCount;
    }
#endif // TRI_CHANNEL2
#if (CHECK_VIDEO_BITRATE)
    // Check the rear-channel video bitrate and drop the channel when it is
    // below a threshold.
    void testBitrate2CH();
#endif
#if !(SINGLE_CH_VIEWER)
#if (!FORCE_2CH || (RM_MODEL == RM_MODEL_TYPE_XLDR_88 || SUB_MODEL_CARROT_EMT))
    // Returns the file name with the channel information removed.
    static QString fileNameWithoutChannel(QString& path);
#endif
#endif // SINGLE_CH_VIEWER
    ~RMVideoItem();
    static bool FileExist(QString path);
    static int IsFeasible(QString path, QDateTime* pDateTime = NULL);
    int fileFormat();
    QString title();
    QString typeString();
    QString titleDate();
    QString titleTime();
    QString titleDateTime();
    QString titleDuration();
#if !(DUAL_CH_FILE || SINGLE_CH_VIEWER)
    QString titleFrontRear();
#endif
    QString titlePrefix();
    QString titleSize();
    QString titleCapture(qint64 secs);
    RMSensorData* getSensorData()
    {
        return sensorData;
    }
    //static QString recoveredPath(QString filePath);
    static QString durationString(unsigned int sec,bool hms);
    unsigned int durationInMSecs()
    {
        return _durationInMSecs;
    }
#if (PLAY_SYNC_FIX2)
    unsigned int packetDurationInMSecs; // original note: only produced once playback starts??? -- TODO confirm
#endif // PLAY_SYNC_FIX2
    // ?? original note: appears to be unused??? -- TODO confirm
    QDateTime dateTimeInPosition(qreal ratio,double* lat, double* lon); // get the time from the overall play ratio
    bool isValid()
    {
#if (SKIP_VIDEO_PREINFO)
        return true;
#else
        return (_durationInMSecs > 300); // 0.3?
#endif
    }
    QDateTime& startTime()
    {
        return _dateTime;
    }
#if (CHECK_REAR_DURATION)
    bool isRearDuration();
#endif
    // sort by index
    //int fileIndex();
protected:
#if (!PRE_LOAD_SENSOR_DATA)
    // Lazily load the sensor data, signalling start/end around the work.
    void load()
    {
        emit loadSensorInfoStarted();
        if(getSensorData() == NULL)
        {
            loadSensorInfo();
        }
        emit loadSensorInfoEnd();
    }
#endif
    void loadSensorInfo();
private:
signals:
    void loadSensorInfoStarted();
    void loadSensorInfoEnd();
    void loadSensorInfoFail();
private:
#if (TRI_CHANNEL)
    QString filePathWithCH(int ch);
#endif
    QDateTime _dateTime;
#if (TRI_CHANNEL)
    bool _loadDuration(int ch); // 1,2,3
#else
    bool _loadDuration(bool rear = false);
#endif
    qint64 _fileSize;
    // some files are shorter than 1 second
    unsigned int _durationInMSecs;
    RMSensorData* sensorData;
    static QDateTime _fileNameToDateTime(QString baseName);
public slots:
    void onChecked();
};
#endif // #if (RM_USE_SEPARATED_CH_FILE)
#endif // RM_VIDEOITEM_2CH_H

View File

@@ -0,0 +1,2 @@
#include "rm_video_item_loader.h"

View File

@@ -0,0 +1,26 @@
#ifndef RM_VIDEO_ITEM_LOADER_H
#define RM_VIDEO_ITEM_LOADER_H
#include <QObject>
#include <QRunnable>
#include "rm_video_item_2ch.h"
#if (!PRE_LOAD_SENSOR_DATA)
// QRunnable that loads a video item's sensor data on a pool thread.
class RMVideoItemLoader : public QRunnable
{
private:
    RMVideoItem* _item; // item to load; not owned by the runnable
public:
    RMVideoItemLoader(RMVideoItem* item) : _item(item)
    {
    }
    // Invoked by QThreadPool: delegate to the item's lazy load routine.
    virtual void run()
    {
        _item->load();
    }
};
#endif // PRE_LOAD_SENSOR_DATA
#endif // RM_VIDEO_ITEM_LOADER_H

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,435 @@
#ifndef RM_VIDEOFILELIST_H
#define RM_VIDEOFILELIST_H
#include <QObject>
#include <QUrl>
#include <qstring.h>
#include <QMainWindow>
#include <QDebug>
#include "../rm_include.h"
class RMVideoItem;
// #include "rm_video_group.h"
// Holds the loaded recording list: filtering, playback selection, and backup.
class RMVideoFileList : public QObject
{
    Q_OBJECT
public:
    friend class RMWidgetVideoListDelegate;
    friend class RMWidgetVideoList;
    friend class RMVideoFileListLoader;
    friend class RMVideoFileListBackup;
    static int n_lastPercent;
    static QList<QString> fileFilters;
#if (SUPPORT_LOADING_CANCEL)
    // cancel loading
    bool bCancelLoading;
#endif
    // move to item
    typedef enum
    {
        // normal driving / driving event / parking / parking impact / manual recording ( / archive??? )
#if (RM_MODEL_EMT_KR)
        FILTER_NONE = 0,
        FILTER_NORMAL = 1 << 0,     // normal driving
        FILTER_EVENT = 1 << 1,      // driving event
        FILTER_PARK = 1 << 2,       // parking
        FILTER_PARK_EVENT = 1 << 3, // parking impact
        FILTER_MANUAL = 1 << 4,     // manual recording
        FILTER_MYBOX = 1 << 5,      // archive (any file whose folder is MYBOX, regardless of type)
#else // RM_MODEL_EMT_KR
        FILTER_NONE = 0,
        FILTER_NORMAL = 1 << 0,
        FILTER_EVENT = 1 << 1,
#if ((RM_MODEL == RM_MODEL_TYPE_XLDR_88 || SUB_MODEL_CARROT_EMT) || \
    RM_MODEL == RM_MODEL_TYPE_BV2000 || \
    RM_MODEL == RM_MODEL_TYPE_KEIYO1 || \
    RM_MODEL == RM_MODEL_TYPE_TBD360 || \
    RM_MODEL == RM_MODEL_TYPE_MBJ5010 || \
    RM_MODEL == RM_MODEL_TYPE_MH9000 || \
    RM_MODEL == RM_MODEL_TYPE_FC_DR232W)
        FILTER_PARK = 1 << 2,
#endif // models that support parking
#if (RM_MODEL == RM_MODEL_TYPE_KEIYO1 ||\
    RM_MODEL == RM_MODEL_TYPE_BV2000 || \
    RM_MODEL == RM_MODEL_TYPE_MH9000 || \
    RM_MODEL == RM_MODEL_TYPE_MBJ5010 ||\
    RM_MODEL == RM_MODEL_TYPE_FC_DR232W)
        FILTER_MANUAL = 1 << 3,
        FILTER_ALL = FILTER_NORMAL | FILTER_EVENT | FILTER_PARK | FILTER_MANUAL,
#elif (RM_MODEL == RM_MODEL_TYPE_XLDR_88 || SUB_MODEL_CARROT_EMT || RM_MODEL == RM_MODEL_TYPE_MH9000 )
        FILTER_ALL = FILTER_NORMAL | FILTER_EVENT | FILTER_PARK,
#elif !(RM_MODEL == RM_MODEL_TYPE_TBD360)
        FILTER_ALL = FILTER_NORMAL | FILTER_EVENT,
#endif
#endif // RM_MODEL_EMT_KR
    } FILTER;
    // configurable per device model
    typedef enum
    {
        TYPE_NORMAL = 0,
        TYPE_EVENT = 1,
        TYPE_MANUAL = 2,
        TYPE_PARKING = 3,
        TYPE_PARKING_EVENT = 4,
        TYPE_PARKING_MOTION = 5,
#if (RM_MODEL_EMT_KR)
        TYPE_MY_BOX = 6,
#endif // #if (RM_MODEL_EMT_KR)
        TYPE_UNDEFINED = 9999,
    } GROUP_TYPE;
#if (RM_MODEL == RM_MODEL_TYPE_AN6000)
    void removeTemps();
#endif //
#if (RM_MODEL_EMT_KR)
    static RMVideoFileList::GROUP_TYPE parseName(QString baseName, QDateTime* dateTime, int* tag);
#endif
#if (USE_1HOUR_FILTER)
    //! \brief Configure the 1-hour filter.
    //! \param b1Hour true: show 1-hour units, false: show everything
    //! \param bEmit whether to emit listUpdateEnd
    void set1HourList(bool b1Hour, bool bEmit, RMVideoItem* selected);
    bool get1HourList()
    {
        return b1HourList;
    }
    //! \brief Sort chronologically or newest-first.
    //! \param bAsc true: chronological order
    void setSortList(bool bAsc);
    bool getSortList()
    {
        return bSortDsc;
    }
    bool bSortDsc;  // chronological vs newest-first ordering
    //! \brief Extract the 1-hour items from the full item list.
    //! \param res result
    void load1HourList(QList<RMVideoItem*>& res);
    //! \brief Extract every RMVideoItem belonging to the given item.
    //! - used by the thumbnail view
    //! \param dt time of the item to search for
    //! \param res result
    void load1HourInList(QDateTime dt, QList<RMVideoItem*>& res);
    //! \brief Extract thumbnail paths from an RMVideoItem list.
    //! \param src VideoItem list to extract from
    //! \param res thumbnail paths <CH1,CH2>
    void loadThumbnails(QList<RMVideoItem*>& src, QList<QPair<QString,QString>>& res);
#endif // USE_1HOUR_FILTER
#if (USE_DATE_TIME_LIST)
    //! \brief For the given year/month, return only the dates that contain video.
    //! \param year selected year
    //! \param month selected month
    //! \param ret<out> list of days that contain video
    //! \param dayItems<out> that day's file list, returned to improve speed
    void getMonthList(int year, int month, QSet<int>& ret, QList<RMVideoItem*>& dayItems);
    //! \brief First recording date among the loaded files.
    //! \return
    QDate getFirstDate();
#endif// USE_DATE_TIME_LIST
#ifdef _DEBUG
    void print();
#endif
#if !(USE_1HOUR_FILTER)
    //! \brief Clear the items checked for backup.
    void clearChecked();
#endif // #if !(USE_1HOUR_FILTER)
private:
#if (USE_1HOUR_FILTER)
    bool b1HourList;    // 1-hour-unit list mode
public:
    int count1Hour;     // for text display
    //! \brief After loading, count the All and 1-hour videos.
    void count1HourItems();
    QDateTime simpleFromDateTime(QDateTime& startTime);
    //! \brief Used for the list text color.
    //! \param startTime
    //! \return
    QDateTime simpleFromDateTime2(QDateTime& startTime);
private:
    //! \brief Keep the current selection when switching between the full
    //! and 1-hour lists.
    //! \param searchItem currently selected item
    //! \return the item to select afterwards
    RMVideoItem* findFirstItemIn1Hour(RMVideoItem* searchItem);
#endif // #if (USE_1HOUR_FILTER)
    QList<RMVideoItem*> _items;         // all items
    QList<RMVideoItem*> _filteredItems; // items after applying the filter
#if !(USE_1HOUR_FILTER)
    void _updateItemsByFilter();    // run after the filter changes
#endif // #if (USE_1HOUR_FILTER)
#if !(RECURSIVE_APPEND_FILE)
    static void _appendFrontRear(QString& folder,QList<QUrl>& list);
#endif
#if (!(FORCE_2CH) && (!SINGLE_CH_VIEWER))
#if (TRI_CHANNEL || PENTA_CHANNEL)
    QString _checkChannelInfo(QString filePath,int *ch);
#else // TRI_CHANNEL
    QString _checkChannelInfo(QString filePath,bool *isCH2, bool *is2CH = NULL);
#endif // TRI_CHANNEL
    bool _addOtherChannelFile(RMVideoItem* item);
#endif
    void _backupOverwrite(QString target,QString src,int countLeft);
public:
    void loadFromList(QList<QUrl> list,bool bPlayFirstAdded = false);
    //! \brief Clear the list and emit the update event.
    void clearList();
    //! \brief Play the file when the item exists.
    //! \param item item to play
    void playItem(RMVideoItem* item);
    //! \brief Find an item by file path.
    //! \param path path to search for
    //! \return
    RMVideoItem* itemWithPath(QString path);
    //!
    //! \brief Returns the index of the currently selected filter
    //! (used to check the selected recording type after loading).
    //!
    int currentFilterIndex();
protected:
    int _filter;
    RMVideoItem* _playItem; // item currently playing
    void backup(QString dest);  // preserve the checked items
    bool addItem(QString filePath, QDateTime* pDateTime,GROUP_TYPE type);
    bool itemExist(QString filePath);
    // changed so that only the list widget may call this
    void setPlayItem(RMVideoItem* item)
    {
        _playItem = item;
    }
public:
    explicit RMVideoFileList(QObject* parent = 0);
    //! \brief Find the play item by file-name prefix.
    //! \param prefix e.g. 20231007-040556_PSR0_0017
    //! \return the matching play-list item, or NULL when absent
    RMVideoItem* searchPlayItem(QString prefix);
    // Map a recording group type to its list-filter bit for the current model.
    static FILTER filterTypeFromGroupType(GROUP_TYPE type)
    {
        FILTER t = FILTER_NONE;
        switch (type) {
#if (RM_MODEL == RM_MODEL_TYPE_XLDR_88)
        case TYPE_NORMAL:
        case TYPE_MANUAL:
            t = FILTER_NORMAL;
            break;
        case TYPE_EVENT:
        case TYPE_PARKING_EVENT:
        case TYPE_PARKING_MOTION:
            t = FILTER_EVENT;
            break;
        case TYPE_PARKING:
            t = FILTER_PARK;
            break;
        }
#elif (RM_MODEL == RM_MODEL_TYPE_KEIYO1 || RM_MODEL == RM_MODEL_TYPE_MBJ5010 || RM_MODEL == RM_MODEL_TYPE_FC_DR232W || RM_MODEL == RM_MODEL_TYPE_BV2000 || RM_MODEL == RM_MODEL_TYPE_MH9000)
        case TYPE_NORMAL:
            t = FILTER_NORMAL;
            break;
//#if !(RM_MODEL == RM_MODEL_TYPE_MH9000)
        case TYPE_MANUAL:
            t = FILTER_MANUAL;
            break;
//#endif // RM_MODEL == RM_MODEL_TYPE_MH9000
        case TYPE_PARKING:
        case TYPE_PARKING_EVENT:
            t = FILTER_PARK;
            break;
        case TYPE_EVENT:
        case TYPE_PARKING_MOTION:
            t = FILTER_EVENT;
            break;
        }
#elif (RM_MODEL_EMT_KR)
        case TYPE_NORMAL:
            t = FILTER_NORMAL;
            break;
        case TYPE_EVENT:
            t = FILTER_EVENT;
            break;
        case TYPE_PARKING:
            t = FILTER_PARK;
            break;
        case TYPE_PARKING_EVENT:
            t = FILTER_PARK_EVENT;
            break;
        case TYPE_MANUAL:
            t = FILTER_MANUAL;
            break;
        case TYPE_MY_BOX:
            t = FILTER_MYBOX;
            break;
        }
#else
        case TYPE_NORMAL:
            t = FILTER_NORMAL;
            break;
        case TYPE_EVENT:
        case TYPE_PARKING_EVENT:
        case TYPE_PARKING_MOTION:
            t = FILTER_EVENT;
            break;
        }
#endif
        return t;
    }
    QList<RMVideoItem*>& allItems() { return _items; }
    QList<RMVideoItem*>& filteredItems()
    {
        return _filteredItems;
    }
    void checkedItems(QList<RMVideoItem*>& items);
#if (PLAYER_ONLY_LIBRARY_MODE)
    static RMVideoFileList * _instance;
    static void updateInstance(RMVideoFileList* instance)
    {
        _instance = instance;
    }
    static RMVideoFileList* instance()
    {
        return _instance;
    }
#else // PLAYER_ONLY_LIBRARY_MODE
    // singleton
    static RMVideoFileList* instance()
    {
        static RMVideoFileList * _instance = 0;
        if ( _instance == 0 ) {
            _instance = new RMVideoFileList();
        }
        return _instance;
    }
#endif // PLAYER_ONLY_LIBRARY_MODE
#if (RM_MODEL_EMT_KR)
    /**
     * @brief Select the recording type that holds the most loaded files.
     */
    void selectMaxCountFilter();
#endif // RM_MODEL_EMT_KR
#if !(USE_1HOUR_FILTER)
    bool checkFilter(RMVideoFileList::FILTER filter)
    {
        return ((_filter & filter) == filter);
    }
    void setFilter(RMVideoFileList::FILTER filter, bool on);
    void setFilterSingle(RMVideoFileList::FILTER filter);
#endif // #if !(USE_1HOUR_FILTER)
    RMVideoItem* getPlayItem()  // do not replay when the item is already playing
    {
        return _playItem;
    }
    int getPlayIndex()  // index currently playing within the filtered group
    {
        if(_playItem == NULL)
        {
            return -1;
        }
        return filteredItems().indexOf(_playItem);
    }
    //! \brief Convert a video path into its thumbnail path.
    //! \param item item to extract from
    //! \return
    static QPair<QString,QString> getThumbnailPath(RMVideoItem* item);
    static bool isRootPath(QString folderPath);
    int removeItem(RMVideoItem* item);  // remove an item that vanished (e.g. deleted while playing)
    // List.appendToList
#if (RECURSIVE_APPEND_FILE)
    static void appendFolderToList(QString rootFolderPath,QList<QUrl>& list,int depth=0);
#else
    static void appendFolderToList(QString rootFolderPath,QList<QUrl>& list);
    // Group.appendToList
    static void appendToList(QString folderPath,QList<QUrl>& list, bool baseFolderOnly);
#endif
    static RMVideoFileList::GROUP_TYPE checkGroupTypeFromFolderPath(QString folderPath);
#if (RM_MODEL == RM_MODEL_TYPE_AN6000)
    static int parseSerial(QString baseName);
    static QString groupTypeFromFilePath(QString filePath,RMVideoFileList::GROUP_TYPE* type, int* serial);
#else // RM_MODEL_TYPE_AN6000
    static QString groupTypeFromFilePath(QString filePath,RMVideoFileList::GROUP_TYPE* type);
#endif // RM_MODEL_TYPE_AN6000
    bool isNextPlayItemExist(bool bNext);
    RMVideoItem* nexItem(bool bNext);
private:
    bool _searchItem(bool next, RMVideoItem** item,int fromIndex); // search direction: next = +1, previous = -1
public slots:
    void onPlayNextVideo(int fromIndex);
    void onPlayPreviousVideo(int fromIndex);
signals:
    // list update started (emitted on file load or filter change)
    void listUpdateStarted(bool bLoading);
    void listUpdateEnd(bool bLoading,RMVideoItem* selected);
    // the list also fires events when the type changes, so handle this one
    // only when loading finishes
    void loadListEnd();
    void backupStarted();
    void backupEnd();
    void backupPaused(bool bPaused);
    // next play item found / no more items
    void playItemFound(RMVideoItem* item,int old);
    void playNoMoreItem();
    void updateProgress(int value);
};
#endif // RM_VIDEOFILELIST_H

View File

@@ -0,0 +1,6 @@
#include "rm_video_list_loader.h"
//RMVideoListLoader::RMVideoListLoader()
//{
//}

View File

@@ -0,0 +1,42 @@
#ifndef RM_VIDEO_LIST_LOADER_H
#define RM_VIDEO_LIST_LOADER_H
#include <QObject>
#include <QRunnable>
#include <QThreadPool>
#include "../rm_include.h"
#include "rm_video_list.h"
// QRunnable that loads a list of file URLs into the singleton file list
// on a pool thread.
class RMVideoFileListLoader : public QRunnable
{
private:
    QList<QUrl> _list;      // URLs to load (copied at construction)
    bool _bPlayFirstAdded;  // play the first added file immediately
public:
    RMVideoFileListLoader(QList<QUrl>& list, bool bPlayFirstAdded = false)
        : _list(list), _bPlayFirstAdded(bPlayFirstAdded)
    {
    }
    // Invoked by QThreadPool: delegate to the singleton list loader.
    virtual void run()
    {
        RMVideoFileList::instance()->loadFromList(_list,_bPlayFirstAdded);
    }
};
// QRunnable that backs up the checked items of the singleton file list to
// the given destination on a pool thread.
class RMVideoFileListBackup : public QRunnable
{
private:
    QString _dest; // backup destination path
public:
    RMVideoFileListBackup(QString dest) : _dest(dest)
    {
    }
    // Invoked by QThreadPool: delegate to the singleton's backup routine.
    virtual void run()
    {
        RMVideoFileList::instance()->backup(_dest);
    }
};
#endif // RM_VIDEO_LIST_LOADER_H