【特性】 cfg 实现静态本地化

main
walon 2021-06-17 14:57:39 +08:00
parent 97c2342cfc
commit d873aa3a6c
16 changed files with 480 additions and 288 deletions

Binary file not shown.

View File

@ -0,0 +1,14 @@
..\src\Luban.Client\bin\Debug\net5.0\Luban.Client.exe ^
-h %LUBAN_SERVER_IP% ^
-j cfg ^
-- ^
-d Defines/__root__.xml ^
--input_data_dir Datas ^
--output_data_dir output_lua ^
-s client ^
--gen_types data_lua ^
--export_test_data ^
--input_l10n_text_files l10n/TextTable_CN.xlsx ^
--output_l10n_not_converted_text_file l10n/NotLocalized_CN.txt
pause

View File

@ -0,0 +1,9 @@
/asfa|aabbcc
/asfa32|aabbcc22
key_name|aabbcc
key1|asdfa4
key2|asdfa7
key3|asdfa8
lua/key1|lua text
xml_key1|xml text
xml_key2|xml text222

View File

@ -1,6 +1,6 @@
using Luban.Job.Cfg.Datas; using Luban.Job.Cfg.Datas;
using Luban.Job.Cfg.Defs; using Luban.Job.Cfg.Defs;
using Luban.Job.Cfg.i10n; using Luban.Job.Cfg.l10n;
using System; using System;
using System.Collections.Generic; using System.Collections.Generic;
using System.Linq; using System.Linq;

View File

@ -1,5 +1,5 @@
using Luban.Job.Cfg.DataVisitors; using Luban.Job.Cfg.DataVisitors;
using Luban.Job.Cfg.i10n; using Luban.Job.Cfg.l10n;
using System.Collections.Generic; using System.Collections.Generic;
namespace Luban.Job.Cfg.Datas namespace Luban.Job.Cfg.Datas

View File

@ -1,6 +1,6 @@
using Luban.Config.Common.RawDefs; using Luban.Config.Common.RawDefs;
using Luban.Job.Cfg.Datas; using Luban.Job.Cfg.Datas;
using Luban.Job.Cfg.i10n; using Luban.Job.Cfg.l10n;
using Luban.Job.Cfg.TypeVisitors; using Luban.Job.Cfg.TypeVisitors;
using Luban.Job.Common.Defs; using Luban.Job.Common.Defs;
using Luban.Server.Common; using Luban.Server.Common;
@ -42,9 +42,15 @@ namespace Luban.Job.Cfg.Defs
public RawTextTable RawTextTable { get; } = new RawTextTable(); public RawTextTable RawTextTable { get; } = new RawTextTable();
public TextTable ExportTextTable { get; } = new TextTable(); public TextTable ExportTextTable { get; private set; }
public NotConvertTextSet NotConvertTextSet { get; } = new NotConvertTextSet(); public NotConvertTextSet NotConvertTextSet { get; private set; }
public void InitL10n()
{
ExportTextTable = new TextTable(this);
NotConvertTextSet = new NotConvertTextSet();
}
public void AddCfgTable(DefTable table) public void AddCfgTable(DefTable table)
{ {

View File

@ -1,13 +1,7 @@
using Bright.Serialization;
using Bright.Time; using Bright.Time;
using CommandLine; using CommandLine;
using Luban.Common.Protos; using Luban.Common.Protos;
using Luban.Common.Utils; using Luban.Common.Utils;
using Luban.Config.Common.RawDefs;
using Luban.Job.Cfg.Cache;
using Luban.Job.Cfg.Datas;
using Luban.Job.Cfg.DataSources;
using Luban.Job.Cfg.DataVisitors;
using Luban.Job.Cfg.Defs; using Luban.Job.Cfg.Defs;
using Luban.Job.Cfg.Generate; using Luban.Job.Cfg.Generate;
using Luban.Job.Cfg.RawDefs; using Luban.Job.Cfg.RawDefs;
@ -64,32 +58,14 @@ namespace Luban.Job.Cfg
[Option("export_test_data", Required = false, HelpText = "export test data")] [Option("export_test_data", Required = false, HelpText = "export test data")]
public bool ExportTestData { get; set; } = false; public bool ExportTestData { get; set; } = false;
[Option('t', "timezone", Required = false, HelpText = "timezone")] [Option('t', "l10n_timezone", Required = false, HelpText = "timezone")]
public string TimeZone { get; set; } public string TimeZone { get; set; }
}
private async Task LoadCfgDataAsync(RemoteAgent agent, DefAssembly ass, string dataDir, bool exportTestData) [Option("input_l10n_text_files", Required = false, HelpText = "input l10n text table files. can be multi, sep by ','")]
{ public string InputTextTableFiles { get; set; }
var ctx = agent;
List<DefTable> exportTables = ass.Types.Values.Where(t => t is DefTable ct && ct.NeedExport).Select(t => (DefTable)t).ToList();
var genDataTasks = new List<Task>();
var outputDataFiles = new ConcurrentBag<FileInfo>();
long genDataStartTime = TimeUtil.NowMillis;
foreach (DefTable c in exportTables) [Option("output_l10n_not_converted_text_file", Required = false, HelpText = "the file save not converted l10n texts.")]
{ public string OutputNotConvertTextFile { get; set; }
genDataTasks.Add(Task.Run(async () =>
{
long beginTime = TimeUtil.NowMillis;
await LoadTableAsync(agent, c, dataDir, exportTestData);
long endTime = TimeUtil.NowMillis;
if (endTime - beginTime > 100)
{
ctx.Info("====== load {0} cost {1} ms ======", c.FullName, (endTime - beginTime));
}
}));
}
await Task.WhenAll(genDataTasks.ToArray());
} }
private ICodeRender CreateCodeRender(string genType) private ICodeRender CreateCodeRender(string genType)
@ -171,6 +147,12 @@ namespace Luban.Job.Cfg
errMsg = "--output_data_json_monolithic_file missing"; errMsg = "--output_data_json_monolithic_file missing";
return false; return false;
} }
if (string.IsNullOrWhiteSpace(result.InputTextTableFiles) ^ string.IsNullOrWhiteSpace(result.OutputNotConvertTextFile))
{
errMsg = "--input_l10n_text_files must be provided with --output_l10n_not_converted_text_file";
return false;
}
} }
return true; return true;
@ -225,6 +207,7 @@ namespace Luban.Job.Cfg
bool hasLoadCfgData = false; bool hasLoadCfgData = false;
bool needL10NTextConvert = !string.IsNullOrWhiteSpace(args.InputTextTableFiles);
async Task CheckLoadCfgDataAsync() async Task CheckLoadCfgDataAsync()
{ {
@ -233,9 +216,15 @@ namespace Luban.Job.Cfg
hasLoadCfgData = true; hasLoadCfgData = true;
var timer = new ProfileTimer(); var timer = new ProfileTimer();
timer.StartPhase("load config data"); timer.StartPhase("load config data");
await LoadCfgDataAsync(agent, ass, args.InputDataDir, args.ExportTestData); await DataLoaderUtil.LoadCfgDataAsync(agent, ass, args.InputDataDir, args.ExportTestData);
timer.EndPhaseAndLog(); timer.EndPhaseAndLog();
if (needL10NTextConvert)
{
ass.InitL10n();
await DataLoaderUtil.LoadTextTablesAsync(agent, ass, args.InputDataDir, args.InputTextTableFiles);
}
timer.StartPhase("validate"); timer.StartPhase("validate");
var validateCtx = new ValidatorContext(ass, args.ValidateRootDir); var validateCtx = new ValidatorContext(ass, args.ValidateRootDir);
await validateCtx.ValidateTables(exportTables); await validateCtx.ValidateTables(exportTables);
@ -944,7 +933,7 @@ class Vector4:
{ {
tasks.Add(Task.Run(() => tasks.Add(Task.Run(() =>
{ {
var content = ToOutputData(c, ass.GetTableDataList(c), genType); var content = DataExporterUtil.ToOutputData(c, ass.GetTableDataList(c), genType);
var file = genType.EndsWith("json") ? c.JsonOutputDataFile : c.OutputDataFile; var file = genType.EndsWith("json") ? c.JsonOutputDataFile : c.OutputDataFile;
var md5 = FileUtil.CalcMD5(content); var md5 = FileUtil.CalcMD5(content);
CacheManager.Ins.AddCache(file, md5, content); CacheManager.Ins.AddCache(file, md5, content);
@ -961,7 +950,7 @@ class Vector4:
{ {
allJsonTask.Add(Task.Run(() => allJsonTask.Add(Task.Run(() =>
{ {
return ToOutputData(c, ass.GetTableDataList(c), "data_json"); return DataExporterUtil.ToOutputData(c, ass.GetTableDataList(c), "data_json");
})); }));
} }
await Task.WhenAll(allJsonTask); await Task.WhenAll(allJsonTask);
@ -1006,7 +995,7 @@ class Vector4:
{ {
tasks.Add(Task.Run(() => tasks.Add(Task.Run(() =>
{ {
var content = ToOutputData(c, ass.GetTableDataList(c), genType); var content = DataExporterUtil.ToOutputData(c, ass.GetTableDataList(c), genType);
var file = $"{c.Name}.lua"; var file = $"{c.Name}.lua";
var md5 = FileUtil.CalcMD5(content); var md5 = FileUtil.CalcMD5(content);
CacheManager.Ins.AddCache(file, md5, content); CacheManager.Ins.AddCache(file, md5, content);
@ -1023,7 +1012,7 @@ class Vector4:
{ {
genDataTasks.Add(Task.Run(() => genDataTasks.Add(Task.Run(() =>
{ {
return ExportResourceList(ass.GetTableDataList(c)); return DataExporterUtil.ExportResourceList(ass.GetTableDataList(c));
})); }));
} }
@ -1060,6 +1049,16 @@ class Vector4:
} }
await Task.WhenAll(tasks.ToArray()); await Task.WhenAll(tasks.ToArray());
if (needL10NTextConvert)
{
var notConvertTextList = DataExporterUtil.GenNotConvertTextList(ass.NotConvertTextSet);
var md5 = FileUtil.CalcMD5(notConvertTextList);
string outputNotConvertTextFile = args.OutputNotConvertTextFile;
CacheManager.Ins.AddCache(outputNotConvertTextFile, md5, notConvertTextList);
genScatteredFiles.Add(new FileInfo() { FilePath = outputNotConvertTextFile, MD5 = md5 });
}
if (!genCodeFilesInOutputCodeDir.IsEmpty) if (!genCodeFilesInOutputCodeDir.IsEmpty)
{ {
res.FileGroups.Add(new FileGroup() { Dir = outputCodeDir, Files = genCodeFilesInOutputCodeDir.ToList() }); res.FileGroups.Add(new FileGroup() { Dir = outputCodeDir, Files = genCodeFilesInOutputCodeDir.ToList() });
@ -1082,200 +1081,5 @@ class Vector4:
agent.Session.ReplyRpc<GenJob, GenJobArg, GenJobRes>(rpc, res); agent.Session.ReplyRpc<GenJob, GenJobArg, GenJobRes>(rpc, res);
} }
public string GetActualFileName(string file)
{
int index = file.IndexOf('@');
return index >= 0 ? file[(index + 1)..] : file;
}
private List<Record> LoadCfgRecords(DefTable table, string originFile, string sheetName, byte[] content, bool multiRecord, bool exportTestData)
{
// (md5,sheet,multiRecord,exportTestData) -> (valuetype, List<(datas)>)
var dataSource = DataSourceFactory.Create(originFile, sheetName, new MemoryStream(content), exportTestData);
try
{
List<DType> datas;
if (multiRecord)
{
datas = dataSource.ReadMulti(table.ValueTType);
}
else
{
datas = new List<DType> { dataSource.ReadOne(table.ValueTType) };
}
var records = new List<Record>(datas.Count);
foreach (var data in datas)
{
records.Add(new Record((DBean)data, originFile));
}
return records;
}
catch (Exception e)
{
throw new Exception($"配置文件:{originFile} 生成失败. ==> {e.Message}", e);
}
}
class InputFileInfo
{
public string MD5 { get; set; }
public string OriginFile { get; set; }
public string ActualFile { get; set; }
public string SheetName { get; set; }
}
private async Task<List<InputFileInfo>> CollectInputFilesAsync(RemoteAgent agent, DefTable table, string dataDir)
{
var collectTasks = new List<Task<List<InputFileInfo>>>();
foreach (var file in table.InputFiles)
{
(var actualFile, var sheetName) = RenderFileUtil.SplitFileAndSheetName(FileUtil.Standardize(file));
var actualFullPath = FileUtil.Combine(dataDir, actualFile);
var originFullPath = FileUtil.Combine(dataDir, file);
//s_logger.Info("== get input file:{file} actualFile:{actual}", file, actualFile);
collectTasks.Add(Task.Run(async () =>
{
var fileOrDirContent = await agent.GetFileOrDirectoryAsync(actualFullPath);
if (fileOrDirContent.IsFile)
{
return new List<InputFileInfo> { new InputFileInfo() { OriginFile = file, ActualFile = actualFullPath, SheetName = sheetName, MD5 = fileOrDirContent.Md5 } };
}
else
{
return fileOrDirContent.SubFiles.Select(f => new InputFileInfo() { OriginFile = f.FilePath, ActualFile = f.FilePath, MD5 = f.MD5 }).ToList();
}
}));
}
var allFiles = new List<InputFileInfo>();
foreach (var t in collectTasks)
{
allFiles.AddRange(await t);
}
return allFiles;
}
public async Task LoadTableAsync(RemoteAgent agent, DefTable table, string dataDir, bool exportTestData)
{
var tasks = new List<Task<List<Record>>>();
var inputFiles = await CollectInputFilesAsync(agent, table, dataDir);
// check cache (table, exporttestdata) -> (list<InputFileInfo>, List<DType>)
// (md5, sheetName,exportTestData) -> (value_type, List<DType>)
foreach (var file in inputFiles)
{
var actualFile = file.ActualFile;
//s_logger.Info("== get input file:{file} actualFile:{actual}", file, actualFile);
tasks.Add(Task.Run(async () =>
{
if (FileRecordCacheManager.Ins.TryGetCacheLoadedRecords(table, file.MD5, actualFile, file.SheetName, exportTestData, out var cacheRecords))
{
return cacheRecords;
}
var res = LoadCfgRecords(table,
file.OriginFile,
file.SheetName,
await agent.GetFromCacheOrReadAllBytesAsync(file.ActualFile, file.MD5),
RenderFileUtil.IsExcelFile(file.ActualFile),
exportTestData);
FileRecordCacheManager.Ins.AddCacheLoadedRecords(table, file.MD5, file.SheetName, exportTestData, res);
return res;
}));
}
var records = new List<Record>(tasks.Count);
foreach (var task in tasks)
{
records.AddRange(await task);
}
s_logger.Trace("== load recors. count:{count}", records.Count);
table.Assembly.AddDataTable(table, records);
s_logger.Trace("table:{name} record num:{num}", table.FullName, records.Count);
}
private byte[] ToOutputData(DefTable table, List<Record> records, string dataType)
{
switch (dataType)
{
case "data_bin":
{
var buf = ThreadLocalTemporalByteBufPool.Alloc(1024 * 1024);
BinaryExportor.Ins.WriteList(records, table.Assembly, buf);
var bytes = buf.CopyData();
ThreadLocalTemporalByteBufPool.Free(buf);
return bytes;
}
case "data_json":
{
var ss = new MemoryStream();
var jsonWriter = new Utf8JsonWriter(ss, new JsonWriterOptions()
{
Indented = true,
SkipValidation = false,
Encoder = System.Text.Encodings.Web.JavaScriptEncoder.Create(System.Text.Unicode.UnicodeRanges.All),
});
JsonExportor.Ins.WriteList(records, table.Assembly, jsonWriter);
jsonWriter.Flush();
return DataUtil.StreamToBytes(ss);
}
case "data_lua":
{
var content = new List<string>();
switch (table.Mode)
{
case ETableMode.ONE:
{
LuaExportor.Ins.ExportTableOne(table, records, content);
break;
}
case ETableMode.MAP:
{
LuaExportor.Ins.ExportTableOneKeyMap(table, records, content);
break;
}
case ETableMode.BMAP:
{
LuaExportor.Ins.ExportTableTwoKeyMap(table, records, content);
break;
}
default:
{
throw new NotSupportedException();
}
}
return System.Text.Encoding.UTF8.GetBytes(string.Join('\n', content));
}
default:
{
throw new ArgumentException($"not support datatype:{dataType}");
}
}
}
private List<ResourceInfo> ExportResourceList(List<Record> records)
{
var resList = new List<ResourceInfo>();
foreach (Record res in records)
{
ResourceExportor.Ins.Accept(res.Data, null, resList);
}
return resList;
}
} }
} }

View File

@ -0,0 +1,100 @@
using Bright.Serialization;
using Luban.Config.Common.RawDefs;
using Luban.Job.Cfg.Datas;
using Luban.Job.Cfg.DataVisitors;
using Luban.Job.Cfg.Defs;
using Luban.Job.Cfg.l10n;
using Luban.Job.Cfg.RawDefs;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Text.Json;
using System.Threading.Tasks;
namespace Luban.Job.Cfg.Utils
{
/// <summary>
/// Stateless helpers that serialize loaded config records into the supported
/// output formats (binary / json / lua) and produce auxiliary export artifacts
/// (resource lists, the not-converted l10n text report).
/// </summary>
public static class DataExporterUtil
{
/// <summary>
/// Serializes a table's records to file content in the requested format.
/// </summary>
/// <param name="table">Table definition; supplies the owning assembly and the table mode.</param>
/// <param name="records">Records previously loaded for this table.</param>
/// <param name="dataType">One of "data_bin", "data_json", "data_lua".</param>
/// <returns>Encoded bytes of the output file.</returns>
/// <exception cref="ArgumentException">Unknown <paramref name="dataType"/>.</exception>
/// <exception cref="NotSupportedException">Unknown table mode for lua export.</exception>
public static byte[] ToOutputData(DefTable table, List<Record> records, string dataType)
{
switch (dataType)
{
case "data_bin":
{
// Rent a 1MB scratch buffer from the thread-local pool; the written
// bytes are copied out before the buffer is returned to the pool.
var buf = ThreadLocalTemporalByteBufPool.Alloc(1024 * 1024);
BinaryExportor.Ins.WriteList(records, table.Assembly, buf);
var bytes = buf.CopyData();
ThreadLocalTemporalByteBufPool.Free(buf);
return bytes;
}
case "data_json":
{
var ss = new MemoryStream();
// The permissive Unicode encoder keeps CJK text readable in the
// output instead of escaping it to \uXXXX sequences.
var jsonWriter = new Utf8JsonWriter(ss, new JsonWriterOptions()
{
Indented = true,
SkipValidation = false,
Encoder = System.Text.Encodings.Web.JavaScriptEncoder.Create(System.Text.Unicode.UnicodeRanges.All),
});
JsonExportor.Ins.WriteList(records, table.Assembly, jsonWriter);
// Flush before reading the stream, otherwise trailing output may
// still be buffered in the writer.
jsonWriter.Flush();
return DataUtil.StreamToBytes(ss);
}
case "data_lua":
{
var content = new List<string>();
// Table mode decides the lua layout: a single record, a one-key
// map, or a two-key map (see the ExportTable* method names).
switch (table.Mode)
{
case ETableMode.ONE:
{
LuaExportor.Ins.ExportTableOne(table, records, content);
break;
}
case ETableMode.MAP:
{
LuaExportor.Ins.ExportTableOneKeyMap(table, records, content);
break;
}
case ETableMode.BMAP:
{
LuaExportor.Ins.ExportTableTwoKeyMap(table, records, content);
break;
}
default:
{
throw new NotSupportedException();
}
}
return System.Text.Encoding.UTF8.GetBytes(string.Join('\n', content));
}
default:
{
throw new ArgumentException($"not support datatype:{dataType}");
}
}
}
/// <summary>
/// Collects resource references from the given records by letting
/// ResourceExportor visit each record's data tree.
/// </summary>
public static List<ResourceInfo> ExportResourceList(List<Record> records)
{
var resList = new List<ResourceInfo>();
foreach (Record res in records)
{
ResourceExportor.Ins.Accept(res.Data, null, resList);
}
return resList;
}
/// <summary>
/// Renders the set of not-converted l10n texts as UTF-8 "key|text" lines,
/// one entry per line, iterating the key-sorted snapshot for stable output.
/// </summary>
public static byte[] GenNotConvertTextList(NotConvertTextSet notConvertSet)
{
StringBuilder sb = new StringBuilder();
foreach (var e in notConvertSet.SortedEntry)
{
sb.Append(e.Key).Append('|').Append(e.Value).Append('\n');
}
return System.Text.Encoding.UTF8.GetBytes(sb.ToString());
}
}
}

View File

@ -0,0 +1,197 @@
using Bright.Time;
using Luban.Common.Utils;
using Luban.Job.Cfg.Cache;
using Luban.Job.Cfg.Datas;
using Luban.Job.Cfg.DataSources;
using Luban.Job.Cfg.Defs;
using Luban.Job.Common.Types;
using Luban.Job.Common.Utils;
using Luban.Server.Common;
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Luban.Job.Cfg.Utils
{
/// <summary>
/// Helpers that fetch input files from the remote agent and deserialize them
/// into table records (with per-file record caching) or l10n text tables.
/// </summary>
public static class DataLoaderUtil
{
private static readonly NLog.Logger s_logger = NLog.LogManager.GetCurrentClassLogger();
/// <summary>
/// Describes one concrete input file resolved from a table's input list.
/// </summary>
public class InputFileInfo
{
// MD5 of the file content; used as the record-cache key.
public string MD5 { get; set; }
// The file reference as written in the definition (may carry a sheet suffix).
public string OriginFile { get; set; }
// Resolved path actually fetched from the agent.
public string ActualFile { get; set; }
// Excel sheet name split off OriginFile; null when none was specified.
public string SheetName { get; set; }
}
/// <summary>
/// Resolves each input entry to its concrete file(s) in parallel. A directory
/// entry expands to all of its sub-files; a file entry yields one info record.
/// </summary>
public static async Task<List<InputFileInfo>> CollectInputFilesAsync(RemoteAgent agent, IEnumerable<string> files, string dataDir)
{
var collectTasks = new List<Task<List<InputFileInfo>>>();
foreach (var file in files)
{
// Split an optional "@sheet" style suffix from the file reference.
(var actualFile, var sheetName) = RenderFileUtil.SplitFileAndSheetName(FileUtil.Standardize(file));
var actualFullPath = FileUtil.Combine(dataDir, actualFile);
// NOTE(review): originFullPath is computed but never used below.
var originFullPath = FileUtil.Combine(dataDir, file);
//s_logger.Info("== get input file:{file} actualFile:{actual}", file, actualFile);
collectTasks.Add(Task.Run(async () =>
{
var fileOrDirContent = await agent.GetFileOrDirectoryAsync(actualFullPath);
if (fileOrDirContent.IsFile)
{
return new List<InputFileInfo> { new InputFileInfo() { OriginFile = file, ActualFile = actualFullPath, SheetName = sheetName, MD5 = fileOrDirContent.Md5 } };
}
else
{
// Directory: flatten to its sub-files; no sheet name applies.
return fileOrDirContent.SubFiles.Select(f => new InputFileInfo() { OriginFile = f.FilePath, ActualFile = f.FilePath, MD5 = f.MD5 }).ToList();
}
}));
}
// Await in submission order so the result order is deterministic.
var allFiles = new List<InputFileInfo>();
foreach (var t in collectTasks)
{
allFiles.AddRange(await t);
}
return allFiles;
}
/// <summary>
/// Loads all input files of a table in parallel, reusing cached records when
/// a file's MD5 (plus sheet/exportTestData) matches, then registers the merged
/// record list on the table's assembly.
/// </summary>
public static async Task LoadTableAsync(RemoteAgent agent, DefTable table, string dataDir, bool exportTestData)
{
var tasks = new List<Task<List<Record>>>();
var inputFiles = await CollectInputFilesAsync(agent, table.InputFiles, dataDir);
// check cache (table, exporttestdata) -> (list<InputFileInfo>, List<DType>)
// (md5, sheetName,exportTestData) -> (value_type, List<DType>)
foreach (var file in inputFiles)
{
var actualFile = file.ActualFile;
//s_logger.Info("== get input file:{file} actualFile:{actual}", file, actualFile);
tasks.Add(Task.Run(async () =>
{
// Cache hit: records for this exact content were parsed before.
if (FileRecordCacheManager.Ins.TryGetCacheLoadedRecords(table, file.MD5, actualFile, file.SheetName, exportTestData, out var cacheRecords))
{
return cacheRecords;
}
var res = LoadCfgRecords(table.ValueTType,
file.OriginFile,
file.SheetName,
await agent.GetFromCacheOrReadAllBytesAsync(file.ActualFile, file.MD5),
RenderFileUtil.IsExcelFile(file.ActualFile),
exportTestData);
FileRecordCacheManager.Ins.AddCacheLoadedRecords(table, file.MD5, file.SheetName, exportTestData, res);
return res;
}));
}
// Merge per-file record lists in input-file order.
var records = new List<Record>(tasks.Count);
foreach (var task in tasks)
{
records.AddRange(await task);
}
s_logger.Trace("== load recors. count:{count}", records.Count);
table.Assembly.AddDataTable(table, records);
s_logger.Trace("table:{name} record num:{num}", table.FullName, records.Count);
}
/// <summary>
/// Loads data for every exported table of the assembly in parallel, logging
/// tables whose load takes longer than 100ms.
/// </summary>
public static async Task LoadCfgDataAsync(RemoteAgent agent, DefAssembly ass, string dataDir, bool exportTestData)
{
var ctx = agent;
List<DefTable> exportTables = ass.Types.Values.Where(t => t is DefTable ct && ct.NeedExport).Select(t => (DefTable)t).ToList();
var genDataTasks = new List<Task>();
var outputDataFiles = new ConcurrentBag<FileInfo>();
long genDataStartTime = TimeUtil.NowMillis;
foreach (DefTable c in exportTables)
{
genDataTasks.Add(Task.Run(async () =>
{
long beginTime = TimeUtil.NowMillis;
await LoadTableAsync(agent, c, dataDir, exportTestData);
long endTime = TimeUtil.NowMillis;
// Only log slow tables to keep the output readable.
if (endTime - beginTime > 100)
{
ctx.Info("====== load {0} cost {1} ms ======", c.FullName, (endTime - beginTime));
}
}));
}
await Task.WhenAll(genDataTasks.ToArray());
}
/// <summary>
/// Parses one input file's bytes into records of the given bean type.
/// </summary>
/// <param name="recordType">Value type the rows deserialize into.</param>
/// <param name="originFile">Original file reference, used for data-source selection and error messages.</param>
/// <param name="sheetName">Excel sheet to read; null for non-sheet sources.</param>
/// <param name="content">Raw file bytes.</param>
/// <param name="multiRecord">True to read many records, false to read exactly one.</param>
/// <param name="exportTestData">Whether test-data rows are included.</param>
/// <exception cref="Exception">Wraps any parse failure with the file name.</exception>
public static List<Record> LoadCfgRecords(TBean recordType, string originFile, string sheetName, byte[] content, bool multiRecord, bool exportTestData)
{
// (md5,sheet,multiRecord,exportTestData) -> (valuetype, List<(datas)>)
var dataSource = DataSourceFactory.Create(originFile, sheetName, new MemoryStream(content), exportTestData);
try
{
List<DType> datas;
if (multiRecord)
{
datas = dataSource.ReadMulti(recordType);
}
else
{
datas = new List<DType> { dataSource.ReadOne(recordType) };
}
var records = new List<Record>(datas.Count);
foreach (var data in datas)
{
records.Add(new Record((DBean)data, originFile));
}
return records;
}
catch (Exception e)
{
throw new Exception($"配置文件:{originFile} 生成失败. ==> {e.Message}", e);
}
}
/// <summary>
/// Loads the comma-separated l10n text table files into the assembly's
/// export text table. Fetches all files concurrently, then applies them in
/// the original order.
/// </summary>
public static async Task LoadTextTablesAsync(RemoteAgent agent, DefAssembly ass, string baseDir, string textTableFiles)
{
var tasks = new List<Task<byte[]>>();
var files = textTableFiles.Split(',');
// NOTE(review): pairs files[i] with tasks[i]; this assumes every entry
// resolves to exactly one file. A directory entry would expand to several
// InputFileInfos and desync the two lists — confirm inputs are files only.
foreach (var file in await CollectInputFilesAsync(agent, files, baseDir))
{
tasks.Add(agent.GetFromCacheOrReadAllBytesAsync(file.ActualFile, file.MD5));
}
var textTable = ass.ExportTextTable;
for (int i = 0; i < tasks.Count; i++)
{
var bytes = await tasks[i];
try
{
textTable.LoadFromFile(files[i], bytes);
}
catch (Exception e)
{
throw new Exception($"load text table file:{files[i]} fail. ==> {e.Message} ");
}
}
}
}
}

View File

@ -69,7 +69,6 @@ namespace Luban.Job.Cfg
{ {
var records = t.Assembly.GetTableDataList(t); var records = t.Assembly.GetTableDataList(t);
ValidateTableModeIndex(t, records); ValidateTableModeIndex(t, records);
})); }));
} }
await Task.WhenAll(tasks); await Task.WhenAll(tasks);

View File

@ -1,18 +0,0 @@
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Luban.Job.Cfg.i10n
{
/// <summary>
/// Thread-safe set of texts that were not converted during localization.
/// </summary>
public class NotConvertTextSet
{
// key -> text; the first writer wins on duplicate keys.
private readonly ConcurrentDictionary<string, string> _texts = new();

/// <summary>Records a not-converted text; duplicate keys are ignored.</summary>
public void Add(string key, string text) => _texts.TryAdd(key, text);
}
}

View File

@ -1,33 +0,0 @@
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Luban.Job.Cfg.i10n
{
/// <summary>
/// Key-to-localized-text lookup table, pre-seeded with two hard-coded
/// demo entries.
/// </summary>
public class TextTable
{
// key -> localized text; seeded inline instead of in the constructor.
private readonly Dictionary<string, string> _texts = new()
{
["test/a"] = "这是本地化数据 test/a",
["name"] = "这是本地化数据 name",
};

/// <summary>Registers a localized text; throws if the key already exists.</summary>
public void AddText(string key, string text)
{
if (_texts.TryAdd(key, text))
{
return;
}
throw new Exception($"text key:{key} 重复");
}

/// <summary>Looks up the localized text for a key.</summary>
public bool TryGetText(string key, out string text) => _texts.TryGetValue(key, out text);
}
}

View File

@ -0,0 +1,31 @@
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Luban.Job.Cfg.l10n
{
/// <summary>
/// Thread-safe collection of l10n texts that could not be converted
/// (localized) during export. Entries are collected from parallel export
/// tasks and later dumped into the "not converted" report file.
/// </summary>
public class NotConvertTextSet
{
// key -> original text. TryAdd keeps the first text seen for a key.
private readonly ConcurrentDictionary<string, string> _notConvertTexts = new();

/// <summary>
/// Records a text that failed localization. Null or empty keys are ignored
/// (the original guard only skipped "", letting null reach TryAdd and
/// throw); duplicate keys keep the first recorded text.
/// </summary>
public void Add(string key, string text)
{
if (!string.IsNullOrEmpty(key))
{
_notConvertTexts.TryAdd(key, text);
}
}

/// <summary>
/// Snapshot of all entries sorted by key using ordinal comparison, so the
/// generated report is deterministic regardless of the host culture.
/// </summary>
public List<KeyValuePair<string, string>> SortedEntry
{
get
{
var list = _notConvertTexts.ToList();
// Ordinal, not culture-sensitive CompareTo (CA1310): stable output.
list.Sort((a, b) => string.CompareOrdinal(a.Key, b.Key));
return list;
}
}
}
}

View File

@ -6,7 +6,7 @@ using System.Linq;
using System.Text; using System.Text;
using System.Threading.Tasks; using System.Threading.Tasks;
namespace Luban.Job.Cfg.i10n namespace Luban.Job.Cfg.l10n
{ {
public class RawTextTable public class RawTextTable
{ {

View File

@ -0,0 +1,83 @@
using Luban.Config.Common.RawDefs;
using Luban.Job.Cfg.Datas;
using Luban.Job.Cfg.DataVisitors;
using Luban.Job.Cfg.Defs;
using Luban.Job.Cfg.Utils;
using Luban.Job.Common.Types;
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Luban.Job.Cfg.l10n
{
/// <summary>
/// The localized (target-language) text table: maps l10n keys to translated
/// texts. Text files are parsed by reusing the normal record loader with an
/// ad-hoc two-string-field bean type built in the constructor.
/// </summary>
public class TextTable
{
private static readonly NLog.Logger s_logger = NLog.LogManager.GetCurrentClassLogger();
// Assembly this table belongs to; also owns the synthetic row type below.
public DefAssembly Assembly { get; }
private readonly Dictionary<string, string> _key2Texts = new();
// Synthetic row type "__intern__.__TextInfo__" { key:string, text:string }
// used to parse text-table files through the regular data loader.
private readonly TBean _textRowType;
public TextTable(DefAssembly ass)
{
this.Assembly = ass;
var defTextRowType = new DefBean(new CfgBean()
{
Namespace = "__intern__",
Name = "__TextInfo__",
Parent = "",
Alias = "",
IsValueType = false,
Sep = "",
TypeId = 0,
IsSerializeCompatible = false,
Fields = new List<Common.RawDefs.Field>
{
new CfgField() { Name = "key", Type = "string" },
//new Common.RawDefs.Field() { Id = 1, Name = "origin_text", Type = "string" },
new CfgField() { Name = "text", Type = "string" },
}
})
{
AssemblyBase = ass,
};
// Run all three compile phases in order so the bean is fully resolved
// before wrapping it in a TBean.
defTextRowType.PreCompile();
defTextRowType.Compile();
defTextRowType.PostCompile();
_textRowType = new TBean(defTextRowType, false);
}
/// <summary>Registers a localized text; throws on duplicate key.</summary>
public void AddText(string key, string text)
{
if (!_key2Texts.TryAdd(key, text))
{
throw new Exception($"text key:{key} 重复");
}
}
/// <summary>Looks up the localized text for a key.</summary>
public bool TryGetText(string key, out string text)
{
return _key2Texts.TryGetValue(key, out text);
}
/// <summary>
/// Parses a text-table file (rows of key/text via the synthetic row type)
/// and merges its entries; throws if a key already exists.
/// </summary>
public void LoadFromFile(string fileName, byte[] bytes)
{
var records = DataLoaderUtil.LoadCfgRecords(_textRowType, fileName, null, bytes, true, false);
foreach (var r in records)
{
//s_logger.Info("== read text:{}", r.Data);
// Fields[0]/Fields[1] follow the field order declared above: key, text.
string key = (r.Data.Fields[0] as DString).Value;
string text = (r.Data.Fields[1] as DString).Value;
if (!_key2Texts.TryAdd(key, text))
{
throw new Exception($"TextTableFile:{fileName} key:{key} text:{text} 重复");
}
}
}
}
}

View File

@ -100,7 +100,7 @@ namespace Luban.Server.Common
GET_INPUT_FILE_TIMEOUT); GET_INPUT_FILE_TIMEOUT);
if (res.Err != Luban.Common.EErrorCode.OK) if (res.Err != Luban.Common.EErrorCode.OK)
{ {
throw new ReadRemoteFailException($"{res.Err}"); throw new ReadRemoteFailException($"ReadFile:{file} fail. {res.Err}");
} }
s_logger.Trace("read GetFileOrDirectoryAsync end. file:{file} cost:{time}", file, TimeUtil.NowMillis - t1); s_logger.Trace("read GetFileOrDirectoryAsync end. file:{file} cost:{time}", file, TimeUtil.NowMillis - t1);
return res; return res;