mirror of
https://github.com/curioustorvald/Terrarum-sans-bitmap.git
synced 2026-03-15 23:46:09 +09:00
Compare commits
70 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
a5572b1d95 | ||
|
|
0f9fbe9713 | ||
|
|
f2bc61928b | ||
|
|
0b730c7a47 | ||
|
|
0afbfdf043 | ||
|
|
12629ee3e8 | ||
|
|
99c6ed5c8c | ||
|
|
f7ffeec0e2 | ||
|
|
b106e1c1b0 | ||
|
|
ec911e568d | ||
|
|
68873c8d80 | ||
|
|
fed73338e2 | ||
|
|
da59fe24d4 | ||
|
|
cb2f432479 | ||
|
|
bc2dbf8b69 | ||
|
|
3e79181aa3 | ||
|
|
73c2b6986d | ||
|
|
f4573536e4 | ||
|
|
c8b197ec01 | ||
|
|
c299aa8d50 | ||
|
|
673ca100d4 | ||
|
|
db327d8357 | ||
|
|
cce9d62bd1 | ||
|
|
b3acbf1c0e | ||
|
|
602923f5bc | ||
|
|
714cca79be | ||
|
|
83303603c0 | ||
|
|
e2550b6ef6 | ||
|
|
a69aee9aa7 | ||
|
|
5c6da36fa8 | ||
|
|
3c9bc38dfd | ||
|
|
0811971a8e | ||
|
|
b0391e5d80 | ||
|
|
95fafe51a9 | ||
|
|
b78b4711fb | ||
|
|
35d4d94818 | ||
|
|
7c788eb9d8 | ||
|
|
23e748cc88 | ||
|
|
5d10bdb8e8 | ||
|
|
95912acc32 | ||
|
|
e3a3079fb2 | ||
|
|
3e3e20e5d4 | ||
|
|
f55f90352b | ||
|
|
80b67a3886 | ||
|
|
982fb94828 | ||
|
|
be1c8e2f79 | ||
|
|
08d1b41cc0 | ||
|
|
3f9f5fb679 | ||
|
|
a2a73128e0 | ||
|
|
488304b7b4 | ||
|
|
b73aa76285 | ||
|
|
f38cd8f4da | ||
|
|
a567b9f7fc | ||
|
|
86699af92d | ||
|
|
cdc3499f38 | ||
|
|
fca02f1a3d | ||
|
|
73fcd7d922 | ||
|
|
1d6eb7b2c8 | ||
|
|
d94bac6186 | ||
|
|
63adbba1bb | ||
|
|
8d1e669a93 | ||
|
|
949b6aa777 | ||
|
|
5e2cacd491 | ||
|
|
208466bbb2 | ||
|
|
b5f01a4d41 | ||
|
|
e7afe0135e | ||
|
|
e3904790dc | ||
|
|
0c3a73c2f9 | ||
|
|
648f3ffadd | ||
|
|
2dc148116e |
8
.gitignore
vendored
8
.gitignore
vendored
@@ -13,3 +13,11 @@ tmp_*
|
||||
*.bak
|
||||
*-autosave.kra
|
||||
.directory
|
||||
|
||||
*/__pycache__
|
||||
OTFbuild/*.ttf
|
||||
OTFbuild/*.otf
|
||||
OTFbuild/*.woff
|
||||
OTFbuild/*.woff2
|
||||
*.fea
|
||||
*.xdp-*
|
||||
|
||||
4
.idea/artifacts/TerrarumSansBitmap.xml
generated
4
.idea/artifacts/TerrarumSansBitmap.xml
generated
@@ -1,12 +1,12 @@
|
||||
<component name="ArtifactManager">
|
||||
<artifact type="jar" name="TerrarumSansBitmap">
|
||||
<artifact type="jar" build-on-make="true" name="TerrarumSansBitmap">
|
||||
<output-path>$PROJECT_DIR$/lib</output-path>
|
||||
<root id="archive" name="TerrarumSansBitmap.jar">
|
||||
<element id="module-output" name="BuildJAR_TerrarumSansBitmap" />
|
||||
<element id="directory" name="META-INF">
|
||||
<element id="file-copy" path="$PROJECT_DIR$/META-INF/MANIFEST.MF" />
|
||||
</element>
|
||||
<element id="dir-copy" path="$PROJECT_DIR$/src" />
|
||||
<element id="module-output" name="BuildJAR_TerrarumSansBitmap" />
|
||||
</root>
|
||||
</artifact>
|
||||
</component>
|
||||
10
.idea/kotlinc.xml
generated
Executable file → Normal file
10
.idea/kotlinc.xml
generated
Executable file → Normal file
@@ -1,7 +1,13 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project version="4">
|
||||
<component name="Kotlin2JsCompilerArguments">
|
||||
<option name="moduleKind" value="plain" />
|
||||
</component>
|
||||
<component name="Kotlin2JvmCompilerArguments">
|
||||
<option name="jvmTarget" value="21" />
|
||||
</component>
|
||||
<component name="KotlinCommonCompilerArguments">
|
||||
<option name="apiVersion" value="1.4" />
|
||||
<option name="languageVersion" value="1.4" />
|
||||
<option name="apiVersion" value="2.0" />
|
||||
<option name="languageVersion" value="2.0" />
|
||||
</component>
|
||||
</project>
|
||||
18
.idea/libraries/KotlinJavaRuntime.xml
generated
18
.idea/libraries/KotlinJavaRuntime.xml
generated
@@ -1,19 +1,33 @@
|
||||
<component name="libraryTable">
|
||||
<library name="KotlinJavaRuntime">
|
||||
<library name="KotlinJavaRuntime" type="repository">
|
||||
<properties maven-id="org.jetbrains.kotlin:kotlin-stdlib-jdk8:2.1.21" />
|
||||
<CLASSES>
|
||||
<root url="jar://$KOTLIN_BUNDLED$/lib/kotlin-stdlib.jar!/" />
|
||||
<root url="jar://$KOTLIN_BUNDLED$/lib/kotlin-reflect.jar!/" />
|
||||
<root url="jar://$KOTLIN_BUNDLED$/lib/kotlin-test.jar!/" />
|
||||
<root url="jar://$KOTLIN_BUNDLED$/lib/kotlin-stdlib-jdk7.jar!/" />
|
||||
<root url="jar://$KOTLIN_BUNDLED$/lib/kotlin-stdlib-jdk8.jar!/" />
|
||||
<root url="jar://$MAVEN_REPOSITORY$/org/jetbrains/kotlin/kotlin-stdlib-jdk8/2.1.21/kotlin-stdlib-jdk8-2.1.21.jar!/" />
|
||||
<root url="jar://$MAVEN_REPOSITORY$/org/jetbrains/kotlin/kotlin-stdlib/2.1.21/kotlin-stdlib-2.1.21.jar!/" />
|
||||
<root url="jar://$MAVEN_REPOSITORY$/org/jetbrains/annotations/13.0/annotations-13.0.jar!/" />
|
||||
<root url="jar://$MAVEN_REPOSITORY$/org/jetbrains/kotlin/kotlin-stdlib-jdk7/2.1.21/kotlin-stdlib-jdk7-2.1.21.jar!/" />
|
||||
</CLASSES>
|
||||
<JAVADOC />
|
||||
<JAVADOC>
|
||||
<root url="jar://$MAVEN_REPOSITORY$/org/jetbrains/kotlin/kotlin-stdlib-jdk8/2.1.21/kotlin-stdlib-jdk8-2.1.21-javadoc.jar!/" />
|
||||
<root url="jar://$MAVEN_REPOSITORY$/org/jetbrains/kotlin/kotlin-stdlib/2.1.21/kotlin-stdlib-2.1.21-javadoc.jar!/" />
|
||||
<root url="jar://$MAVEN_REPOSITORY$/org/jetbrains/annotations/13.0/annotations-13.0-javadoc.jar!/" />
|
||||
<root url="jar://$MAVEN_REPOSITORY$/org/jetbrains/kotlin/kotlin-stdlib-jdk7/2.1.21/kotlin-stdlib-jdk7-2.1.21-javadoc.jar!/" />
|
||||
</JAVADOC>
|
||||
<SOURCES>
|
||||
<root url="jar://$KOTLIN_BUNDLED$/lib/kotlin-stdlib-sources.jar!/" />
|
||||
<root url="jar://$KOTLIN_BUNDLED$/lib/kotlin-reflect-sources.jar!/" />
|
||||
<root url="jar://$KOTLIN_BUNDLED$/lib/kotlin-test-sources.jar!/" />
|
||||
<root url="jar://$KOTLIN_BUNDLED$/lib/kotlin-stdlib-jdk7-sources.jar!/" />
|
||||
<root url="jar://$KOTLIN_BUNDLED$/lib/kotlin-stdlib-jdk8-sources.jar!/" />
|
||||
<root url="jar://$MAVEN_REPOSITORY$/org/jetbrains/kotlin/kotlin-stdlib-jdk8/2.1.21/kotlin-stdlib-jdk8-2.1.21-sources.jar!/" />
|
||||
<root url="jar://$MAVEN_REPOSITORY$/org/jetbrains/kotlin/kotlin-stdlib/2.1.21/kotlin-stdlib-2.1.21-sources.jar!/" />
|
||||
<root url="jar://$MAVEN_REPOSITORY$/org/jetbrains/annotations/13.0/annotations-13.0-sources.jar!/" />
|
||||
<root url="jar://$MAVEN_REPOSITORY$/org/jetbrains/kotlin/kotlin-stdlib-jdk7/2.1.21/kotlin-stdlib-jdk7-2.1.21-sources.jar!/" />
|
||||
</SOURCES>
|
||||
</library>
|
||||
</component>
|
||||
2
.idea/misc.xml
generated
2
.idea/misc.xml
generated
@@ -1,6 +1,6 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project version="4">
|
||||
<component name="ProjectRootManager" version="2" languageLevel="JDK_1_8" default="true" project-jdk-name="1.8.0_242" project-jdk-type="JavaSDK">
|
||||
<component name="ProjectRootManager" version="2" languageLevel="JDK_21" default="true" project-jdk-name="21" project-jdk-type="JavaSDK">
|
||||
<output url="file://$PROJECT_DIR$/out" />
|
||||
</component>
|
||||
</project>
|
||||
1
.idea/modules.xml
generated
1
.idea/modules.xml
generated
@@ -4,6 +4,7 @@
|
||||
<modules>
|
||||
<module fileurl="file://$PROJECT_DIR$/BuildJAR_TerrarumSansBitmap.iml" filepath="$PROJECT_DIR$/BuildJAR_TerrarumSansBitmap.iml" />
|
||||
<module fileurl="file://$PROJECT_DIR$/FontTestGDX/FontTestGDX.iml" filepath="$PROJECT_DIR$/FontTestGDX/FontTestGDX.iml" />
|
||||
<module fileurl="file://$PROJECT_DIR$/OTFbuild/OTFbuild.iml" filepath="$PROJECT_DIR$/OTFbuild/OTFbuild.iml" />
|
||||
</modules>
|
||||
</component>
|
||||
</project>
|
||||
140
.idea/workspace.xml
generated
140
.idea/workspace.xml
generated
@@ -9,31 +9,10 @@
|
||||
<option name="autoReloadType" value="SELECTIVE" />
|
||||
</component>
|
||||
<component name="ChangeListManager">
|
||||
<list default="true" id="22c5bc80-996c-4846-b173-7dc8c2096fe3" name="Default" comment="">
|
||||
<list default="true" id="22c5bc80-996c-4846-b173-7dc8c2096fe3" name="Default" comment="why are you still looking for tga.gz">
|
||||
<change beforePath="$PROJECT_DIR$/.idea/modules.xml" beforeDir="false" afterPath="$PROJECT_DIR$/.idea/modules.xml" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/.idea/workspace.xml" beforeDir="false" afterPath="$PROJECT_DIR$/.idea/workspace.xml" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/FontTestGDX/src/FontTestGDX.kt" beforeDir="false" afterPath="$PROJECT_DIR$/FontTestGDX/src/FontTestGDX.kt" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/assets/cjkpunct.tga" beforeDir="false" afterPath="$PROJECT_DIR$/assets/cjkpunct.tga" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/assets/currencies_variable.tga" beforeDir="false" afterPath="$PROJECT_DIR$/assets/currencies_variable.tga" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/assets/futhark.tga" beforeDir="false" afterPath="$PROJECT_DIR$/assets/futhark.tga" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/assets/latinExtC_variable.tga" beforeDir="false" afterPath="$PROJECT_DIR$/assets/latinExtC_variable.tga" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/assets/richtext_furigana.tga" beforeDir="false" afterPath="$PROJECT_DIR$/assets/richtext_furigana.tga" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/assets/typewriter/typewriter_intl_qwerty.tga" beforeDir="false" afterPath="$PROJECT_DIR$/assets/typewriter/typewriter_intl_qwerty.tga" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/assets/typewriter/typewriter_ko_3set-390.tga" beforeDir="false" afterPath="$PROJECT_DIR$/assets/typewriter/typewriter_ko_3set-390.tga" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/assets/wenquanyi.tga.gz" beforeDir="false" afterPath="$PROJECT_DIR$/assets/wenquanyi.tga.gz" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/font_drawing_template.png" beforeDir="false" afterPath="$PROJECT_DIR$/font_drawing_template.png" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/glyph_height_pos_annotation.png" beforeDir="false" afterPath="$PROJECT_DIR$/glyph_height_pos_annotation.png" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/samples/wikipedia_x86.png" beforeDir="false" afterPath="$PROJECT_DIR$/samples/wikipedia_x86.png" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/terrarum_sans_cyrillic_2.png" beforeDir="false" afterPath="$PROJECT_DIR$/terrarum_sans_cyrillic_2.png" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/testing.PNG" beforeDir="false" afterPath="$PROJECT_DIR$/testing.PNG" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/testtext.txt" beforeDir="false" afterPath="$PROJECT_DIR$/testtext.txt" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/width_bit_encoding_annotated.png" beforeDir="false" afterPath="$PROJECT_DIR$/width_bit_encoding_annotated.png" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/work_files/typewriter_input/alphnum_glyphs_master.kra" beforeDir="false" afterPath="$PROJECT_DIR$/work_files/typewriter_input/alphnum_glyphs_master.kra" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/work_files/typewriter_input/alphnum_glyphs_resized.kra" beforeDir="false" afterPath="$PROJECT_DIR$/work_files/typewriter_input/alphnum_glyphs_resized.kra" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/work_files/typewriter_input/hangul_3set_glyphs_master.kra" beforeDir="false" afterPath="$PROJECT_DIR$/work_files/typewriter_input/hangul_3set_glyphs_master.kra" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/work_files/typewriter_input/typewriter_input_template.psd" beforeDir="false" afterPath="$PROJECT_DIR$/work_files/typewriter_input/typewriter_input_template.psd" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/work_files/typewriter_input/typewriter_intl_qwerty.psd" beforeDir="false" afterPath="$PROJECT_DIR$/work_files/typewriter_input/typewriter_intl_qwerty.psd" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/work_files/typewriter_input/typewriter_ko_3set-390.psd" beforeDir="false" afterPath="$PROJECT_DIR$/work_files/typewriter_input/typewriter_ko_3set-390.psd" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/work_files/typewriter_input/typewriter_ko_3set_glyphs_resized.kra" beforeDir="false" afterPath="$PROJECT_DIR$/work_files/typewriter_input/typewriter_ko_3set_glyphs_resized.kra" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/src/net/torvald/terrarumsansbitmap/gdx/TerrarumSansBitmap.kt" beforeDir="false" afterPath="$PROJECT_DIR$/src/net/torvald/terrarumsansbitmap/gdx/TerrarumSansBitmap.kt" afterDir="false" />
|
||||
</list>
|
||||
<option name="SHOW_DIALOG" value="false" />
|
||||
<option name="HIGHLIGHT_CONFLICTS" value="true" />
|
||||
@@ -43,13 +22,31 @@
|
||||
<component name="FileTemplateManagerImpl">
|
||||
<option name="RECENT_TEMPLATES">
|
||||
<list>
|
||||
<option value="Kotlin Class" />
|
||||
<option value="Interface" />
|
||||
<option value="Class" />
|
||||
<option value="Kotlin Class" />
|
||||
</list>
|
||||
</option>
|
||||
</component>
|
||||
<component name="Git.Settings">
|
||||
<excluded-from-favorite>
|
||||
<branch-storage>
|
||||
<map>
|
||||
<entry type="LOCAL">
|
||||
<value>
|
||||
<list>
|
||||
<branch-info repo="$PROJECT_DIR$" source="master" />
|
||||
</list>
|
||||
</value>
|
||||
</entry>
|
||||
</map>
|
||||
</branch-storage>
|
||||
</excluded-from-favorite>
|
||||
<option name="RECENT_BRANCH_BY_REPOSITORY">
|
||||
<map>
|
||||
<entry key="$PROJECT_DIR$" value="ttf-otf-build-system" />
|
||||
</map>
|
||||
</option>
|
||||
<option name="RECENT_GIT_ROOT_PATH" value="$PROJECT_DIR$" />
|
||||
</component>
|
||||
<component name="GitSEFilterConfiguration">
|
||||
@@ -60,9 +57,20 @@
|
||||
<filtered-out-file-type name="COMMIT_BY_MESSAGE" />
|
||||
</file-type-list>
|
||||
</component>
|
||||
<component name="HighlightingSettingsPerFile">
|
||||
<setting file="jar://$PROJECT_DIR$/lib/gdx-1.10.0-sources.jar!/com/badlogic/gdx/Input.java" root0="SKIP_INSPECTION" />
|
||||
<setting file="jar://$PROJECT_DIR$/lib/gdx-1.10.0-sources.jar!/com/badlogic/gdx/graphics/g2d/BitmapFont.java" root0="SKIP_INSPECTION" />
|
||||
</component>
|
||||
<component name="KotlinCompilerWorkspaceSettings">
|
||||
<option name="preciseIncrementalEnabled" value="false" />
|
||||
</component>
|
||||
<component name="MarkdownSettingsMigration">
|
||||
<option name="stateVersion" value="1" />
|
||||
</component>
|
||||
<component name="ProjectColorInfo">{
|
||||
"customColor": "",
|
||||
"associatedIndex": 2
|
||||
}</component>
|
||||
<component name="ProjectId" id="1aVE5t6KObkWt36lb07GBy1GY1S" />
|
||||
<component name="ProjectViewState">
|
||||
<option name="hideEmptyMiddlePackages" value="true" />
|
||||
@@ -70,13 +78,31 @@
|
||||
</component>
|
||||
<component name="PropertiesComponent">{
|
||||
"keyToString": {
|
||||
"Kotlin.FontTestGDXKt.executor": "Debug",
|
||||
"Kotlin.TypewriterGDXKt.executor": "Debug",
|
||||
"RunOnceActivity.CodyAccountHistoryMigration": "true",
|
||||
"RunOnceActivity.CodyAccountsIdsRefresh": "true",
|
||||
"RunOnceActivity.CodyAssignOrphanedChatsToActiveAccount": "true",
|
||||
"RunOnceActivity.CodyConvertUrlToCodebaseName": "true",
|
||||
"RunOnceActivity.CodyHistoryLlmMigration": "true",
|
||||
"RunOnceActivity.CodyMigrateChatHistory-v2": "true",
|
||||
"RunOnceActivity.CodyProjectSettingsMigration": "true",
|
||||
"RunOnceActivity.OpenProjectViewOnStart": "true",
|
||||
"RunOnceActivity.ToggleCodyToolWindowAfterMigration": "true",
|
||||
"RunOnceActivity.git.unshallow": "true",
|
||||
"git-widget-placeholder": "master",
|
||||
"kotlin-language-version-configured": "true",
|
||||
"last_opened_file_path": "/home/torvald/Documents/Terrarum-sans-bitmap",
|
||||
"project.structure.last.edited": "Artifacts",
|
||||
"project.structure.proportion": "0.0",
|
||||
"project.structure.side.proportion": "0.0"
|
||||
"project.structure.last.edited": "Modules",
|
||||
"project.structure.proportion": "0.15",
|
||||
"project.structure.side.proportion": "0.20724516",
|
||||
"settings.editor.selected.configurable": "project.kotlinCompiler"
|
||||
}
|
||||
}</component>
|
||||
<component name="RecentsManager">
|
||||
<key name="CopyFile.RECENT_KEYS">
|
||||
<recent name="$PROJECT_DIR$" />
|
||||
</key>
|
||||
<key name="MoveFile.RECENT_KEYS">
|
||||
<recent name="C:\Users\minjaesong\Documents\Terrarum-sans-bitmap\" />
|
||||
<recent name="C:\Users\minjaesong\Documents\Terrarum-sans-bitmap" />
|
||||
@@ -90,6 +116,15 @@
|
||||
<option name="Make" enabled="true" />
|
||||
</method>
|
||||
</configuration>
|
||||
<configuration default="true" type="#org.jetbrains.idea.devkit.run.PluginConfigurationType">
|
||||
<module name="" />
|
||||
<option name="VM_PARAMETERS" value="-Xmx512m -Xms256m -XX:MaxPermSize=250m -ea" />
|
||||
<option name="PROGRAM_PARAMETERS" />
|
||||
<predefined_log_file enabled="true" id="idea.log" />
|
||||
<method v="2">
|
||||
<option name="Make" enabled="true" />
|
||||
</method>
|
||||
</configuration>
|
||||
<configuration default="true" type="executeSpecs" factoryName="Gauge Execution">
|
||||
<setting name="environment" value="" />
|
||||
<setting name="specsToExecute" value="" />
|
||||
@@ -148,15 +183,6 @@
|
||||
<option name="Make" enabled="true" />
|
||||
</method>
|
||||
</configuration>
|
||||
<configuration default="true" type="#org.jetbrains.idea.devkit.run.PluginConfigurationType">
|
||||
<module name="" />
|
||||
<option name="VM_PARAMETERS" value="-Xmx512m -Xms256m -XX:MaxPermSize=250m -ea" />
|
||||
<option name="PROGRAM_PARAMETERS" />
|
||||
<predefined_log_file enabled="true" id="idea.log" />
|
||||
<method v="2">
|
||||
<option name="Make" enabled="true" />
|
||||
</method>
|
||||
</configuration>
|
||||
<recent_temporary>
|
||||
<list>
|
||||
<item itemvalue="Kotlin.FontTestGDXKt" />
|
||||
@@ -176,6 +202,39 @@
|
||||
<option name="presentableId" value="Default" />
|
||||
<updated>1497950823354</updated>
|
||||
</task>
|
||||
<task id="LOCAL-00001" summary="Old hangul rendering fix">
|
||||
<option name="closed" value="true" />
|
||||
<created>1705647715000</created>
|
||||
<option name="number" value="00001" />
|
||||
<option name="presentableId" value="LOCAL-00001" />
|
||||
<option name="project" value="LOCAL" />
|
||||
<updated>1705647715000</updated>
|
||||
</task>
|
||||
<task id="LOCAL-00002" summary="fix: characters not on overriden charset would not print">
|
||||
<option name="closed" value="true" />
|
||||
<created>1726151824465</created>
|
||||
<option name="number" value="00002" />
|
||||
<option name="presentableId" value="LOCAL-00002" />
|
||||
<option name="project" value="LOCAL" />
|
||||
<updated>1726151824465</updated>
|
||||
</task>
|
||||
<task id="LOCAL-00003" summary="moving assets inside classpath">
|
||||
<option name="closed" value="true" />
|
||||
<created>1771460240293</created>
|
||||
<option name="number" value="00003" />
|
||||
<option name="presentableId" value="LOCAL-00003" />
|
||||
<option name="project" value="LOCAL" />
|
||||
<updated>1771460240293</updated>
|
||||
</task>
|
||||
<task id="LOCAL-00004" summary="why are you still looking for tga.gz">
|
||||
<option name="closed" value="true" />
|
||||
<created>1771551906182</created>
|
||||
<option name="number" value="00004" />
|
||||
<option name="presentableId" value="LOCAL-00004" />
|
||||
<option name="project" value="LOCAL" />
|
||||
<updated>1771551906182</updated>
|
||||
</task>
|
||||
<option name="localTasksCounter" value="5" />
|
||||
<servers />
|
||||
</component>
|
||||
<component name="TodoView">
|
||||
@@ -198,6 +257,13 @@
|
||||
</map>
|
||||
</option>
|
||||
</component>
|
||||
<component name="VcsManagerConfiguration">
|
||||
<MESSAGE value="Old hangul rendering fix" />
|
||||
<MESSAGE value="fix: characters not on overriden charset would not print" />
|
||||
<MESSAGE value="moving assets inside classpath" />
|
||||
<MESSAGE value="why are you still looking for tga.gz" />
|
||||
<option name="LAST_COMMIT_MESSAGE" value="why are you still looking for tga.gz" />
|
||||
</component>
|
||||
<component name="XSLT-Support.FileAssociations.UIState">
|
||||
<expand />
|
||||
<select />
|
||||
|
||||
@@ -1,13 +1,15 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<module type="JAVA_MODULE" version="4">
|
||||
<component name="NewModuleRootManager" inherit-compiler-output="true">
|
||||
<component name="NewModuleRootManager">
|
||||
<output url="file://$MODULE_DIR$/out/production/BuildJAR_TerrarumSansBitmap" />
|
||||
<output-test url="file://$MODULE_DIR$/out/test/BuildJAR_TerrarumSansBitmap" />
|
||||
<exclude-output />
|
||||
<content url="file://$MODULE_DIR$">
|
||||
<sourceFolder url="file://$MODULE_DIR$/src" isTestSource="false" />
|
||||
</content>
|
||||
<orderEntry type="jdk" jdkName="1.8.0_242" jdkType="JavaSDK" />
|
||||
<orderEntry type="inheritedJdk" />
|
||||
<orderEntry type="sourceFolder" forTests="false" />
|
||||
<orderEntry type="library" name="KotlinJavaRuntime" level="project" />
|
||||
<orderEntry type="library" scope="PROVIDED" name="lib" level="project" />
|
||||
<orderEntry type="library" name="lib" level="project" />
|
||||
</component>
|
||||
</module>
|
||||
82
CLAUDE.md
Normal file
82
CLAUDE.md
Normal file
@@ -0,0 +1,82 @@
|
||||
# CLAUDE.md
|
||||
|
||||
This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository.
|
||||
|
||||
## Development Commands
|
||||
|
||||
### Building the JAR
|
||||
The project uses IntelliJ IDEA project files (`.iml`) for building. Build the main library:
|
||||
- Main library JAR: `lib/TerrarumSansBitmap.jar`
|
||||
- Font test application JAR: `FontDemoGDX.jar`
|
||||
|
||||
### Testing Font Rendering
|
||||
Run the font test application:
|
||||
```bash
|
||||
java -jar FontDemoGDX.jar
|
||||
```
|
||||
The test application demonstrates font rendering with text from `demotext_unaligned.txt` and outputs to `demo.PNG`.
|
||||
|
||||
### Key Development Files
|
||||
- **Source code**: `src/net/torvald/terrarumsansbitmap/`
|
||||
- **Font assets**: `assets/` directory (TGA format with alpha channel)
|
||||
- **Test text**: `demotext.txt`, `demotext_unaligned.txt`, `testtext.txt`
|
||||
- **Demo output**: Generated PNG files for visual verification
|
||||
|
||||
## Architecture Overview
|
||||
|
||||
### Core Components
|
||||
|
||||
**TerrarumSansBitmap** (`src/net/torvald/terrarumsansbitmap/gdx/TerrarumSansBitmap.kt`)
|
||||
- Main font class extending LibGDX's BitmapFont
|
||||
- Handles font asset loading from TGA sprite sheets
|
||||
- Manages variable-width character rendering with complex glyph tagging system
|
||||
- Supports multiple writing systems (Latin, CJK, Cyrillic, etc.)
|
||||
|
||||
**MovableType** (`src/net/torvald/terrarumsansbitmap/MovableType.kt`)
|
||||
- Advanced typesetting engine with justified text layout
|
||||
- Implements line-breaking, hyphenation, and kerning
|
||||
- Supports multiple typesetting strategies (justified, ragged, centered)
|
||||
- Handles complex text shaping for international scripts
|
||||
|
||||
**GlyphProps** (`src/net/torvald/terrarumsansbitmap/GlyphProps.kt`)
|
||||
- Defines glyph properties including width, diacritics anchors, alignment
|
||||
- Manages kerning data and special rendering directives
|
||||
- Handles complex glyph tagging system for font behavior
|
||||
|
||||
### Font Asset System
|
||||
|
||||
**Glyph Encoding**
|
||||
- Font data stored in TGA sprite sheets with embedded metadata
|
||||
- Width encoded in binary dots on rightmost column
|
||||
- Complex tagging system for diacritics, kerning, and special behaviors
|
||||
- Variable-width sheets use `_variable` naming convention
|
||||
|
||||
**Character Support**
|
||||
- Latin scripts with full diacritics support
|
||||
- CJK ideographs (Chinese variant)
|
||||
- Korean Hangul with syllable composition
|
||||
- Cyrillic with Bulgarian/Serbian variants (requires control characters U+FFFC1, U+FFFC2)
|
||||
- Devanagari, Tamil with ligature support
|
||||
- Many other scripts (see assets directory)
|
||||
|
||||
**Typewriter Font**
|
||||
- Separate typewriter bitmap font in `src/net/torvald/terrarumtypewriterbitmap/`
|
||||
- Includes audio feedback system with typing sounds
|
||||
- Supports international QWERTY and Korean 3-set layouts
|
||||
|
||||
### Key Technical Details
|
||||
|
||||
**Color Coding System**
|
||||
- Uses Unicode private use area for color codes
|
||||
- Utility functions: `GameFontBase.toColorCode()` for ARGB4444 format
|
||||
- U+100000 disables color codes
|
||||
|
||||
**Korean Hangul Assembly**
|
||||
- Decomposes Unicode Hangul into jamo components
|
||||
- Assembles glyphs from initial/medial/final sprite pieces
|
||||
- Supports modern Hangul range (U+AC00-U+D7A3)
|
||||
|
||||
**Font Metrics**
|
||||
- Variable-width sheets parse glyph tags from sprite metadata
|
||||
- Fixed-width sheets: `cjkpunct` (10px), `kana`/`hangul_johab` (12px), `wenquanyi` (16px)
|
||||
- Diacritics positioning via anchor point system
|
||||
@@ -150,7 +150,7 @@ To implement those, this two extra code points are needed, which are provided in
|
||||
|
||||
For working examples, take a note at the bengali sprite sheet.
|
||||
|
||||
This tag can be used as a general "replace this with these" directive, as long as you're replacing it into two letters. This directive is exploited to construct dutch ligature "IJ" (U+0132 and U+0133), in the sheet LatinExtA.
|
||||
This tag might be exploited as a general "replace this with these" directive, as long as you're replacing it into two letters. Such construction is FORBIDDEN due to diacritics incompatibility. Use Compiler Directives for such purposes.
|
||||
|
||||
Also note that the font compiler will not "stack" these diacritics.
|
||||
|
||||
@@ -170,7 +170,7 @@ Keming Machine Tags define the rough shape of the glyph. Please read `keming_mac
|
||||
## Technical Limitations
|
||||
|
||||
- Each spritesheet is 4096x4096 maximum, which is a size of 4K Texture. However it is recommended to be smaller or equal to 1024x1024.
|
||||
- Glyphs exceeding 15px of width needs to be broken down with 2 or more characters. Wider sheets WILL NOT BE IMPLEMENTED, can't waste much pixels just for few superwide glyphs.
|
||||
- Glyphs exceeding 15px of width needs to be broken down with 2 or more characters, or use EXTRAWIDE spritesheets.
|
||||
- Due to how the compiler is coded, actual glyph must have alpha value of 255, the tags must have alpha values LESS THAN 255 (and obviously greater than zero). RGB plane of the TGA image doesn't do anything, keep it as #FFFFFF white.
|
||||
|
||||
## Implementation of the Korean writing system
|
||||
|
||||
@@ -5,9 +5,9 @@
|
||||
<content url="file://$MODULE_DIR$">
|
||||
<sourceFolder url="file://$MODULE_DIR$/src" isTestSource="false" />
|
||||
</content>
|
||||
<orderEntry type="jdk" jdkName="1.8.0_242" jdkType="JavaSDK" />
|
||||
<orderEntry type="inheritedJdk" />
|
||||
<orderEntry type="sourceFolder" forTests="false" />
|
||||
<orderEntry type="module" module-name="BuildJAR_TerrarumSansBitmap" />
|
||||
<orderEntry type="module" module-name="BuildJAR_TerrarumSansBitmap" scope="PROVIDED" />
|
||||
<orderEntry type="library" name="KotlinJavaRuntime" level="project" />
|
||||
<orderEntry type="library" name="lib" level="project" />
|
||||
</component>
|
||||
|
||||
@@ -45,7 +45,7 @@ class FontTestGDX : Game() {
|
||||
private lateinit var testtex: TextureRegion
|
||||
|
||||
override fun create() {
|
||||
font = TerrarumSansBitmap("./assets", debug = true, flipY = false, errorOnUnknownChar = false, shadowAlpha = 0.5f) // must test for two flipY cases
|
||||
font = TerrarumSansBitmap(debug = true, flipY = false, errorOnUnknownChar = false, shadowAlpha = 0.5f) // must test for two flipY cases
|
||||
// font.scale = 2
|
||||
// font.interchar = 1
|
||||
|
||||
|
||||
@@ -39,7 +39,6 @@ class TypewriterGDX(val width: Int, val height: Int, val cols: Int, val hmargin:
|
||||
|
||||
override fun create() {
|
||||
font = TerrarumTypewriterBitmap(
|
||||
"./assets/typewriter",
|
||||
StringReader(
|
||||
"""ko_kr_3set-390_typewriter,typewriter_ko_3set-390.tga,16
|
||||
|en_intl_qwerty_typewriter,typewriter_intl_qwerty.tga,0
|
||||
@@ -61,17 +60,17 @@ class TypewriterGDX(val width: Int, val height: Int, val cols: Int, val hmargin:
|
||||
inputStrober = InputStrober(this)
|
||||
|
||||
try {
|
||||
sndMovingkey = Gdx.audio.newSound(Gdx.files.internal("assets/typewriter/audio/movingkey.wav"))
|
||||
sndDeadkey = Gdx.audio.newSound(Gdx.files.internal("assets/typewriter/audio/deadkey.wav"))
|
||||
sndShiftin = Gdx.audio.newSound(Gdx.files.internal("assets/typewriter/audio/shiftin.wav"))
|
||||
sndShiftout = Gdx.audio.newSound(Gdx.files.internal("assets/typewriter/audio/shiftout.wav"))
|
||||
sndSpace = Gdx.audio.newSound(Gdx.files.internal("assets/typewriter/audio/space.wav"))
|
||||
sndMovingkey = Gdx.audio.newSound(Gdx.files.classpath("assets/typewriter/audio/movingkey.wav"))
|
||||
sndDeadkey = Gdx.audio.newSound(Gdx.files.classpath("assets/typewriter/audio/deadkey.wav"))
|
||||
sndShiftin = Gdx.audio.newSound(Gdx.files.classpath("assets/typewriter/audio/shiftin.wav"))
|
||||
sndShiftout = Gdx.audio.newSound(Gdx.files.classpath("assets/typewriter/audio/shiftout.wav"))
|
||||
sndSpace = Gdx.audio.newSound(Gdx.files.classpath("assets/typewriter/audio/space.wav"))
|
||||
|
||||
sndCRs = Array(6) {
|
||||
Gdx.audio.newSound(Gdx.files.internal("assets/typewriter/audio/cr$it.wav"))
|
||||
Gdx.audio.newSound(Gdx.files.classpath("assets/typewriter/audio/cr$it.wav"))
|
||||
}
|
||||
|
||||
sndLF = Gdx.audio.newSound(Gdx.files.internal("assets/typewriter/audio/crlf.wav"))
|
||||
sndLF = Gdx.audio.newSound(Gdx.files.classpath("assets/typewriter/audio/crlf.wav"))
|
||||
}
|
||||
catch (e: GdxRuntimeException) {
|
||||
e.printStackTrace()
|
||||
|
||||
67
LICENSE.md
67
LICENSE.md
@@ -1,19 +1,54 @@
|
||||
Copyright (c) 2017-2024 CuriousTorvald (minjaesong)
|
||||
Copyright (c) 2017-2026 CuriousTorvald (curioustorvald.com), with Reserved Font Name TERRARUM.
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
|
||||
|
||||
This Font Software is licensed under the SIL Open Font License, Version 1.1.
|
||||
This license is copied below, and is also available with a FAQ at:
|
||||
https://openfontlicense.org
|
||||
|
||||
-———————————————————————
|
||||
SIL OPEN FONT LICENSE Version 1.1 - 26 February 2007
|
||||
-———————————————————————
|
||||
|
||||
PREAMBLE
|
||||
|
||||
The goals of the Open Font License (OFL) are to stimulate worldwide development of collaborative font projects, to support the font creation efforts of academic and linguistic communities, and to provide a free and open framework in which fonts may be shared and improved in partnership with others.
|
||||
|
||||
The OFL allows the licensed fonts to be used, studied, modified and redistributed freely as long as they are not sold by themselves. The fonts, including any derivative works, can be bundled, embedded, redistributed and/or sold with any software provided that any reserved names are not used by derivative works. The fonts and derivatives, however, cannot be released under any other type of license. The requirement for fonts to remain under this license does not apply to any document created using the fonts or their derivatives.
|
||||
DEFINITIONS
|
||||
|
||||
“Font Software” refers to the set of files released by the Copyright Holder(s) under this license and clearly marked as such. This may include source files, build scripts and documentation.
|
||||
|
||||
“Reserved Font Name” refers to any names specified as such after the copyright statement(s).
|
||||
|
||||
“Original Version” refers to the collection of Font Software components as distributed by the Copyright Holder(s).
|
||||
|
||||
“Modified Version” refers to any derivative made by adding to, deleting, or substituting – in part or in whole – any of the components of the Original Version, by changing formats or by porting the Font Software to a new environment.
|
||||
|
||||
“Author” refers to any designer, engineer, programmer, technical writer or other person who contributed to the Font Software.
|
||||
PERMISSION & CONDITIONS
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of the Font Software, to use, study, copy, merge, embed, modify, redistribute, and sell modified and unmodified copies of the Font Software, subject to the following conditions:
|
||||
|
||||
Neither the Font Software nor any of its individual components, in Original or Modified Versions, may be sold by itself.
|
||||
|
||||
Original or Modified Versions of the Font Software may be bundled, redistributed and/or sold with any software, provided that each copy contains the above copyright notice and this license. These can be included either as stand-alone text files, human-readable headers or in the appropriate machine-readable metadata fields within text or binary files as long as those fields can be easily viewed by the user.
|
||||
|
||||
No Modified Version of the Font Software may use the Reserved Font Name(s) unless explicit written permission is granted by the corresponding Copyright Holder. This restriction only applies to the primary font name as presented to the users.
|
||||
|
||||
The name(s) of the Copyright Holder(s) or the Author(s) of the Font Software shall not be used to promote, endorse or advertise any Modified Version, except to acknowledge the contribution(s) of the Copyright Holder(s) and the Author(s) or with their explicit written permission.
|
||||
|
||||
The Font Software, modified or unmodified, in part or in whole, must be distributed entirely under this license, and must not be distributed under any other license. The requirement for fonts to remain under this license does not apply to any document created using the Font Software.
|
||||
|
||||
TERMINATION
|
||||
|
||||
This license becomes null and void if any of the above conditions are not met.
|
||||
DISCLAIMER
|
||||
|
||||
THE FONT SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO ANY WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT OF COPYRIGHT, PATENT, TRADEMARK, OR OTHER RIGHT. IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, INCLUDING ANY GENERAL, SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL DAMAGES, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF THE USE OR INABILITY TO USE THE FONT SOFTWARE OR FROM OTHER DEALINGS IN THE FONT SOFTWARE.
|
||||
328
OTFbuild/CLAUDE.md
Normal file
328
OTFbuild/CLAUDE.md
Normal file
@@ -0,0 +1,328 @@
|
||||
# OTFbuild
|
||||
|
||||
Python toolchain that builds an OpenType (CFF) and Web Open Font (WOFF2) font from the TGA sprite sheets used by the bitmap font engine.
|
||||
|
||||
## Building
|
||||
|
||||
```bash
|
||||
# builds both OTF and WOFF2
|
||||
make all
|
||||
```
|
||||
|
||||
## Debugging with HarfBuzz
|
||||
|
||||
Install `uharfbuzz` for shaping tests:
|
||||
|
||||
```bash
|
||||
pip install uharfbuzz
|
||||
```
|
||||
|
||||
Shape text and inspect glyph substitutions, advances, and positioning:
|
||||
|
||||
```python
|
||||
import uharfbuzz as hb
|
||||
from fontTools.ttLib import TTFont
|
||||
|
||||
with open('OTFbuild/TerrarumSansBitmap.otf', 'rb') as f:
|
||||
font_data = f.read()
|
||||
|
||||
blob = hb.Blob(font_data)
|
||||
face = hb.Face(blob)
|
||||
font = hb.Font(face)
|
||||
|
||||
text = "ऐतिहासिक"
|
||||
buf = hb.Buffer()
|
||||
buf.add_str(text)
|
||||
buf.guess_segment_properties()
|
||||
hb.shape(font, buf)
|
||||
|
||||
ttfont = TTFont('OTFbuild/TerrarumSansBitmap.otf')
|
||||
glyph_order = ttfont.getGlyphOrder()
|
||||
|
||||
for info, pos in zip(buf.glyph_infos, buf.glyph_positions):
|
||||
name = glyph_order[info.codepoint]
|
||||
print(f" {name} advance=({pos.x_advance},{pos.y_advance}) cluster={info.cluster}")
|
||||
```
|
||||
|
||||
Key things to check:
|
||||
- **advance=(0,0)** on a visible character means the glyph is zero-width (likely missing outline or failed GSUB substitution)
|
||||
- **glyph name starts with `uF0`** means GSUB substituted to an internal PUA form (expected for Devanagari consonants, Hangul jamo variants, etc.)
|
||||
- **cluster** groups glyphs that originated from the same input character(s)
|
||||
|
||||
### Inspecting GSUB tables
|
||||
|
||||
```python
|
||||
from fontTools.ttLib import TTFont
|
||||
|
||||
font = TTFont('OTFbuild/TerrarumSansBitmap.otf')
|
||||
gsub = font['GSUB']
|
||||
|
||||
# List scripts and their features
|
||||
for sr in gsub.table.ScriptList.ScriptRecord:
|
||||
tag = sr.ScriptTag
|
||||
if sr.Script.DefaultLangSys:
|
||||
for idx in sr.Script.DefaultLangSys.FeatureIndex:
|
||||
fr = gsub.table.FeatureList.FeatureRecord[idx]
|
||||
print(f" {tag}/{fr.FeatureTag}: lookups={fr.Feature.LookupListIndex}")
|
||||
|
||||
# Inspect a specific lookup's substitution mappings
|
||||
lookup = gsub.table.LookupList.Lookup[18] # e.g. DevaConsonantMap
|
||||
for st in lookup.SubTable:
|
||||
for src, dst in st.mapping.items():
|
||||
print(f" {src} -> {dst}")
|
||||
```
|
||||
|
||||
### Checking glyph outlines and metrics
|
||||
|
||||
```python
|
||||
font = TTFont('OTFbuild/TerrarumSansBitmap.otf')
|
||||
hmtx = font['hmtx']
|
||||
cff = font['CFF ']
|
||||
|
||||
name = 'uni0915' # Devanagari KA
|
||||
w, lsb = hmtx[name]
|
||||
cs = cff.cff.topDictIndex[0].CharStrings[name]
|
||||
cs.decompile()
|
||||
has_outlines = len(cs.program) > 2 # more than just width + endchar
|
||||
print(f"{name}: advance={w}, has_outlines={has_outlines}")
|
||||
```
|
||||
|
||||
## Architecture
|
||||
|
||||
### Build pipeline (`font_builder.py`)
|
||||
|
||||
1. **Parse sheets** — `glyph_parser.py` reads each TGA sprite sheet, extracts per-glyph bitmaps and tag-column metadata (width, alignment, diacritics anchors, kerning data, directives)
|
||||
2. **Compose Hangul** — `hangul.py` assembles 11,172 precomposed Hangul syllables from jamo components and stores jamo variants in PUA for GSUB
|
||||
3. **Populate Devanagari** — consonants U+0915-0939 have width=0 in the sprite sheet (the Kotlin engine normalises them to PUA forms); the builder copies PUA glyph data back to the Unicode positions so they render without GSUB
|
||||
4. **Expand replacewith** — glyphs with the `replacewith` directive (opcode 0x80-0x87) are collected for GSUB multiple substitution (e.g. U+0910 -> U+090F U+0947)
|
||||
5. **Build glyph order and cmap** — PUA internal forms (0xF0000-0xF0FFF) get glyphs but no cmap entries
|
||||
6. **Trace bitmaps** — `bitmap_tracer.py` converts 1-bit bitmaps to CFF rectangle contours (50 units/pixel)
|
||||
7. **Set metrics** — hmtx, hhea, OS/2, head, name, post tables
|
||||
8. **OpenType features** — `opentype_features.py` generates feaLib code, compiled via `fontTools.feaLib`
|
||||
9. **Bitmap strike** — optional EBDT/EBLC at 20ppem via TTX import
|
||||
|
||||
### Module overview
|
||||
|
||||
| Module | Purpose |
|
||||
|---|---|
|
||||
| `build_font.py` | CLI entry point |
|
||||
| `font_builder.py` | Orchestrates the build pipeline |
|
||||
| `sheet_config.py` | Sheet indices, code ranges, index functions, metric constants, Hangul/Devanagari/Tamil/Sundanese constants |
|
||||
| `glyph_parser.py` | TGA sprite sheet parsing; extracts bitmaps and tag-column properties |
|
||||
| `tga_reader.py` | Low-level TGA image reader |
|
||||
| `bitmap_tracer.py` | Converts 1-bit bitmaps to CFF outlines (rectangle merging) |
|
||||
| `opentype_features.py` | Generates GSUB/GPOS feature code for feaLib |
|
||||
| `keming_machine.py` | Generates kerning pairs from glyph kern masks |
|
||||
| `hangul.py` | Hangul syllable composition and jamo GSUB data |
|
||||
| `otf2woff2.py` | OTF to WOFF2 wrapper |
|
||||
|
||||
### OpenType features generated (`opentype_features.py`)
|
||||
|
||||
- **ccmp** — replacewith expansions (DFLT); consonant-to-PUA mapping + vowel decompositions + anusvara upper (dev2/deva); vowel decompositions (tml2)
|
||||
- **kern** — pair positioning from `keming_machine.py`
|
||||
- **liga** — Latin ligatures (ff, fi, fl, ffi, ffl, st) and Armenian ligatures
|
||||
- **locl** — Bulgarian/Serbian Cyrillic alternates; Devanagari consonant-to-PUA mapping + vowel decompositions + anusvara upper (dev2/deva, duplicated from ccmp for DirectWrite compatibility)
|
||||
- **nukt, akhn, half, blwf, cjct, pres, blws, rphf, abvs, psts, calt** — Devanagari complex script shaping (all under both `script dev2` and `script deva`)
|
||||
- **pres** (tml2) — Tamil consonant+vowel ligatures
|
||||
- **pres** (sund) — Sundanese diacritic combinations
|
||||
- **ljmo, vjmo, tjmo** — Hangul jamo positional variants
|
||||
- **mark** — GPOS mark-to-base diacritics positioning
|
||||
- **mkmk** — GPOS mark-to-mark diacritics stacking (successive marks shift by H_DIACRITICS)
|
||||
|
||||
### Devanagari PUA mapping
|
||||
|
||||
The bitmap font engine normalises Devanagari consonants to internal PUA forms before rendering. The OTF builder mirrors this:
|
||||
|
||||
| Unicode range | PUA range | Purpose |
|
||||
|---|---|---|
|
||||
| U+0915-0939 | 0xF0140-0xF0164 | Base consonants |
|
||||
| U+0915-0939 +48 | 0xF0170-0xF0194 | Nukta forms (consonant + U+093C) |
|
||||
| U+0915-0939 +240 | 0xF0230-0xF0254 | Half forms (consonant + virama) |
|
||||
| U+0915-0939 +480 | 0xF0320-0xF0404 | RA-appended forms (consonant + virama + RA) |
|
||||
| U+0915-0939 +720 | 0xF0410-0xF04F4 | RA-appended half forms (consonant + virama + RA + virama) |
|
||||
|
||||
Mapping formula: `to_deva_internal(c)` = `c - 0x0915 + 0xF0140` for U+0915-0939.
|
||||
|
||||
### Script tag gotcha
|
||||
|
||||
When a script-specific feature exists in GSUB (e.g. `ccmp` under `dev2`), HarfBuzz uses **only** the script-specific lookups and does **not** fall back to the DFLT script's lookups for that feature. Any substitutions needed for a specific script must be registered under that script's tag.
|
||||
|
||||
### languagesystem and language records
|
||||
|
||||
The `languagesystem` declarations in the preamble control which script/language records are created in the font tables. Key rules:
|
||||
|
||||
- `languagesystem` declarations must be at the **top level** of the feature file, not inside any `feature` block. Putting them inside `feature aalt { }` is invalid feaLib syntax and causes silent compilation failure.
|
||||
- When a language-specific record exists (e.g. `dev2/MAR` from `languagesystem dev2 MAR;`), features registered under `script dev2;` only populate `dev2/dflt` — they are **not** automatically copied to `dev2/MAR`. The language record inherits only from DFLT, resulting in incomplete feature sets.
|
||||
- Only declare language-specific records when you have `locl` or other language-differentiated features. Otherwise, use only `languagesystem <script> dflt;` to avoid partial feature inheritance that breaks DirectWrite and CoreText.
|
||||
|
||||
### Inspecting feature registration per script
|
||||
|
||||
To verify that features are correctly registered under each script:
|
||||
|
||||
```python
|
||||
from fontTools.ttLib import TTFont
|
||||
|
||||
font = TTFont('OTFbuild/TerrarumSansBitmap.otf')
|
||||
gsub = font['GSUB']
|
||||
|
||||
for sr in gsub.table.ScriptList.ScriptRecord:
|
||||
tag = sr.ScriptTag
|
||||
if sr.Script.DefaultLangSys:
|
||||
feats = []
|
||||
for idx in sr.Script.DefaultLangSys.FeatureIndex:
|
||||
fr = gsub.table.FeatureList.FeatureRecord[idx]
|
||||
feats.append(fr.FeatureTag)
|
||||
print(f"{tag}/dflt: {' '.join(sorted(set(feats)))}")
|
||||
for lsr in (sr.Script.LangSysRecord or []):
|
||||
feats = []
|
||||
for idx in lsr.LangSys.FeatureIndex:
|
||||
fr = gsub.table.FeatureList.FeatureRecord[idx]
|
||||
feats.append(fr.FeatureTag)
|
||||
print(f"{tag}/{lsr.LangSysTag}: {' '.join(sorted(set(feats)))}")
|
||||
```
|
||||
|
||||
Expected output for dev2: `dev2/dflt: abvs akhn blwf blws calt ccmp cjct half liga locl nukt pres psts rphf`. If language-specific records (e.g. `dev2/MAR`) appear with only `ccmp liga`, the language records have incomplete feature inheritance — remove the corresponding `languagesystem` declaration.
|
||||
|
||||
### Debugging feature compilation failures
|
||||
|
||||
The build writes `debugout_features.fea` with the raw feature code before compilation. When compilation fails, inspect this file to find syntax errors. Common issues:
|
||||
|
||||
- **`languagesystem` inside a feature block** — must be at the top level
|
||||
- **Named lookup defined inside a feature block** — applies unconditionally to all input. Define the lookup outside the feature block and reference it via contextual rules inside.
|
||||
- **Glyph not in font** — a substitution references a glyph name that doesn't exist in the font's glyph order (e.g. a control character was removed)
|
||||
|
||||
### HarfBuzz Indic shaper (dev2) feature order
|
||||
|
||||
Understanding feature application order is critical for Devanagari debugging:
|
||||
|
||||
1. **Pre-reordering** (Unicode order): `ccmp`
|
||||
2. **Reordering**: HarfBuzz reorders pre-base matras (e.g. I-matra U+093F moves before the consonant)
|
||||
3. **Post-reordering**: `nukt` → `akhn` → `rphf` → `half` → `blwf` → `cjct` → `pres` → `abvs` → `blws` → `psts` → `haln` → `calt`
|
||||
4. **GPOS**: `kern` → `mark`/`abvm` → `mkmk`
|
||||
|
||||
Implication: GSUB rules that need to match pre-base matras adjacent to post-base marks (e.g. anusvara substitution triggered by I-matra) must go in `ccmp`, not `psts`, because reordering separates them.
|
||||
|
||||
### Cross-platform shaper differences (DirectWrite, CoreText, HarfBuzz)
|
||||
|
||||
The three major shapers behave differently for Devanagari. The font registers all Devanagari features under **both** `dev2` (new Indic) and `deva` (old Indic) script tags. HarfBuzz and DirectWrite use `dev2`; CoreText uses `deva`.
|
||||
|
||||
#### Script tag selection
|
||||
|
||||
| Shaper | Script tag used | Indic model |
|
||||
|---|---|---|
|
||||
| HarfBuzz | `dev2` | New Indic (ot-indic2) |
|
||||
| DirectWrite | `dev2` | New Indic |
|
||||
| CoreText | `deva` | Old Indic |
|
||||
|
||||
Both tags must exist, and all GSUB/GPOS features must be registered under both, otherwise CoreText silently breaks.
|
||||
|
||||
#### Feature order differences
|
||||
|
||||
**HarfBuzz (dev2, reference implementation)**:
|
||||
1. Pre-reordering: `locl` → `ccmp`
|
||||
2. Reordering (I-matra moves before consonant, reph moves to end)
|
||||
3. Post-reordering: `nukt` → `akhn` → `rphf` → `half` → `blwf` → `cjct` → `pres` → `abvs` → `blws` → `psts` → `haln` → `calt`
|
||||
4. GPOS: `kern` → `abvm` → `blwm`
|
||||
|
||||
**DirectWrite (dev2)**:
|
||||
- `locl` → `nukt` → `akhn` → `rphf` → `rkrf` → `blwf` → `half` → `vatu` → `cjct` → `pres` → `abvs` → `blws` → `psts` → `haln` → `calt`
|
||||
- GPOS: `kern` → `dist` → `abvm` → `blwm`
|
||||
- **Does NOT apply `ccmp`** for the dev2 script. All lookups that must run before `nukt` (e.g. consonant-to-PUA mapping, anusvara upper) must be registered under `locl` instead.
|
||||
|
||||
**CoreText (deva)**:
|
||||
- Applies `locl` and `ccmp`, but may apply `ccmp` **after** reordering (unlike HarfBuzz).
|
||||
- Post-reordering features same as above: `nukt` → `akhn` → `rphf` → ... → `abvs` → ... → `psts`
|
||||
- GPOS: `kern` → `abvm` (+ `mark`/`mkmk` if registered under `deva`)
|
||||
|
||||
#### Key behavioural differences
|
||||
|
||||
**1. ccmp timing (CoreText vs HarfBuzz)**
|
||||
|
||||
HarfBuzz applies `ccmp` in Unicode order (before reordering). CoreText may apply it after reordering. This breaks adjacency-based rules:
|
||||
|
||||
```
|
||||
# In ccmp — works on HarfBuzz (Unicode order: C + matra + anusvara):
|
||||
sub uni093F uni0902' lookup AnusvaraUpper; # I-matra + anusvara
|
||||
|
||||
# After reordering on CoreText: I-matra + [consonants] + anusvara
|
||||
# The I-matra and anusvara are no longer adjacent → rule fails
|
||||
```
|
||||
|
||||
**Fix**: duplicate these rules in `abvs` (post-reordering) with wildcard gaps:
|
||||
```
|
||||
sub uni093F @devaAny uni0902' lookup AnusvaraUpper;
|
||||
sub uni093F @devaAny @devaAny uni0902' lookup AnusvaraUpper;
|
||||
```
|
||||
|
||||
**2. Reph eligibility testing**
|
||||
|
||||
| Shaper | Method |
|
||||
|---|---|
|
||||
| HarfBuzz | Pattern-based (RA + halant + consonant at syllable start) |
|
||||
| DirectWrite | `would_substitute([RA, virama], rphf)` with **Unicode** codepoints |
|
||||
| CoreText | `would_substitute()` with Unicode codepoints (same as DW) |
|
||||
|
||||
The `rphf` feature must include a rule with the Unicode form of RA (`uni0930`), not just the PUA form. Otherwise DW and CT won't detect reph.
|
||||
|
||||
**3. Within-lookup glyph visibility (CoreText)**
|
||||
|
||||
In OpenType, a single lookup processes the glyph string left-to-right. Per spec, a substitution at position N should be visible when the lookup reaches position N+1. CoreText appears to **not** propagate substitutions within a single lookup pass to subsequent positions' backtrack context.
|
||||
|
||||
Example: two rules in one anonymous lookup:
|
||||
```
|
||||
sub @trigger uF010C' lookup ComplexReph; # rule at pos N: uF010C → uF010D
|
||||
sub uF010D uF016C' lookup AnusvaraLower; # rule at pos N+1: needs uF010D in backtrack
|
||||
```
|
||||
|
||||
On HarfBuzz/DirectWrite, rule 2 sees the updated `uF010D` at position N. On CoreText, it still sees the original `uF010C` → rule 2 fails to match.
|
||||
|
||||
**Fix**: split into separate **named lookups** so each runs as an independent pass:
|
||||
```
|
||||
lookup AbvsPass1 {
|
||||
sub @trigger uF010C' lookup ComplexReph;
|
||||
} AbvsPass1;
|
||||
lookup AbvsPass2 {
|
||||
sub uF010D uF016C' lookup AnusvaraLower;
|
||||
} AbvsPass2;
|
||||
feature abvs {
|
||||
script dev2; lookup AbvsPass1; lookup AbvsPass2;
|
||||
script deva; lookup AbvsPass1; lookup AbvsPass2;
|
||||
} abvs;
|
||||
```
|
||||
|
||||
**4. GPOS mark stacking heuristics**
|
||||
|
||||
When two marks share the same base without MarkToMark, each shaper applies different internal Y adjustments:
|
||||
|
||||
| Shaper | Internal Y shift |
|
||||
|---|---|
|
||||
| HarfBuzz | 0 (no heuristic) |
|
||||
| DirectWrite | -100 |
|
||||
| CoreText | -200 |
|
||||
|
||||
No single GPOS Y value satisfies all three. **Fix**: use explicit MarkToMark positioning (e.g. `AnusvaraToComplexReph`) which suppresses shaper heuristics and gives consistent results across all three.
|
||||
|
||||
**5. GPOS double-application with dev2+deva**
|
||||
|
||||
When both script tags exist, CoreText/DirectWrite may merge lookup lists from both scripts. Inline (anonymous) GPOS rules create separate lookups per script → cumulative positioning doubles. **Fix**: use **named lookups** for all GPOS contextual positioning so both scripts reference the same lookup index.
|
||||
|
||||
**6. mark/mkmk feature scoping**
|
||||
|
||||
The `mark` and `mkmk` features are registered under `deva` (for CoreText) but **not** `dev2`. Under `dev2`, all mark positioning goes through `abvm` instead. This prevents double-application on HarfBuzz/DirectWrite where `abvm` already contains the same mark/mkmk lookups.
|
||||
|
||||
```
|
||||
# GPOS features per script:
|
||||
# dev2/dflt: abvm kern
|
||||
# deva/dflt: abvm kern mark mkmk
|
||||
```
|
||||
|
||||
#### Practical rules
|
||||
|
||||
1. **Standalone lookups**: define all substitution/positioning lookups (e.g. `DevaConsonantMap`, `DevaVowelDecomp`, `ComplexReph`) **outside** any feature block, then reference from both `locl`/`ccmp` and script-specific features.
|
||||
2. **locl mirrors ccmp** for Devanagari: DirectWrite skips `ccmp`, so anything that must run early (consonant mapping, anusvara upper, vowel decomposition) must also be in `locl`.
|
||||
3. **abvs post-reordering fallbacks**: rules that depend on matra+anusvara adjacency (broken by reordering on CoreText) need wildcard-gap variants in `abvs`.
|
||||
4. **Separate lookup passes**: if rule B's backtrack context depends on rule A's output at an adjacent position, put them in separate named lookups. CoreText may not propagate within-pass substitutions.
|
||||
5. **Named GPOS lookups**: all contextual GPOS rules must use named lookups to avoid double-application across dev2/deva.
|
||||
6. **MarkToMark for multi-mark stacking**: never rely on shaper heuristics for positioning multiple marks on the same base — always provide explicit MarkToMark.
|
||||
|
||||
Source: [Microsoft Devanagari shaping spec](https://learn.microsoft.com/en-us/typography/script-development/devanagari)
|
||||
19
OTFbuild/Makefile
Normal file
19
OTFbuild/Makefile
Normal file
@@ -0,0 +1,19 @@
|
||||
PYTHON ?= python3
|
||||
ASSETS ?= ../src/assets
|
||||
OTF = TerrarumSansBitmap.otf
|
||||
WOFF2 = TerrarumSansBitmap.woff2
|
||||
|
||||
all: $(OTF) $(WOFF2)
|
||||
|
||||
$(OTF): $(wildcard $(ASSETS)/*.tga) build_font.py font_builder.py glyph_parser.py \
|
||||
bitmap_tracer.py tga_reader.py keming_machine.py hangul.py sheet_config.py \
|
||||
opentype_features.py
|
||||
$(PYTHON) build_font.py $(ASSETS) -o $@
|
||||
|
||||
$(WOFF2): $(OTF) otf2woff2.py
|
||||
$(PYTHON) otf2woff2.py $< $@
|
||||
|
||||
clean:
|
||||
rm -f $(OTF) $(WOFF2)
|
||||
|
||||
.PHONY: all clean
|
||||
97
OTFbuild/bitmap_tracer.py
Normal file
97
OTFbuild/bitmap_tracer.py
Normal file
@@ -0,0 +1,97 @@
|
||||
"""
|
||||
Convert 1-bit bitmap arrays to TrueType quadratic outlines.
|
||||
|
||||
Each set pixel becomes part of a rectangle contour drawn clockwise.
|
||||
Adjacent identical horizontal runs are merged vertically into rectangles.
|
||||
|
||||
Scale: x_left = col * SCALE, y_top = (BASELINE_ROW - row) * SCALE
|
||||
where BASELINE_ROW = 16 (ascent in pixels).
|
||||
"""
|
||||
|
||||
from typing import Dict, List, Tuple
|
||||
|
||||
import sheet_config as SC
|
||||
|
||||
SCALE = SC.SCALE
|
||||
BASELINE_ROW = 16 # pixels from top to baseline
|
||||
|
||||
|
||||
def trace_bitmap(bitmap, glyph_width_px):
|
||||
"""
|
||||
Convert a bitmap to a list of rectangle contours.
|
||||
|
||||
Each rectangle is ((x0, y0), (x1, y1)) in font units, where:
|
||||
- (x0, y0) is bottom-left
|
||||
- (x1, y1) is top-right
|
||||
|
||||
Returns list of (x0, y0, x1, y1) tuples representing rectangles.
|
||||
"""
|
||||
if not bitmap or not bitmap[0]:
|
||||
return []
|
||||
|
||||
h = len(bitmap)
|
||||
w = len(bitmap[0])
|
||||
|
||||
# Step 1: Find horizontal runs per row
|
||||
runs = [] # list of (row, col_start, col_end)
|
||||
for row in range(h):
|
||||
col = 0
|
||||
while col < w:
|
||||
if bitmap[row][col]:
|
||||
start = col
|
||||
while col < w and bitmap[row][col]:
|
||||
col += 1
|
||||
runs.append((row, start, col))
|
||||
else:
|
||||
col += 1
|
||||
|
||||
# Step 2: Merge vertically adjacent identical runs into rectangles
|
||||
rects = [] # (row_start, row_end, col_start, col_end)
|
||||
used = [False] * len(runs)
|
||||
|
||||
for i, (row, cs, ce) in enumerate(runs):
|
||||
if used[i]:
|
||||
continue
|
||||
# Try to extend this run downward
|
||||
row_end = row + 1
|
||||
j = i + 1
|
||||
while j < len(runs):
|
||||
r2, cs2, ce2 = runs[j]
|
||||
if r2 > row_end:
|
||||
break
|
||||
if r2 == row_end and cs2 == cs and ce2 == ce and not used[j]:
|
||||
used[j] = True
|
||||
row_end = r2 + 1
|
||||
j += 1
|
||||
rects.append((row, row_end, cs, ce))
|
||||
|
||||
# Step 3: Convert to font coordinates
|
||||
contours = []
|
||||
for row_start, row_end, col_start, col_end in rects:
|
||||
x0 = col_start * SCALE
|
||||
x1 = col_end * SCALE
|
||||
y_top = (BASELINE_ROW - row_start) * SCALE
|
||||
y_bottom = (BASELINE_ROW - row_end) * SCALE
|
||||
contours.append((x0, y_bottom, x1, y_top))
|
||||
|
||||
return contours
|
||||
|
||||
|
||||
def draw_glyph_to_pen(contours, pen, x_offset=0, y_offset=0):
|
||||
"""
|
||||
Draw rectangle contours to a TTGlyphPen or similar pen.
|
||||
Each rectangle is drawn as a clockwise closed contour (4 on-curve points).
|
||||
|
||||
x_offset/y_offset shift all contours (used for alignment positioning).
|
||||
"""
|
||||
for x0, y0, x1, y1 in contours:
|
||||
ax0 = x0 + x_offset
|
||||
ax1 = x1 + x_offset
|
||||
ay0 = y0 + y_offset
|
||||
ay1 = y1 + y_offset
|
||||
# Clockwise: bottom-left -> top-left -> top-right -> bottom-right
|
||||
pen.moveTo((ax0, ay0))
|
||||
pen.lineTo((ax0, ay1))
|
||||
pen.lineTo((ax1, ay1))
|
||||
pen.lineTo((ax1, ay0))
|
||||
pen.closePath()
|
||||
93
OTFbuild/build_font.py
Normal file
93
OTFbuild/build_font.py
Normal file
@@ -0,0 +1,93 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Terrarum Sans Bitmap OTF Builder v2 — Python + fonttools
|
||||
|
||||
Builds a TTF font with both vector-traced outlines (TrueType glyf)
|
||||
and embedded bitmap strike (EBDT/EBLC) from TGA sprite sheets.
|
||||
|
||||
Usage:
|
||||
python3 OTFbuild/build_font.py src/assets -o OTFbuild/TerrarumSansBitmap.otf
|
||||
|
||||
Options:
|
||||
--no-bitmap Skip EBDT/EBLC bitmap strike
|
||||
--no-features Skip GSUB/GPOS OpenType features
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import sys
|
||||
import os
|
||||
|
||||
# Add OTFbuild dir to path for imports
|
||||
sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
|
||||
|
||||
from font_builder import build_font
|
||||
|
||||
|
||||
def main():
|
||||
parser = argparse.ArgumentParser(
|
||||
description="Build Terrarum Sans Bitmap TTF from TGA sprite sheets"
|
||||
)
|
||||
parser.add_argument(
|
||||
"assets_dir",
|
||||
help="Path to assets directory containing TGA sprite sheets"
|
||||
)
|
||||
parser.add_argument(
|
||||
"-o", "--output",
|
||||
default="OTFbuild/TerrarumSansBitmap.otf",
|
||||
help="Output OTF file path (default: OTFbuild/TerrarumSansBitmap.otf)"
|
||||
)
|
||||
parser.add_argument(
|
||||
"--no-bitmap",
|
||||
action="store_true",
|
||||
help="Skip EBDT/EBLC bitmap strike"
|
||||
)
|
||||
parser.add_argument(
|
||||
"--no-features",
|
||||
action="store_true",
|
||||
help="Skip GSUB/GPOS OpenType features"
|
||||
)
|
||||
|
||||
args = parser.parse_args()
|
||||
|
||||
if not os.path.isdir(args.assets_dir):
|
||||
print(f"Error: assets directory not found: {args.assets_dir}", file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
# Ensure output directory exists
|
||||
output_dir = os.path.dirname(args.output)
|
||||
if output_dir:
|
||||
os.makedirs(output_dir, exist_ok=True)
|
||||
|
||||
print(f"Terrarum Sans Bitmap OTF Builder v2")
|
||||
print(f" Assets: {args.assets_dir}")
|
||||
print(f" Output: {args.output}")
|
||||
print()
|
||||
|
||||
build_font(
|
||||
assets_dir=args.assets_dir,
|
||||
output_path=args.output,
|
||||
no_bitmap=args.no_bitmap,
|
||||
no_features=args.no_features,
|
||||
)
|
||||
|
||||
# Run OpenType Sanitizer to catch issues browsers would reject
|
||||
try:
|
||||
import ots
|
||||
print("\nRunning OpenType Sanitizer...")
|
||||
result = ots.sanitize(args.output, capture_output=True)
|
||||
if result.returncode == 0:
|
||||
print(" OTS: passed")
|
||||
else:
|
||||
print(f" OTS: FAILED (exit code {result.returncode})", file=sys.stderr)
|
||||
if result.stderr:
|
||||
for line in result.stderr.decode().strip().splitlines():
|
||||
print(f" {line}", file=sys.stderr)
|
||||
sys.exit(1)
|
||||
except ImportError:
|
||||
print("\nWarning: opentype-sanitizer not installed, skipping OTS validation",
|
||||
file=sys.stderr)
|
||||
print(" Install with: pip install opentype-sanitizer", file=sys.stderr)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
BIN
OTFbuild/calligra_font_tests.odt
Normal file
BIN
OTFbuild/calligra_font_tests.odt
Normal file
Binary file not shown.
721
OTFbuild/font_builder.py
Normal file
721
OTFbuild/font_builder.py
Normal file
@@ -0,0 +1,721 @@
|
||||
"""
|
||||
Orchestrate fonttools TTFont assembly.
|
||||
|
||||
1. Parse all sheets -> glyphs dict
|
||||
2. Compose Hangul -> add to dict
|
||||
3. Expand replacewith directives
|
||||
4. Create glyph order and cmap
|
||||
5. Trace all bitmaps -> CFF charstrings
|
||||
6. Set hmtx, hhea, OS/2, head, name, post
|
||||
7. Generate and compile OpenType features via feaLib
|
||||
8. Add EBDT/EBLC bitmap strike at ppem=20
|
||||
9. Save OTF
|
||||
"""
|
||||
|
||||
import time
|
||||
from typing import Dict
|
||||
|
||||
from fontTools.fontBuilder import FontBuilder
|
||||
from fontTools.pens.t2CharStringPen import T2CharStringPen
|
||||
from fontTools.feaLib.builder import addOpenTypeFeatures
|
||||
from fontTools.ttLib import TTFont
|
||||
import io
|
||||
|
||||
from glyph_parser import ExtractedGlyph, GlyphProps, parse_all_sheets
|
||||
from hangul import compose_hangul, get_jamo_gsub_data, HANGUL_PUA_BASE
|
||||
from bitmap_tracer import trace_bitmap, draw_glyph_to_pen, SCALE, BASELINE_ROW
|
||||
from keming_machine import generate_kerning_pairs
|
||||
from opentype_features import generate_features, glyph_name
|
||||
import sheet_config as SC
|
||||
|
||||
FONT_VERSION = "1.15"
|
||||
|
||||
# Codepoints that get cmap entries (user-visible)
|
||||
# PUA forms used internally by GSUB get glyphs but NO cmap entries
|
||||
def _should_have_cmap(cp):
|
||||
"""Determine if a codepoint should have a cmap entry."""
|
||||
# Standard Unicode characters always get cmap entries
|
||||
if cp < 0xE000:
|
||||
return True
|
||||
# Custom sym PUA range
|
||||
if 0xE000 <= cp <= 0xE0FF:
|
||||
return True
|
||||
# Codestyle PUA
|
||||
if 0xF0520 <= cp <= 0xF057F:
|
||||
return True
|
||||
# Hangul syllables
|
||||
if 0xAC00 <= cp <= 0xD7A3:
|
||||
return True
|
||||
# Hangul compat jamo
|
||||
if 0x3130 <= cp <= 0x318F:
|
||||
return True
|
||||
# SMP characters (Enclosed Alphanumeric Supplement, Hentaigana, etc.)
|
||||
if 0x1F100 <= cp <= 0x1F1FF:
|
||||
return True
|
||||
if 0x1B000 <= cp <= 0x1B16F:
|
||||
return True
|
||||
# Unicode noncharacters — never map these (U+FFFE, U+FFFF are reserved;
|
||||
# format 4 cmap uses 0xFFFF as a sentinel, so mapping it causes OTS rejection)
|
||||
if cp >= 0xFFFE and cp <= 0xFFFF:
|
||||
return False
|
||||
# Everything in standard Unicode ranges (up to 0xFFFF plus SMP)
|
||||
if cp <= 0xFFFF:
|
||||
return True
|
||||
# Internal PUA forms — GSUB-only, no cmap
|
||||
if 0xF0000 <= cp <= 0xF0FFF:
|
||||
return False
|
||||
# Internal control characters
|
||||
if 0xFFE00 <= cp <= 0xFFFFF:
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
def _expand_replacewith(glyphs):
|
||||
"""
|
||||
Find glyphs with 'replacewith' directive and generate GSUB multiple
|
||||
substitution data. Returns list of (source_cp, [target_cp, ...]).
|
||||
|
||||
A replacewith glyph's extInfo contains up to 7 codepoints that the
|
||||
glyph expands to (e.g. U+01C7 "LJ" → [0x4C, 0x4A]).
|
||||
"""
|
||||
replacements = []
|
||||
for cp, g in glyphs.items():
|
||||
if g.props.is_pragma("replacewith"):
|
||||
targets = []
|
||||
count = g.props.required_ext_info_count()
|
||||
for i in range(count):
|
||||
val = g.props.ext_info[i]
|
||||
if val != 0:
|
||||
targets.append(val)
|
||||
if targets:
|
||||
replacements.append((cp, targets))
|
||||
return replacements
|
||||
|
||||
|
||||
def build_font(assets_dir, output_path, no_bitmap=False, no_features=False):
    """Build the complete OTF font.

    Pipeline (state is threaded through the steps via the `glyphs` dict,
    `glyph_order`, `glyph_set`, `mark_cps` and the COLR bookkeeping dicts,
    so step order matters):
      1.  Parse all TGA sheets into a {codepoint: ExtractedGlyph} dict.
      2.  Compose Hangul syllables and merge them in; backfill Devanagari
          consonant outlines from their internal PUA forms.
      3.  Expand 'replacewith' directives, compose fallback bitmaps for
          them, and collect combining-mark codepoints.
      4.  Build glyph order + cmap; detect coloured glyphs (COLR layers).
      5-6. Trace every 1-bit bitmap into CFF charstrings.
      7.  Set hmtx/hhea/OS2/head/name/post; build COLR/CPAL if needed.
      8.  Generate and compile GSUB/GPOS features via feaLib.
      9.  Embed an EBDT/EBLC bitmap strike at ppem=20.

    Args:
        assets_dir:  directory containing the TGA glyph sheets.
        output_path: destination .otf path (written at the end).
        no_bitmap:   skip step 9 (EBDT/EBLC strike) when True.
        no_features: skip step 8 (OpenType features) when True.

    Side effects: writes `output_path`; step 8 also writes
    'debugout_features.fea' to the current directory for debugging.
    """
    t0 = time.time()

    # Step 1: Parse all sheets
    print("Step 1: Parsing glyph sheets...")
    glyphs = parse_all_sheets(assets_dir)
    print(f" Parsed {len(glyphs)} glyphs from sheets")

    # Step 2: Compose Hangul
    print("Step 2: Composing Hangul syllables...")
    hangul_glyphs = compose_hangul(assets_dir)
    glyphs.update(hangul_glyphs)
    print(f" Total glyphs after Hangul: {len(glyphs)}")

    # Step 2b: Copy PUA consonant glyphs to Unicode positions
    # In the bitmap font, consonants U+0915-0939 have width=0 and empty bitmaps
    # because the engine normalises them to PUA forms (0xF0140+) before rendering.
    # For OTF, we need the Unicode positions to have actual outlines so that
    # consonants render even without GSUB shaping.
    print("Step 2b: Populating Devanagari consonant glyphs from PUA forms...")
    deva_copied = 0
    for uni_cp in range(0x0915, 0x093A):
        try:
            pua_cp = SC.to_deva_internal(uni_cp)
        except ValueError:
            # No internal PUA mapping for this consonant — leave it alone.
            continue
        if pua_cp in glyphs and uni_cp in glyphs:
            pua_g = glyphs[pua_cp]
            uni_g = glyphs[uni_cp]
            # Only backfill genuinely empty Unicode slots.
            if uni_g.props.width == 0 and pua_g.props.width > 0:
                uni_g.props.width = pua_g.props.width
                uni_g.bitmap = pua_g.bitmap
                uni_g.color_bitmap = pua_g.color_bitmap
                deva_copied += 1
    # Also copy nukta consonant forms U+0958-095F
    for uni_cp in range(0x0958, 0x0960):
        try:
            pua_cp = SC.to_deva_internal(uni_cp)
        except ValueError:
            continue
        if pua_cp in glyphs and uni_cp in glyphs:
            pua_g = glyphs[pua_cp]
            uni_g = glyphs[uni_cp]
            if uni_g.props.width == 0 and pua_g.props.width > 0:
                uni_g.props.width = pua_g.props.width
                uni_g.bitmap = pua_g.bitmap
                uni_g.color_bitmap = pua_g.color_bitmap
                deva_copied += 1
    print(f" Copied {deva_copied} consonant glyphs from PUA forms")

    # Step 3: Expand replacewith directives
    print("Step 3: Processing replacewith directives...")
    replacewith_subs = _expand_replacewith(glyphs)
    print(f" Found {len(replacewith_subs)} replacewith substitutions")

    # Step 3b: Compose fallback bitmaps for replacewith glyphs
    # Glyphs with replacewith directives have width=0 and no bitmap; they
    # rely on GSUB ccmp to expand into their target sequence. Renderers
    # without GSUB support would show whitespace. Build a composite
    # bitmap by concatenating the target glyphs' bitmaps side by side.
    print("Step 3b: Composing fallback bitmaps for replacewith glyphs...")
    composed = 0
    for src_cp, target_cps in replacewith_subs:
        src_g = glyphs.get(src_cp)
        if src_g is None or src_g.props.width > 0:
            continue  # already has content (e.g. Deva consonants fixed above)
        # Resolve target glyphs
        target_gs = [glyphs.get(t) for t in target_cps]
        if not all(target_gs):
            # At least one expansion target is missing — cannot compose.
            continue
        # Compute total advance and composite height
        total_width = sum(g.props.width for g in target_gs)
        if total_width == 0:
            continue
        bm_height = max((len(g.bitmap) for g in target_gs if g.bitmap), default=SC.H)
        # Build composite bitmap
        composite = [[0] * total_width for _ in range(bm_height)]
        x = 0
        for tg in target_gs:
            if not tg.bitmap:
                x += tg.props.width
                continue
            cols = min(tg.props.width, len(tg.bitmap[0])) if tg.props.width > 0 else len(tg.bitmap[0])
            nudge = tg.props.nudge_x
            for row in range(min(len(tg.bitmap), bm_height)):
                for col in range(cols):
                    # nudge_x shifts the target's pixels left, mirroring
                    # the bitmap engine's drawing offset.
                    dst_col = x + col - nudge
                    if 0 <= dst_col < total_width and tg.bitmap[row][col]:
                        composite[row][dst_col] = 1
            if tg.props.width > 0:
                x += tg.props.width
            # Zero-width targets (combining marks) overlay at current position
        src_g.props.width = total_width
        src_g.bitmap = composite
        composed += 1
    print(f" Composed {composed} fallback bitmaps")

    # Step 3c: Identify combining marks for zero advance width
    # Glyphs with write_on_top >= 0 are combining marks positioned via
    # GPOS mark-to-base. In OpenType they must have zero advance width;
    # otherwise the cursor advances past the base and diacritics appear
    # shifted to the right. We record them here but keep props.width
    # intact so the mark anchor calculation can use the original width.
    mark_cps = set()
    for cp, g in glyphs.items():
        if g.props.write_on_top >= 0 and g.props.width > 0:
            mark_cps.add(cp)
    if mark_cps:
        print(f"Step 3c: Found {len(mark_cps)} combining marks to zero in hmtx")

    # Step 4: Create glyph order and cmap
    print("Step 4: Building glyph order and cmap...")
    glyph_order = [".notdef"]
    cmap = {}
    glyph_set = set()

    # Sort codepoints for deterministic output
    sorted_cps = sorted(glyphs.keys())

    for cp in sorted_cps:
        g = glyphs[cp]
        if g.props.is_illegal:
            continue
        # Skip C0/C1 control characters and DEL — some platforms render
        # their traced bitmaps, which is undesirable.
        if cp <= 0x001F or cp == 0x007F or 0x0080 <= cp <= 0x009F:
            continue
        name = glyph_name(cp)
        if name == ".notdef":
            continue
        if name in glyph_set:
            # Two codepoints mapping to the same glyph name: first wins.
            continue
        glyph_order.append(name)
        glyph_set.add(name)
        if _should_have_cmap(cp):
            cmap[cp] = name

    print(f" Glyph order: {len(glyph_order)} glyphs, cmap: {len(cmap)} entries")

    # Step 4a: Detect coloured glyphs and prepare COLR layer data
    print("Step 4a: Detecting coloured glyphs...")
    colr_layer_data = {}  # base_name -> list of (layer_name, colour_rgb)
    palette_colours = {}  # (r, g, b) -> palette_index
    layer_bitmaps = {}  # layer_name -> 1-bit bitmap
    layer_insert = []  # (after_name, [layer_names]) for glyph_order insertion

    for cp in sorted_cps:
        g = glyphs[cp]
        if g.props.is_illegal or g.color_bitmap is None:
            continue
        name = glyph_name(cp)
        if name == ".notdef" or name not in glyph_set:
            continue

        # Group pixels by RGB value -> per-colour 1-bit masks
        colour_pixels = {}  # (r, g, b) -> set of (row, col)
        cbm = g.color_bitmap
        for row in range(len(cbm)):
            for col in range(len(cbm[row])):
                # Pixels are RGBA8888 packed ints; alpha is the low byte.
                px = cbm[row][col]
                a = px & 0xFF
                if a == 0:
                    continue
                r = (px >> 24) & 0xFF
                g_ch = (px >> 16) & 0xFF
                b = (px >> 8) & 0xFF
                rgb = (r, g_ch, b)
                if rgb not in colour_pixels:
                    colour_pixels[rgb] = set()
                colour_pixels[rgb].add((row, col))

        if not colour_pixels:
            continue
        if len(colour_pixels) == 1 and (255, 255, 255) in colour_pixels:
            # Only white pixels — no colour layers needed
            continue

        # Assign palette indices for each unique colour
        for rgb in colour_pixels:
            if rgb not in palette_colours:
                palette_colours[rgb] = len(palette_colours)

        # Generate layer glyphs
        h = len(cbm)
        w = len(cbm[0]) if h > 0 else 0
        layers = []
        layer_names = []
        for i, (rgb, positions) in enumerate(sorted(colour_pixels.items())):
            layer_name = f"{name}.clr{i}"
            # Build 1-bit mask for this colour
            mask = [[0] * w for _ in range(h)]
            for (row, col) in positions:
                mask[row][col] = 1
            layer_bitmaps[layer_name] = mask
            layers.append((layer_name, rgb))
            layer_names.append(layer_name)

        colr_layer_data[name] = layers
        layer_insert.append((name, layer_names))

    # Insert layer glyph names into glyph_order immediately after their base glyph
    for base_name, lnames in layer_insert:
        idx = glyph_order.index(base_name)
        for j, ln in enumerate(lnames):
            glyph_order.insert(idx + 1 + j, ln)
            glyph_set.add(ln)

    if colr_layer_data:
        print(f" Found {len(colr_layer_data)} coloured glyphs, {len(palette_colours)} palette colours, {sum(len(v) for v in colr_layer_data.values())} layer glyphs")
    else:
        print(" No coloured glyphs found")

    # Step 5: Build font with fonttools (CFF/OTF)
    print("Step 5: Building font tables...")
    fb = FontBuilder(SC.UNITS_PER_EM, isTTF=False)
    fb.setupGlyphOrder(glyph_order)
    fb.setupCharacterMap(cmap)

    # Step 6: Trace bitmaps -> CFF charstrings
    print("Step 6: Tracing bitmaps to CFF outlines...")

    charstrings = {}

    # .notdef glyph (empty box): outer rectangle plus an inset inner
    # rectangle wound so the middle renders hollow.
    pen = T2CharStringPen(SC.UNITS_PER_EM // 2, None)
    pen.moveTo((0, 0))
    pen.lineTo((0, SC.ASCENT))
    pen.lineTo((SC.UNITS_PER_EM // 2, SC.ASCENT))
    pen.lineTo((SC.UNITS_PER_EM // 2, 0))
    pen.closePath()
    _m = 2 * SCALE
    pen.moveTo((_m, _m))
    pen.lineTo((SC.UNITS_PER_EM // 2 - _m, _m))
    pen.lineTo((SC.UNITS_PER_EM // 2 - _m, SC.ASCENT - _m))
    pen.lineTo((_m, SC.ASCENT - _m))
    pen.closePath()
    charstrings[".notdef"] = pen.getCharString()

    _unihan_cps = set(SC.CODE_RANGE[SC.SHEET_UNIHAN])
    _base_offsets = {}  # glyph_name -> (x_offset, y_offset) for COLR layers

    traced_count = 0
    for cp in sorted_cps:
        g = glyphs[cp]
        if g.props.is_illegal:
            continue
        name = glyph_name(cp)
        if name == ".notdef" or name not in glyph_set:
            continue

        # Combining marks get zero advance (see Step 3c).
        advance = 0 if cp in mark_cps else g.props.width * SCALE

        # Compute alignment offset (lsb shift).
        # The Kotlin code draws the full cell at an offset position:
        # ALIGN_LEFT: offset = 0
        # ALIGN_RIGHT: offset = width - W_VAR_INIT (negative)
        # ALIGN_CENTRE: offset = ceil((width - W_VAR_INIT) / 2) (negative)
        # ALIGN_BEFORE: offset = 0
        # The bitmap cell width depends on the sheet type.
        # nudge_x shifts the glyph left by that many pixels in the
        # bitmap engine. The Kotlin engine always applies nudge_x to
        # the drawing position (posXbuffer = -nudgeX + ...) and the
        # next glyph compensates via extraWidth, so the effective
        # origin-to-origin advance stays at `width`. We must bake
        # the same leftward shift into the contour x_offset.
        import math
        # The Kotlin engine always uses W_VAR_INIT for alignment calculations,
        # even for EXTRAWIDE sheets. Use W_VAR_INIT here to match.
        bm_cols = SC.W_VAR_INIT
        if g.props.align_where == SC.ALIGN_RIGHT:
            x_offset = (g.props.width - bm_cols) * SCALE
        elif g.props.align_where == SC.ALIGN_CENTRE:
            x_offset = math.ceil((g.props.width - bm_cols) / 2) * SCALE
        else:
            x_offset = 0
        x_offset -= g.props.nudge_x * SCALE

        # For STACK_DOWN marks (below-base diacritics), negative nudge_y
        # means "shift content down to below baseline". The sign convention
        # is opposite to non-marks where positive nudge_y means shift down.
        if g.props.stack_where == SC.STACK_DOWN and g.props.write_on_top >= 0:
            y_offset = g.props.nudge_y * SCALE
        else:
            y_offset = -g.props.nudge_y * SCALE

        # Unihan glyphs are 16px tall in a 20px cell; the bitmap engine
        # centres them vertically with offsetUnihan = (H - H_UNIHAN) / 2.
        if cp in _unihan_cps:
            y_offset -= ((SC.H - SC.H_UNIHAN) // 2) * SCALE

        # Hangul jungseong/jongseong PUA variants (rows 15-18) have zero
        # advance and overlay the preceding choseong. Shift their outlines
        # left by one syllable cell width so they render at the same position.
        if cp >= HANGUL_PUA_BASE:
            _pua_row = (cp - HANGUL_PUA_BASE) // 256
            if 15 <= _pua_row <= 18:
                x_offset -= SC.W_HANGUL_BASE * SCALE

        # Store offsets for COLR layer glyphs
        if name in colr_layer_data:
            _base_offsets[name] = (x_offset, y_offset)

        contours = trace_bitmap(g.bitmap, g.props.width)

        pen = T2CharStringPen(advance, None)
        if contours:
            draw_glyph_to_pen(contours, pen, x_offset=x_offset, y_offset=y_offset)
            traced_count += 1
        charstrings[name] = pen.getCharString()

    # Trace COLR layer glyphs
    layer_traced = 0
    for base_name, layers in colr_layer_data.items():
        # Layers reuse the base glyph's offsets so colours align with it.
        base_xoff, base_yoff = _base_offsets.get(base_name, (0, 0))
        for layer_name, _rgb in layers:
            lbm = layer_bitmaps[layer_name]
            # Find the effective glyph width from the base glyph's bitmap
            lw = len(lbm[0]) if lbm and lbm[0] else 0
            contours = trace_bitmap(lbm, lw)
            pen = T2CharStringPen(0, None)  # advance width 0 for layers
            if contours:
                draw_glyph_to_pen(contours, pen, x_offset=base_xoff, y_offset=base_yoff)
                layer_traced += 1
            charstrings[layer_name] = pen.getCharString()

    print(f" Traced {traced_count} glyphs with outlines" + (f" + {layer_traced} colour layers" if layer_traced else ""))

    fb.setupCFF(
        psName="TerrarumSansBitmap-Regular",
        fontInfo={},
        charStringsDict=charstrings,
        privateDict={},
    )

    # Step 7: Set metrics
    print("Step 7: Setting font metrics...")
    metrics = {}
    metrics[".notdef"] = (SC.UNITS_PER_EM // 2, 0)

    for cp in sorted_cps:
        g = glyphs[cp]
        if g.props.is_illegal:
            continue
        name = glyph_name(cp)
        if name == ".notdef" or name not in glyph_set:
            continue
        advance = 0 if cp in mark_cps else g.props.width * SCALE
        metrics[name] = (advance, 0)

    # Add zero-advance metrics for COLR layer glyphs
    for _base_name, layers in colr_layer_data.items():
        for layer_name, _rgb in layers:
            metrics[layer_name] = (0, 0)

    fb.setupHorizontalMetrics(metrics)
    fb.setupHorizontalHeader(
        ascent=SC.ASCENT,
        descent=-SC.DESCENT
    )

    fb.setupNameTable({
        "copyright": "Copyright (c) 2026 CuriousTorvald (curioustorvald.com), with Reserved Font Name TERRARUM.",
        "familyName": "Terrarum Sans Bitmap",
        "styleName": "Regular",
        "uniqueFontIdentifier": "TerrarumSansBitmap-Regular-"+FONT_VERSION,
        "fullName": "Terrarum Sans Bitmap Regular",
        "psName": "TerrarumSansBitmap-Regular",
        "version": FONT_VERSION,
        "licenseDescription": "This Font Software is licensed under the SIL Open Font License, Version 1.1.",
        "licenseInfoURL": "https://openfontlicense.org/"
    })

    fb.setupOS2(
        sTypoAscender=SC.ASCENT,
        sTypoDescender=-SC.DESCENT,
        sTypoLineGap=SC.LINE_GAP,
        usWinAscent=SC.ASCENT,
        usWinDescent=SC.DESCENT,
        sxHeight=SC.X_HEIGHT,
        sCapHeight=SC.CAP_HEIGHT,
        fsType=0,  # installable embedding — no restrictions
    )

    unix_ts = int(time.time())
    # head-table timestamps count seconds since 1904-01-01 (OpenType epoch);
    # 2082844800 is the offset from that epoch to the Unix epoch.
    opentype_ts = unix_ts + 2082844800

    fb.setupPost()
    fb.setupHead(
        unitsPerEm=SC.UNITS_PER_EM,
        created=opentype_ts,
        modified=opentype_ts,
    )

    font = fb.font

    # Step 7a: Build COLR v0 / CPAL tables
    if colr_layer_data:
        print("Step 7a: Building COLR v0/CPAL tables...")
        from fontTools.colorLib.builder import buildCOLR, buildCPAL

        # CPAL: single palette normalised to 0..1
        palette = [(0, 0, 0, 1.0)] * len(palette_colours)
        for (r, g, b), idx in palette_colours.items():
            palette[idx] = (r / 255, g / 255, b / 255, 1.0)
        font["CPAL"] = buildCPAL([palette])

        # COLR v0: list of (layer_glyph_name, palette_index) per base glyph
        colr_v0 = {}
        for base_name, layers in colr_layer_data.items():
            colr_v0[base_name] = [
                (layer_name, palette_colours[rgb])
                for layer_name, rgb in layers
            ]
        font["COLR"] = buildCOLR(colr_v0, version=0)
        print(f" COLR v0: {len(colr_v0)} base glyphs, {len(palette)} palette entries")

    # Step 8: Generate and compile OpenType features
    if not no_features:
        print("Step 8: Generating OpenType features...")
        kern_pairs = generate_kerning_pairs(glyphs)
        print(f" {len(kern_pairs)} kerning pairs")

        jamo_data = get_jamo_gsub_data()
        fea_code = generate_features(glyphs, kern_pairs, glyph_set,
                                     replacewith_subs=replacewith_subs,
                                     jamo_data=jamo_data)

        if fea_code.strip():
            print(" Compiling features with feaLib...")
            try:
                # Obtain raw .fea text for debugging
                with open("debugout_features.fea", "w") as text_file:
                    text_file.write(fea_code)

                fea_stream = io.StringIO(fea_code)
                addOpenTypeFeatures(font, fea_stream)
                print(" Features compiled successfully")
            except Exception as e:
                # Feature failure is non-fatal: the font still works,
                # just without GSUB/GPOS shaping.
                print(f" [WARNING] Feature compilation failed: {e}")
                print(" Continuing without OpenType features")
        else:
            print(" No features to compile")
    else:
        print("Step 8: Skipping OpenType features (--no-features)")

    # Step 9: Add bitmap strike (EBDT/EBLC)
    if not no_bitmap:
        print("Step 9: Adding bitmap strike...")
        _add_bitmap_strike(font, glyphs, glyph_order, glyph_set)
    else:
        print("Step 9: Skipping bitmap strike (--no-bitmap)")

    # Save
    print(f"Saving to {output_path}...")
    font.save(output_path)

    elapsed = time.time() - t0
    print(f"Done! Built {len(glyph_order)} glyphs in {elapsed:.1f}s")
    print(f"Output: {output_path}")
|
||||
|
||||
|
||||
def _add_bitmap_strike(font, glyphs, glyph_order, glyph_set):
    """Add EBDT/EBLC embedded bitmap strike at ppem=20 via TTX roundtrip.

    The 1-bit glyph bitmaps are serialised as EBDT format-1 images and
    indexed by EBLC format-1 subtables (one per contiguous glyph-ID run),
    then imported into `font` through a temporary .ttx file because the
    tables are assembled as TTX XML text rather than via table objects.

    Non-fatal: on any import failure the font is left without a strike.
    """
    import tempfile
    import os as _os

    ppem = 20
    # Map glyph name -> glyph ID (position in glyph_order).
    name_to_id = {name: idx for idx, name in enumerate(glyph_order)}

    bitmap_entries = []
    for name in glyph_order:
        if name == ".notdef":
            continue
        # Only glyphs whose name round-trips to a codepoint get a bitmap.
        cp = _name_to_cp(name)
        if cp is None or cp not in glyphs:
            continue
        g = glyphs[cp]
        if g.props.is_illegal or g.props.width == 0:
            continue

        bitmap = g.bitmap
        h = len(bitmap)
        w = len(bitmap[0]) if h > 0 else 0
        if w == 0 or h == 0:
            continue

        # Pack each row MSB-first into bytes, hex-encoded for TTX.
        hex_rows = []
        for row in bitmap:
            row_bytes = bytearray()
            for col_start in range(0, w, 8):
                byte_val = 0
                for bit in range(8):
                    col = col_start + bit
                    if col < w and row[col]:
                        byte_val |= (0x80 >> bit)
                row_bytes.append(byte_val)
            hex_rows.append(row_bytes.hex())

        bitmap_entries.append({
            'name': name,
            'gid': name_to_id.get(name, 0),
            'height': h,
            'width': w,
            'advance': g.props.width,
            'hex_rows': hex_rows,
        })

    if not bitmap_entries:
        print(" No bitmap data to embed")
        return

    gid_sorted = sorted(bitmap_entries, key=lambda e: e['gid'])

    # Split into runs of consecutive glyph IDs; each run becomes one
    # EBLC index subtable.
    runs = []
    current_run = [gid_sorted[0]]
    for i in range(1, len(gid_sorted)):
        if gid_sorted[i]['gid'] == gid_sorted[i-1]['gid'] + 1:
            current_run.append(gid_sorted[i])
        else:
            runs.append(current_run)
            current_run = [gid_sorted[i]]
    runs.append(current_run)

    # EBDT: one format-1 (byte-aligned, small metrics) image per glyph.
    ebdt_xml = ['<EBDT>', '<header version="2.0"/>', '<strikedata index="0">']
    for entry in gid_sorted:
        ebdt_xml.append(f' <cbdt_bitmap_format_1 name="{entry["name"]}">')
        ebdt_xml.append(f' <SmallGlyphMetrics>')
        ebdt_xml.append(f' <height value="{entry["height"]}"/>')
        ebdt_xml.append(f' <width value="{entry["width"]}"/>')
        ebdt_xml.append(f' <BearingX value="0"/>')
        ebdt_xml.append(f' <BearingY value="{BASELINE_ROW}"/>')
        ebdt_xml.append(f' <Advance value="{entry["advance"]}"/>')
        ebdt_xml.append(f' </SmallGlyphMetrics>')
        ebdt_xml.append(f' <rawimagedata>')
        for hr in entry['hex_rows']:
            ebdt_xml.append(f' {hr}')
        ebdt_xml.append(f' </rawimagedata>')
        ebdt_xml.append(f' </cbdt_bitmap_format_1>')
    ebdt_xml.append('</strikedata>')
    ebdt_xml.append('</EBDT>')

    all_gids = [e['gid'] for e in gid_sorted]
    # Descender in pixels below the baseline (negative per sbit convention).
    desc = -(SC.H - BASELINE_ROW)

    def _line_metrics_xml(direction, caret_num=1):
        # Shared sbitLineMetrics block for the "hori"/"vert" directions.
        return [
            f' <sbitLineMetrics direction="{direction}">',
            f' <ascender value="{BASELINE_ROW}"/>',
            f' <descender value="{desc}"/>',
            f' <widthMax value="{SC.W_WIDEVAR_INIT}"/>',
            f' <caretSlopeNumerator value="{caret_num}"/>',
            ' <caretSlopeDenominator value="0"/>',
            ' <caretOffset value="0"/>',
            ' <minOriginSB value="0"/>',
            ' <minAdvanceSB value="0"/>',
            f' <maxBeforeBL value="{BASELINE_ROW}"/>',
            f' <minAfterBL value="{desc}"/>',
            ' <pad1 value="0"/>',
            ' <pad2 value="0"/>',
            f' </sbitLineMetrics>',
        ]

    eblc_xml = [
        '<EBLC>', '<header version="2.0"/>',
        '<strike index="0">', ' <bitmapSizeTable>',
        ' <colorRef value="0"/>',
    ]
    eblc_xml.extend(_line_metrics_xml("hori", 1))
    eblc_xml.extend(_line_metrics_xml("vert", 0))
    eblc_xml.extend([
        f' <startGlyphIndex value="{all_gids[0]}"/>',
        f' <endGlyphIndex value="{all_gids[-1]}"/>',
        f' <ppemX value="{ppem}"/>',
        f' <ppemY value="{ppem}"/>',
        ' <bitDepth value="1"/>',
        ' <flags value="1"/>',
        ' </bitmapSizeTable>',
    ])

    # One index subtable per consecutive-GID run.
    for run in runs:
        first_gid = run[0]['gid']
        last_gid = run[-1]['gid']
        eblc_xml.append(f' <eblc_index_sub_table_1 imageFormat="1" firstGlyphIndex="{first_gid}" lastGlyphIndex="{last_gid}">')
        for entry in run:
            eblc_xml.append(f' <glyphLoc name="{entry["name"]}"/>')
        eblc_xml.append(' </eblc_index_sub_table_1>')

    eblc_xml.append('</strike>')
    eblc_xml.append('</EBLC>')

    try:
        ttx_content = '<?xml version="1.0" encoding="UTF-8"?>\n<ttFont>\n'
        ttx_content += '\n'.join(ebdt_xml) + '\n'
        ttx_content += '\n'.join(eblc_xml) + '\n'
        ttx_content += '</ttFont>\n'

        # delete=False so the file survives the `with` for importXML;
        # removed manually below.
        with tempfile.NamedTemporaryFile(mode='w', suffix='.ttx', delete=False) as f:
            f.write(ttx_content)
            ttx_path = f.name

        font.importXML(ttx_path)
        _os.unlink(ttx_path)

        print(f" Added bitmap strike at {ppem}ppem with {len(bitmap_entries)} glyphs ({len(runs)} index subtables)")
    except Exception as e:
        print(f" [WARNING] Bitmap strike failed: {e}")
        print(" Continuing without bitmap strike")
|
||||
|
||||
|
||||
def _name_to_cp(name):
|
||||
"""Convert glyph name back to codepoint."""
|
||||
if name == ".notdef":
|
||||
return None
|
||||
if name == "space":
|
||||
return 0x20
|
||||
if name.startswith("uni"):
|
||||
try:
|
||||
return int(name[3:], 16)
|
||||
except ValueError:
|
||||
return None
|
||||
if name.startswith("u"):
|
||||
try:
|
||||
return int(name[1:], 16)
|
||||
except ValueError:
|
||||
return None
|
||||
return None
|
||||
505
OTFbuild/glyph_parser.py
Normal file
505
OTFbuild/glyph_parser.py
Normal file
@@ -0,0 +1,505 @@
|
||||
"""
|
||||
Extract glyph bitmaps and tag-column properties from TGA sprite sheets.
|
||||
Ported from TerrarumSansBitmap.kt:buildWidthTable() and GlyphSheetParser.kt.
|
||||
|
||||
Enhancement over v1: extracts all 6 diacritics anchors for GPOS mark feature.
|
||||
"""
|
||||
|
||||
import os
|
||||
from dataclasses import dataclass, field
|
||||
from typing import Dict, List, Optional, Tuple
|
||||
|
||||
from tga_reader import TgaImage, read_tga
|
||||
import sheet_config as SC
|
||||
|
||||
|
||||
@dataclass
class DiacriticsAnchor:
    """One diacritics attachment anchor decoded from a glyph's tag column.

    Six of these exist per glyph (slots 0-5); coordinates are 7-bit pixel
    values and are 0 when the corresponding *_used flag is False.
    """
    type: int     # anchor slot index (0-5)
    x: int        # anchor x position in pixels (0 when x_used is False)
    y: int        # anchor y position in pixels (0 when y_used is False)
    x_used: bool  # True when an x coordinate was explicitly encoded
    y_used: bool  # True when a y coordinate was explicitly encoded
|
||||
|
||||
|
||||
@dataclass
class GlyphProps:
    """Per-glyph properties decoded from the sheet's tag column.

    Field values mirror the bit layout read by parse_variable_sheet():
    width comes from 5 bits, nudges from signed bytes, the directive
    opcode/args from one 32-bit tag pixel.
    """
    width: int                     # advance width in pixels (5-bit, 0-31)
    is_low_height: bool = False    # low-height flag (tag row 5)
    nudge_x: int = 0               # signed horizontal draw offset in pixels
    nudge_y: int = 0               # signed vertical draw offset in pixels
    # Six anchors for GPOS mark attachment, all unset by default.
    diacritics_anchors: List[DiacriticsAnchor] = field(default_factory=lambda: [
        DiacriticsAnchor(i, 0, 0, False, False) for i in range(6)
    ])
    align_where: int = 0           # one of the SC.ALIGN_* constants
    write_on_top: int = -1         # >= 0 marks a combining (overlay) glyph
    stack_where: int = 0           # one of the SC.STACK_* constants
    # Extension payload, e.g. replacewith target codepoints (up to 7 used).
    ext_info: List[int] = field(default_factory=lambda: [0] * 15)
    has_kern_data: bool = False    # True when the kerning tag byte is non-zero
    is_kern_y_type: bool = False   # kerning axis flag from the tag pixel's MSB
    kerning_mask: int = 255        # 24-bit kerning shape mask (255 = no data)
    directive_opcode: int = 0      # compiler directive opcode (255 = illegal)
    directive_arg1: int = 0        # directive argument byte 1
    directive_arg2: int = 0        # directive argument byte 2

    @property
    def is_illegal(self):
        # Opcode 255 marks a cell that must not become a glyph.
        return self.directive_opcode == 255

    def required_ext_info_count(self):
        # How many ext_info slots this glyph's directives consume.
        if self.stack_where == SC.STACK_BEFORE_N_AFTER:
            return 2
        # Opcodes 0b10000_000..0b10000_111 are the replacewith family.
        if 0b10000_000 <= self.directive_opcode <= 0b10000_111:
            return 7
        return 0

    def is_pragma(self, pragma):
        # Only the "replacewith" pragma is recognised at present.
        if pragma == "replacewith":
            return 0b10000_000 <= self.directive_opcode <= 0b10000_111
        return False
|
||||
|
||||
|
||||
@dataclass
class ExtractedGlyph:
    """A single glyph pulled out of a sprite sheet: codepoint, decoded
    tag-column properties, and its pixel data."""
    codepoint: int
    props: GlyphProps
    bitmap: List[List[int]]  # [row][col], 0 or 1
    # Present only for coloured glyphs; None for plain 1-bit glyphs.
    color_bitmap: Optional[List[List[int]]] = None  # [row][col], RGBA8888 values
|
||||
|
||||
|
||||
def _is_coloured_pixel(px):
|
||||
"""Return True if the pixel is visible (A > 0) and non-white (R+G+B < 765)."""
|
||||
a = px & 0xFF
|
||||
if a == 0:
|
||||
return False
|
||||
r = (px >> 24) & 0xFF
|
||||
g = (px >> 16) & 0xFF
|
||||
b = (px >> 8) & 0xFF
|
||||
return (r + g + b) < 765
|
||||
|
||||
|
||||
def _tagify(pixel):
|
||||
"""Return 0 if alpha channel is zero, else return the original value."""
|
||||
return 0 if (pixel & 0xFF) == 0 else pixel
|
||||
|
||||
|
||||
def _signed_byte(val):
|
||||
"""Convert unsigned byte to signed."""
|
||||
return val - 256 if val >= 128 else val
|
||||
|
||||
|
||||
def _parse_diacritics_anchors(image, code_start_x, code_start_y):
    """Parse 6 diacritics anchors from tag column rows 11-14.

    Anchors are packed three per pixel-pair: a Y pixel (rows 13/11) and
    the X pixel directly below it. Each anchor occupies one byte of the
    32-bit pixel; bit 7 of the byte is the "used" flag and the low 7 bits
    are the coordinate value.
    """
    anchors = []
    for i in range(6):
        # Anchors 0-2 live at rows 13/14, anchors 3-5 at rows 11/12.
        y_pos = 13 - (i // 3) * 2
        # Byte position within the 32-bit pixel: anchor 0 -> bits 24-31,
        # anchor 1 -> bits 16-23, anchor 2 -> bits 8-15 (and likewise for 3-5).
        shift = (3 - (i % 3)) * 8
        # _tagify zeroes fully transparent tag pixels before decoding.
        y_pixel = _tagify(image.get_pixel(code_start_x, code_start_y + y_pos))
        x_pixel = _tagify(image.get_pixel(code_start_x, code_start_y + y_pos + 1))
        y_used = ((y_pixel >> shift) & 128) != 0
        x_used = ((x_pixel >> shift) & 128) != 0
        # Coordinates are only meaningful when the used flag is set.
        y_val = (y_pixel >> shift) & 127 if y_used else 0
        x_val = (x_pixel >> shift) & 127 if x_used else 0
        anchors.append(DiacriticsAnchor(i, x_val, y_val, x_used, y_used))
    return anchors
|
||||
|
||||
|
||||
def parse_variable_sheet(image, sheet_index, cell_w, cell_h, cols, is_xy_swapped):
    """Parse a variable-width sheet: extract tag column for properties, bitmap for glyph.

    Parameters:
        image: object exposing get_pixel(x, y) -> int (RGBA8888, alpha in the low byte)
        sheet_index: index into SC.CODE_RANGE / SC.FILE_LIST
        cell_w, cell_h: cell pitch in pixels; cell_w includes the 1-px tag column
        cols: number of cells per sheet row
        is_xy_swapped: True when the sheet is laid out column-major

    Returns:
        dict mapping codepoint -> ExtractedGlyph
    """
    code_range = SC.CODE_RANGE[sheet_index]
    binary_code_offset = cell_w - 1  # tag column is last pixel column of cell
    result = {}

    for index, code in enumerate(code_range):
        # Locate the cell; xyswap sheets store glyphs column-major.
        if is_xy_swapped:
            cell_x = (index // cols) * cell_w
            cell_y = (index % cols) * cell_h
        else:
            cell_x = (index % cols) * cell_w
            cell_y = (index // cols) * cell_h

        code_start_x = cell_x + binary_code_offset
        code_start_y = cell_y

        # Width (5 bits), least significant bit at the top row of the tag column.
        width = 0
        for y in range(5):
            if image.get_pixel(code_start_x, code_start_y + y) & 0xFF:
                width |= (1 << y)

        # Row 5: low-height flag (glyph drawn in the lowercase band).
        is_low_height = (image.get_pixel(code_start_x, code_start_y + 5) & 0xFF) != 0

        # Kerning data: row 6 packs the y-type flag (bit 31), a 24-bit shape
        # mask (bits 8-31), and a presence flag in the alpha byte.
        kerning_bit1 = _tagify(image.get_pixel(code_start_x, code_start_y + 6))
        # kerning_bit2 and kerning_bit3 are reserved
        is_kern_y_type = (kerning_bit1 & 0x80000000) != 0
        kerning_mask = (kerning_bit1 >> 8) & 0xFFFFFF
        has_kern_data = (kerning_bit1 & 0xFF) != 0
        if not has_kern_data:
            # No kern data: neutralise the flags so downstream checks are cheap.
            is_kern_y_type = False
            kerning_mask = 255

        # Compiler directives: row 9 packs opcode and two argument bytes.
        compiler_directives = _tagify(image.get_pixel(code_start_x, code_start_y + 9))
        directive_opcode = (compiler_directives >> 24) & 255
        directive_arg1 = (compiler_directives >> 16) & 255
        directive_arg2 = (compiler_directives >> 8) & 255

        # Nudge: row 10 packs signed X/Y pixel offsets in the top two bytes.
        nudging_bits = _tagify(image.get_pixel(code_start_x, code_start_y + 10))
        nudge_x = _signed_byte((nudging_bits >> 24) & 0xFF)
        nudge_y = _signed_byte((nudging_bits >> 16) & 0xFF)

        # Diacritics anchors (rows 11-14).
        diacritics_anchors = _parse_diacritics_anchors(image, code_start_x, code_start_y)

        # Alignment: 2 bits from rows 15-16.
        align_where = 0
        for y in range(2):
            if image.get_pixel(code_start_x, code_start_y + y + 15) & 0xFF:
                align_where |= (1 << y)

        # Write on top: row 17. Raw pixel is inspected directly (NO tagify)
        # because a fully-transparent pixel means "not write-on-top" (-1).
        write_on_top_raw = image.get_pixel(code_start_x, code_start_y + 17)  # NO tagify
        if (write_on_top_raw & 0xFF) == 0:
            write_on_top = -1
        else:
            # All-white RGB encodes mode 0; otherwise the mode is the top nibble.
            if (write_on_top_raw >> 8) == 0xFFFFFF:
                write_on_top = 0
            else:
                write_on_top = (write_on_top_raw >> 28) & 15

        # Stack where: rows 18-19. The magenta sentinel 0x00FF00FF in both
        # rows means "don't stack"; otherwise the two rows form a 2-bit code.
        stack_where0 = _tagify(image.get_pixel(code_start_x, code_start_y + 18))
        stack_where1 = _tagify(image.get_pixel(code_start_x, code_start_y + 19))
        if stack_where0 == 0x00FF00FF and stack_where1 == 0x00FF00FF:
            stack_where = SC.STACK_DONT
        else:
            stack_where = 0
            for y in range(2):
                if image.get_pixel(code_start_x, code_start_y + y + 18) & 0xFF:
                    stack_where |= (1 << y)

        # NOTE: props.ext_info aliases this list, so filling it below after
        # GlyphProps construction also updates the props object.
        ext_info = [0] * 15
        props = GlyphProps(
            width=width, is_low_height=is_low_height,
            nudge_x=nudge_x, nudge_y=nudge_y,
            diacritics_anchors=diacritics_anchors,
            align_where=align_where, write_on_top=write_on_top,
            stack_where=stack_where, ext_info=ext_info,
            has_kern_data=has_kern_data, is_kern_y_type=is_kern_y_type,
            kerning_mask=kerning_mask,
            directive_opcode=directive_opcode, directive_arg1=directive_arg1,
            directive_arg2=directive_arg2,
        )

        # Parse extInfo if needed: each extInfo column packs 20 bits vertically
        # at the LEFT edge of the cell.
        ext_count = props.required_ext_info_count()
        if ext_count > 0:
            for x in range(ext_count):
                info = 0
                for y in range(20):
                    if image.get_pixel(cell_x + x, cell_y + y) & 0xFF:
                        info |= (1 << y)
                ext_info[x] = info

        # Extract glyph bitmap: full cell minus the tag column.
        # The Kotlin code draws the ENTIRE cell at a computed position;
        # the tag column is the only thing excluded.
        # Alignment and width only affect advance/positioning, not the bitmap.
        max_w = cell_w - 1  # exclude tag column

        bitmap = []
        for row in range(cell_h):
            row_data = []
            for col in range(max_w):
                px = image.get_pixel(cell_x + col, cell_y + row)
                row_data.append(1 if (px & 0xFF) != 0 else 0)
            bitmap.append(row_data)

        # Now strip the tag column pixels that may have leaked into
        # the glyph area. Tag data lives at column (cell_w - 1) which
        # we already excluded, but extInfo columns 0..6 at the LEFT
        # edge of the cell also contain tag data for replacewith glyphs.
        # Clean those columns if they were used for extInfo.
        if ext_count > 0:
            for col_idx in range(min(ext_count, max_w)):
                for row in range(cell_h):
                    bitmap[row][col_idx] = 0

        # Colour extraction: check if any visible pixel is non-white.
        has_colour = False
        color_bitmap = []
        for row in range(cell_h):
            row_data = []
            for col in range(max_w):
                px = image.get_pixel(cell_x + col, cell_y + row)
                row_data.append(px)
                if not has_colour and _is_coloured_pixel(px):
                    has_colour = True
            color_bitmap.append(row_data)

        if has_colour:
            # Strip extInfo columns from color_bitmap too.
            if ext_count > 0:
                for col_idx in range(min(ext_count, max_w)):
                    for row in range(cell_h):
                        color_bitmap[row][col_idx] = 0
        else:
            # Monochrome glyph: drop the colour layer entirely.
            color_bitmap = None

        result[code] = ExtractedGlyph(code, props, bitmap, color_bitmap)

    return result
|
||||
|
||||
|
||||
def _read_hangul_cell(image, column, row, cell_w=SC.W_HANGUL_BASE, cell_h=SC.H):
    """Read a single cell from the Hangul johab sheet at (column, row).

    Returns a cell_h-by-cell_w bitmap of 0/1 values, where 1 marks any
    pixel with a non-zero alpha byte.
    """
    x0 = column * cell_w
    y0 = row * cell_h
    return [
        [1 if (image.get_pixel(x0 + c, y0 + r) & 0xFF) != 0 else 0
         for c in range(cell_w)]
        for r in range(cell_h)
    ]
|
||||
|
||||
|
||||
def parse_hangul_jamo_sheet(image, cell_w, cell_h):
    """
    Parse the Hangul Jamo sheet with correct row/column mapping.

    hangul_johab.tga layout (sheet row selects the glyph variant):
      - Choseong U+1100-U+115F           -> row 1,  col = cp - 0x1100
      - Jungseong filler U+1160          -> row 15, col 0
      - Jungseong U+1161-U+11A7          -> row 15, col = cp - 0x1160
      - Jongseong U+11A8-U+11FF          -> row 17, col = cp - 0x11A8 + 1
      - Extended choseong U+A960-U+A97F  -> row 1,  col = cp - 0xA960 + 96
      - Extended jungseong U+D7B0-U+D7C6 -> row 15, col = cp - 0xD7B0 + 72
      - Extended jongseong U+D7CB-U+D7FB -> row 17, col = cp - 0xD7CB + 89

    Each jamo gets a default-row bitmap; the remaining variant rows are
    used for syllable composition (handled separately by hangul.py / GSUB).
    """
    glyphs = {}

    def grab(cp, col, row):
        # Read one cell and register it under `cp` with the base Hangul width.
        cell = _read_hangul_cell(image, col, row, cell_w, cell_h)
        glyphs[cp] = ExtractedGlyph(cp, GlyphProps(width=cell_w), cell)

    # U+1160 (Hangul Jungseong Filler) sits at column 0 of the jungseong row.
    grab(0x1160, 0, 15)

    # (first cp, last cp inclusive, column of the first cp, sheet row)
    bands = [
        (0x1100, 0x115F, 0, 1),    # choseong incl. U+115F filler
        (0x1161, 0x11A7, 1, 15),   # jungseong
        (0x11A8, 0x11FF, 1, 17),   # jongseong (index starts at 1)
        (0xA960, 0xA97F, 96, 1),   # extended choseong
        (0xD7B0, 0xD7C6, 72, 15),  # extended jungseong
        (0xD7CB, 0xD7FB, 89, 17),  # extended jongseong
    ]
    for first, last, base_col, row in bands:
        for cp in range(first, last + 1):
            grab(cp, base_col + (cp - first), row)

    return glyphs
|
||||
|
||||
|
||||
def parse_fixed_sheet(image, sheet_index, cell_w, cell_h, cols):
    """Parse a fixed-width sheet (Hangul, Unihan, Runic, Custom Sym).

    Returns a dict mapping codepoint -> ExtractedGlyph; the colour layer is
    attached only when at least one visible non-white pixel exists.
    """
    # The Hangul Jamo sheet uses its own row/column layout.
    if sheet_index == SC.SHEET_HANGUL:
        return parse_hangul_jamo_sheet(image, cell_w, cell_h)

    # A few sheets advance by a width different from their cell pitch.
    if sheet_index == SC.SHEET_CUSTOM_SYM:
        advance = 20
    elif sheet_index == SC.SHEET_RUNIC:
        advance = 9
    elif sheet_index == SC.SHEET_UNIHAN:
        advance = SC.W_UNIHAN
    else:
        advance = cell_w

    glyphs = {}
    for idx, cp in enumerate(SC.CODE_RANGE[sheet_index]):
        row_n, col_n = divmod(idx, cols)
        x0 = col_n * cell_w
        y0 = row_n * cell_h

        # Raw RGBA pixels of the whole cell.
        colour_rows = [
            [image.get_pixel(x0 + c, y0 + r) for c in range(cell_w)]
            for r in range(cell_h)
        ]
        # Monochrome mask: any pixel with a non-zero alpha byte is set.
        mono_rows = [
            [1 if (px & 0xFF) != 0 else 0 for px in row]
            for row in colour_rows
        ]
        tinted = any(_is_coloured_pixel(px) for row in colour_rows for px in row)

        glyphs[cp] = ExtractedGlyph(cp, GlyphProps(width=advance), mono_rows,
                                    colour_rows if tinted else None)

    return glyphs
|
||||
|
||||
|
||||
def _empty_bitmap(w=SC.W_VAR_INIT, h=SC.H):
    """Return an all-zero h-by-w bitmap (fresh row lists, no aliasing)."""
    return [[0 for _ in range(w)] for _ in range(h)]
|
||||
|
||||
|
||||
def parse_all_sheets(assets_dir):
    """Parse all sheets and return a map of codepoint -> ExtractedGlyph.

    Walks SC.FILE_LIST, loads each TGA sheet found under `assets_dir`,
    dispatches to the variable- or fixed-width parser, then applies the
    fixed-width overrides.  Missing sheets are skipped with a warning.
    """
    result = {}

    for sheet_index, filename in enumerate(SC.FILE_LIST):
        filepath = os.path.join(assets_dir, filename)
        if not os.path.exists(filepath):
            # BUGFIX: the message previously printed the literal "(unknown)"
            # instead of interpolating the missing sheet's file name.
            print(f"  [SKIP] {filename} not found")
            continue

        is_var = SC.is_variable(filename)
        is_xy = SC.is_xy_swapped(filename)
        is_ew = SC.is_extra_wide(filename)
        cell_w = SC.get_cell_width(sheet_index)
        cell_h = SC.get_cell_height(sheet_index)
        cols = SC.get_columns(sheet_index)

        tags = []
        if is_var: tags.append("VARIABLE")
        if is_xy: tags.append("XYSWAP")
        if is_ew: tags.append("EXTRAWIDE")
        if not tags: tags.append("STATIC")
        # BUGFIX: same interpolation loss as above — report the file name.
        print(f"  Loading [{','.join(tags)}] {filename}")

        image = read_tga(filepath)

        if is_var:
            sheet_glyphs = parse_variable_sheet(image, sheet_index, cell_w, cell_h, cols, is_xy)
        else:
            sheet_glyphs = parse_fixed_sheet(image, sheet_index, cell_w, cell_h, cols)

        result.update(sheet_glyphs)

    # Fixed-width overrides
    _add_fixed_width_overrides(result)

    return result
|
||||
|
||||
|
||||
def _add_fixed_width_overrides(result):
    """Apply fixed-width overrides (mutates `result` in place)."""
    # Hangul compat jamo: ensure every codepoint exists, even if blank.
    jamo_w = SC.W_HANGUL_BASE
    for cp in SC.CODE_RANGE_HANGUL_COMPAT:
        if cp not in result:
            result[cp] = ExtractedGlyph(cp, GlyphProps(width=jamo_w),
                                        _empty_bitmap(jamo_w))

    # Zero-width ranges (only internal/PUA control ranges, not surrogates
    # or full Plane 16).
    for cp in range(0xFFFA0, 0x100000):
        result[cp] = ExtractedGlyph(cp, GlyphProps(width=0), _empty_bitmap(1, 1))

    # Null char is zero-width as well.
    result[0] = ExtractedGlyph(0, GlyphProps(width=0), _empty_bitmap(1, 1))

    # Replacement character at U+007F gets a fixed advance.
    if 0x7F in result:
        result[0x7F].props.width = 15
|
||||
|
||||
|
||||
def get_hangul_jamo_bitmaps(assets_dir):
    """
    Extract raw Hangul jamo bitmaps from the Hangul sheet for composition.
    Returns a function: (column_index, row) -> bitmap (list of list of int)
    """
    sheet_path = os.path.join(assets_dir, SC.FILE_LIST[SC.SHEET_HANGUL])
    if not os.path.exists(sheet_path):
        print(" [WARNING] Hangul sheet not found")
        # Degrade gracefully: every lookup yields a blank cell.
        return lambda idx, row: _empty_bitmap(SC.W_HANGUL_BASE)

    image = read_tga(sheet_path)

    def get_bitmap(index, row):
        # Delegate to the shared cell reader with the standard cell geometry.
        return _read_hangul_cell(image, index, row, SC.W_HANGUL_BASE, SC.H)

    return get_bitmap
|
||||
|
||||
|
||||
def extract_hangul_jamo_variants(assets_dir):
    """
    Extract ALL Hangul jamo variant bitmaps from hangul_johab.tga.
    Returns dict of (column, row) -> bitmap for every non-empty cell.
    Used by hangul.py to store variants in PUA for GSUB assembly.

    Layout:
      Row 0: Hangul Compatibility Jamo (U+3130-U+318F)
      Rows 1-14: Choseong variants (row depends on jungseong context)
      Rows 15-16: Jungseong variants (15=no final, 16=with final)
      Rows 17-18: Jongseong variants (17=normal, 18=rightie jungseong)
      Rows 19-24: Additional choseong variants (giyeok remapping)
    """
    sheet_path = os.path.join(assets_dir, SC.FILE_LIST[SC.SHEET_HANGUL])
    if not os.path.exists(sheet_path):
        return {}

    image = read_tga(sheet_path)
    w, h = SC.W_HANGUL_BASE, SC.H

    # Scan every full cell in the sheet, keeping only non-empty ones.
    variants = {}
    for row in range(image.height // h):
        for col in range(image.width // w):
            cell = _read_hangul_cell(image, col, row, w, h)
            if any(any(scanline) for scanline in cell):
                variants[(col, row)] = cell

    return variants
|
||||
160
OTFbuild/hangul.py
Normal file
160
OTFbuild/hangul.py
Normal file
@@ -0,0 +1,160 @@
|
||||
"""
|
||||
Compose 11,172 Hangul syllables (U+AC00-U+D7A3) from jamo sprite pieces.
|
||||
Also composes Hangul Compatibility Jamo (U+3130-U+318F).
|
||||
Also stores all jamo variant bitmaps in PUA for GSUB-based jamo assembly.
|
||||
|
||||
Ported from HangulCompositor.kt and TerrarumSansBitmap.kt.
|
||||
"""
|
||||
|
||||
from typing import Dict, List, Tuple
|
||||
|
||||
from glyph_parser import (
|
||||
ExtractedGlyph, GlyphProps, get_hangul_jamo_bitmaps,
|
||||
extract_hangul_jamo_variants, _read_hangul_cell, _empty_bitmap,
|
||||
)
|
||||
import sheet_config as SC
|
||||
|
||||
# PUA range for Hangul jamo variant storage.
|
||||
# We need space for: max_col * max_row variants.
|
||||
# Using 0xF0600-0xF1E7F
|
||||
HANGUL_PUA_BASE = 0xF0600
|
||||
|
||||
|
||||
def _compose_bitmaps(a, b, w, h):
|
||||
"""OR two bitmaps together."""
|
||||
result = []
|
||||
for row in range(h):
|
||||
row_data = []
|
||||
for col in range(w):
|
||||
av = a[row][col] if row < len(a) and col < len(a[row]) else 0
|
||||
bv = b[row][col] if row < len(b) and col < len(b[row]) else 0
|
||||
row_data.append(1 if av or bv else 0)
|
||||
result.append(row_data)
|
||||
return result
|
||||
|
||||
|
||||
def _compose_bitmap_into(target, source, w, h):
|
||||
"""OR source bitmap into target (mutates target)."""
|
||||
for row in range(min(h, len(target), len(source))):
|
||||
for col in range(min(w, len(target[row]), len(source[row]))):
|
||||
if source[row][col]:
|
||||
target[row][col] = 1
|
||||
|
||||
|
||||
def _pua_for_jamo_variant(col, row):
    """Get PUA codepoint for a jamo variant at (column, row) in the sheet."""
    # Each sheet row owns a 256-codepoint stripe above HANGUL_PUA_BASE,
    # which supports up to 256 columns per row.
    stripe = HANGUL_PUA_BASE + (row << 8)
    return stripe + col
|
||||
|
||||
|
||||
def compose_hangul(assets_dir) -> Dict[int, ExtractedGlyph]:
    """
    Compose all Hangul syllables, compatibility jamo, and jamo variants.
    Returns a dict of codepoint -> ExtractedGlyph.
    """
    get_jamo = get_hangul_jamo_bitmaps(assets_dir)
    cell_w = SC.W_HANGUL_BASE
    cell_h = SC.H
    result = {}

    # Compose Hangul Compatibility Jamo (U+3130-U+318F): sheet row 0,
    # column = codepoint offset within the block.
    for c in range(0x3130, 0x3190):
        index = c - 0x3130
        bitmap = get_jamo(index, 0)
        props = GlyphProps(width=cell_w)
        result[c] = ExtractedGlyph(c, props, bitmap)

    # Compose 11,172 Hangul syllables (U+AC00-U+D7A3)
    print(" Composing 11,172 Hangul syllables...")
    for c in range(0xAC00, 0xD7A4):
        # Standard Unicode decomposition: cho/jung/jong indices.
        c_int = c - 0xAC00
        index_cho = c_int // (SC.JUNG_COUNT * SC.JONG_COUNT)
        index_jung = c_int // SC.JONG_COUNT % SC.JUNG_COUNT
        index_jong = c_int % SC.JONG_COUNT  # 0 = no jongseong

        # Map to jamo codepoints
        cho_cp = 0x1100 + index_cho
        jung_cp = 0x1161 + index_jung
        jong_cp = 0x11A8 + index_jong - 1 if index_jong > 0 else 0

        # Get sheet indices; fall back to 0 when a lookup has no mapping.
        i_cho = SC.to_hangul_choseong_index(cho_cp)
        i_jung = SC.to_hangul_jungseong_index(jung_cp)
        if i_jung is None:
            i_jung = 0
        i_jong = 0
        if jong_cp != 0:
            idx = SC.to_hangul_jongseong_index(jong_cp)
            if idx is not None:
                i_jong = idx

        # Get row positions: the variant row of each jamo depends on the
        # other jamo in the syllable (context-sensitive shaping).
        cho_row = SC.get_han_initial_row(i_cho, i_jung, i_jong)
        jung_row = SC.get_han_medial_row(i_cho, i_jung, i_jong)
        jong_row = SC.get_han_final_row(i_cho, i_jung, i_jong)

        # Get jamo bitmaps
        cho_bitmap = get_jamo(i_cho, cho_row)
        jung_bitmap = get_jamo(i_jung, jung_row)

        # Compose: OR the pieces together into one cell.
        composed = _compose_bitmaps(cho_bitmap, jung_bitmap, cell_w, cell_h)
        if index_jong > 0:
            jong_bitmap = get_jamo(i_jong, jong_row)
            _compose_bitmap_into(composed, jong_bitmap, cell_w, cell_h)

        # Determine advance width: some medials get one extra pixel.
        advance_width = cell_w + 1 if i_jung in SC.HANGUL_PEAKS_WITH_EXTRA_WIDTH else cell_w

        props = GlyphProps(width=advance_width)
        result[c] = ExtractedGlyph(c, props, composed)

    print(f" Hangul syllable composition done: {len(result)} glyphs")

    # Store jamo variant bitmaps in PUA for GSUB assembly
    print(" Extracting jamo variants for GSUB...")
    variants = extract_hangul_jamo_variants(assets_dir)
    variant_count = 0
    for (col, row), bm in variants.items():
        pua = _pua_for_jamo_variant(col, row)
        if pua not in result:
            # Jungseong (rows 15-16) and jongseong (rows 17-18) overlay the
            # choseong, so they need zero advance width.
            w = 0 if 15 <= row <= 18 else cell_w
            result[pua] = ExtractedGlyph(pua, GlyphProps(width=w), bm)
            variant_count += 1

    # Ensure jungseong filler PUA variants exist (col=0, rows 15-16).
    # The filler has an empty bitmap so extract_hangul_jamo_variants skips
    # it, but the vjmo GSUB lookup needs a PUA target to substitute to.
    # NOTE(review): both filler glyphs share this one bitmap instance —
    # fine as long as nothing mutates it downstream; verify if that changes.
    empty_bm = [[0] * cell_w for _ in range(cell_h)]
    for row in [15, 16]:
        pua = _pua_for_jamo_variant(0, row)
        if pua not in result:
            result[pua] = ExtractedGlyph(pua, GlyphProps(width=0), empty_bm)
            variant_count += 1

    print(f" Stored {variant_count} jamo variant glyphs in PUA (0x{HANGUL_PUA_BASE:05X}+)")
    print(f" Total Hangul glyphs: {len(result)}")
    return result
|
||||
|
||||
|
||||
def get_jamo_gsub_data():
    """
    Generate the data needed for Hangul jamo GSUB lookups.

    Row-selection rules (from the Kotlin renderer):
      choseong row  = getHanInitialRow(i_cho, i_jung, i_jong)
      jungseong row = 15 without a final consonant, 16 with one
      jongseong row = 17 normally, 18 when the jungseong is a "rightie"

    Returns a dict with:
      - 'pua_fn': function(col, row) -> PUA codepoint for a jamo variant
      - 'pua_base': first PUA codepoint of the variant stripe
    """
    data = {}
    data['pua_fn'] = _pua_for_jamo_variant
    data['pua_base'] = HANGUL_PUA_BASE
    return data
|
||||
126
OTFbuild/keming_machine.py
Normal file
126
OTFbuild/keming_machine.py
Normal file
@@ -0,0 +1,126 @@
|
||||
"""
|
||||
Generate kerning pairs from shape rules.
|
||||
Ported from TerrarumSansBitmap.kt "The Keming Machine" section.
|
||||
|
||||
6 base rules + 6 mirrored (auto-generated) = 12 rules total.
|
||||
Also includes r+dot special pairs.
|
||||
|
||||
Output kern values scaled by SCALE (50 units/pixel):
|
||||
-1px -> -50 units, -2px -> -100 units
|
||||
"""
|
||||
|
||||
from typing import Dict, Tuple
|
||||
|
||||
from glyph_parser import ExtractedGlyph
|
||||
import sheet_config as SC
|
||||
|
||||
SCALE = SC.SCALE
|
||||
|
||||
|
||||
class _Ing:
    """Pattern matcher for kerning shape bits.

    Each character of the pattern string maps to one bit position via
    SC.KEMING_BIT_MASK:
      '@' -> the bit must be set,
      '`' -> the bit must be clear,
      anything else -> don't care.
    """

    def __init__(self, s):
        self.s = s
        care = 0
        want = 0
        for pos, ch in enumerate(s):
            if ch == '@':
                care |= SC.KEMING_BIT_MASK[pos]
                want |= SC.KEMING_BIT_MASK[pos]
            elif ch == '`':
                care |= SC.KEMING_BIT_MASK[pos]
        self.care_bits = care   # bits this pattern constrains
        self.rule_bits = want   # required values of the constrained bits

    def matches(self, shape_bits):
        # A shape matches when every constrained bit has the required value.
        return (shape_bits & self.care_bits) == self.rule_bits
|
||||
|
||||
|
||||
class _Kem:
|
||||
def __init__(self, first, second, bb=2, yy=1):
|
||||
self.first = first
|
||||
self.second = second
|
||||
self.bb = bb
|
||||
self.yy = yy
|
||||
|
||||
|
||||
def _build_kerning_rules():
    """Build the 12 kerning rules (6 base + 6 mirrored).

    Each pattern string is 10 characters, read two characters per shape
    column.  The mirror of a rule swaps the left/right patterns and flips
    each character pair, so the rule also applies to the reflected shapes.
    """
    base_rules = [
        _Kem(_Ing("_`_@___`__"), _Ing("`_`___@___")),
        _Kem(_Ing("_@_`___`__"), _Ing("`_________")),
        _Kem(_Ing("_@_@___`__"), _Ing("`___@_@___"), 1, 1),
        _Kem(_Ing("_@_@_`_`__"), _Ing("`_____@___")),
        _Kem(_Ing("___`_`____"), _Ing("`___@_`___")),
        _Kem(_Ing("___`_`____"), _Ing("`_@___`___")),
    ]

    mirrored = []
    for rule in base_rules:
        left = rule.first.s
        right = rule.second.s
        new_left = []
        new_right = []
        # Walk the strings two characters at a time, swapping the members
        # of each pair while exchanging the left and right patterns.
        for c in range(0, len(left), 2):
            new_left.append(right[c + 1])
            new_left.append(right[c])
            new_right.append(left[c + 1])
            new_right.append(left[c])
        mirrored.append(_Kem(
            _Ing(''.join(new_left)),
            _Ing(''.join(new_right)),
            rule.bb, rule.yy
        ))

    return base_rules + mirrored
|
||||
|
||||
|
||||
# Built once at import time; the rules are stateless and shared by all calls.
_KERNING_RULES = _build_kerning_rules()
|
||||
|
||||
|
||||
def generate_kerning_pairs(glyphs: Dict[int, ExtractedGlyph]) -> Dict[Tuple[int, int], int]:
    """
    Generate kerning pairs from all glyphs that have kerning data.
    Returns dict of (left_codepoint, right_codepoint) -> kern_offset_in_font_units.
    Negative values = tighter spacing.
    """
    pairs = {}

    # Only glyphs that declare kern data participate in rule matching.
    kernable = {}
    for cp, glyph in glyphs.items():
        if glyph.props.has_kern_data:
            kernable[cp] = glyph

    if not kernable:
        print(" [KemingMachine] No glyphs with kern data found")
        return pairs

    print(f" [KemingMachine] {len(kernable)} glyphs with kern data")

    # Special rule: lowercase r followed by a dot always tightens by 1px.
    r_dot_count = 0
    for r_cp in SC.LOWERCASE_RS:
        for dot_cp in SC.DOTS:
            if r_cp in glyphs and dot_cp in glyphs:
                pairs[(r_cp, dot_cp)] = -1 * SCALE
                r_dot_count += 1

    # Try every ordered pair of kernable glyphs against the shape rules.
    codes = list(kernable.keys())
    pairs_found = 0
    for left_cp in codes:
        left_props = kernable[left_cp].props
        for right_cp in codes:
            right_props = kernable[right_cp].props
            for rule in _KERNING_RULES:
                if not (rule.first.matches(left_props.kerning_mask)
                        and rule.second.matches(right_props.kerning_mask)):
                    continue
                # Y-type glyphs on either side use the smaller contraction.
                amount = rule.yy if (left_props.is_kern_y_type or right_props.is_kern_y_type) else rule.bb
                if amount > 0:
                    pairs[(left_cp, right_cp)] = -amount * SCALE
                    pairs_found += 1
                break  # first matching rule wins

    print(f" [KemingMachine] Generated {pairs_found} kerning pairs (+ {r_dot_count} r-dot pairs)")
    return pairs
|
||||
2171
OTFbuild/opentype_features.py
Normal file
2171
OTFbuild/opentype_features.py
Normal file
File diff suppressed because it is too large
Load Diff
10
OTFbuild/otf2woff2.py
Executable file
10
OTFbuild/otf2woff2.py
Executable file
@@ -0,0 +1,10 @@
|
||||
#!/usr/bin/env python3
"""Convert an OTF/TTF font to WOFF2 format.

Usage: otf2woff2.py SRC DST
"""
import sys

from fontTools.ttLib import TTFont

# ROBUSTNESS: fail with a usage message instead of an IndexError when the
# script is invoked with the wrong number of arguments.
if len(sys.argv) != 3:
    sys.exit(f"Usage: {sys.argv[0]} SRC DST")

src, dst = sys.argv[1], sys.argv[2]
font = TTFont(src)
font.flavor = 'woff2'  # fontTools re-serialises with WOFF2 compression on save
font.save(dst)
print(f" Written {dst}")
|
||||
3
OTFbuild/requirements.txt
Normal file
3
OTFbuild/requirements.txt
Normal file
@@ -0,0 +1,3 @@
|
||||
fonttools>=4.47.0
|
||||
brotli>=1.1.0
|
||||
opentype-sanitizer>=9.2.0
|
||||
543
OTFbuild/sheet_config.py
Normal file
543
OTFbuild/sheet_config.py
Normal file
@@ -0,0 +1,543 @@
|
||||
"""
|
||||
Sheet definitions, code ranges, index functions, and font metric constants.
|
||||
Ported from TerrarumSansBitmap.kt companion object and SheetConfig.kt.
|
||||
"""
|
||||
|
||||
# Font metrics
|
||||
H = 20
|
||||
H_UNIHAN = 16
|
||||
W_HANGUL_BASE = 13
|
||||
W_UNIHAN = 16
|
||||
W_LATIN_WIDE = 9
|
||||
W_VAR_INIT = 15
|
||||
W_WIDEVAR_INIT = 31
|
||||
HGAP_VAR = 1
|
||||
SIZE_CUSTOM_SYM = 20
|
||||
|
||||
H_DIACRITICS = 3
|
||||
H_STACKUP_LOWERCASE_SHIFTDOWN = 4
|
||||
H_OVERLAY_LOWERCASE_SHIFTDOWN = 2
|
||||
|
||||
LINE_HEIGHT = 24
|
||||
|
||||
# OTF metrics (1000 UPM, scale = 50 units/pixel)
|
||||
UNITS_PER_EM = 1000
|
||||
SCALE = 50 # units per pixel
|
||||
ASCENT = 16 * SCALE # 800
|
||||
DESCENT = 4 * SCALE # 200
|
||||
X_HEIGHT = 8 * SCALE # 400
|
||||
CAP_HEIGHT = 12 * SCALE # 600
|
||||
LINE_GAP = (LINE_HEIGHT - H) * SCALE # 200
|
||||
|
||||
# Sheet indices
|
||||
SHEET_ASCII_VARW = 0
|
||||
SHEET_HANGUL = 1
|
||||
SHEET_EXTA_VARW = 2
|
||||
SHEET_EXTB_VARW = 3
|
||||
SHEET_KANA = 4
|
||||
SHEET_CJK_PUNCT = 5
|
||||
SHEET_UNIHAN = 6
|
||||
SHEET_CYRILIC_VARW = 7
|
||||
SHEET_HALFWIDTH_FULLWIDTH_VARW = 8
|
||||
SHEET_UNI_PUNCT_VARW = 9
|
||||
SHEET_GREEK_VARW = 10
|
||||
SHEET_THAI_VARW = 11
|
||||
SHEET_HAYEREN_VARW = 12
|
||||
SHEET_KARTULI_VARW = 13
|
||||
SHEET_IPA_VARW = 14
|
||||
SHEET_RUNIC = 15
|
||||
SHEET_LATIN_EXT_ADD_VARW = 16
|
||||
SHEET_CUSTOM_SYM = 17
|
||||
SHEET_BULGARIAN_VARW = 18
|
||||
SHEET_SERBIAN_VARW = 19
|
||||
SHEET_TSALAGI_VARW = 20
|
||||
SHEET_PHONETIC_EXT_VARW = 21
|
||||
SHEET_DEVANAGARI_VARW = 22
|
||||
SHEET_KARTULI_CAPS_VARW = 23
|
||||
SHEET_DIACRITICAL_MARKS_VARW = 24
|
||||
SHEET_GREEK_POLY_VARW = 25
|
||||
SHEET_EXTC_VARW = 26
|
||||
SHEET_EXTD_VARW = 27
|
||||
SHEET_CURRENCIES_VARW = 28
|
||||
SHEET_INTERNAL_VARW = 29
|
||||
SHEET_LETTERLIKE_MATHS_VARW = 30
|
||||
SHEET_ENCLOSED_ALPHNUM_SUPL_VARW = 31
|
||||
SHEET_TAMIL_VARW = 32
|
||||
SHEET_BENGALI_VARW = 33
|
||||
SHEET_BRAILLE_VARW = 34
|
||||
SHEET_SUNDANESE_VARW = 35
|
||||
SHEET_DEVANAGARI2_INTERNAL_VARW = 36
|
||||
SHEET_CODESTYLE_ASCII_VARW = 37
|
||||
SHEET_ALPHABETIC_PRESENTATION_FORMS = 38
|
||||
SHEET_HENTAIGANA_VARW = 39
|
||||
SHEET_CONTROL_PICTURES_VARW = 40
|
||||
SHEET_LEGACY_COMPUTING_VARW = 41
|
||||
|
||||
SHEET_UNKNOWN = 254
|
||||
|
||||
FILE_LIST = [
|
||||
"ascii_variable.tga",
|
||||
"hangul_johab.tga",
|
||||
"latinExtA_variable.tga",
|
||||
"latinExtB_variable.tga",
|
||||
"kana_variable.tga",
|
||||
"cjkpunct_variable.tga",
|
||||
"wenquanyi.tga",
|
||||
"cyrilic_variable.tga",
|
||||
"halfwidth_fullwidth_variable.tga",
|
||||
"unipunct_variable.tga",
|
||||
"greek_variable.tga",
|
||||
"thai_variable.tga",
|
||||
"hayeren_variable.tga",
|
||||
"kartuli_variable.tga",
|
||||
"ipa_ext_variable.tga",
|
||||
"futhark.tga",
|
||||
"latinExt_additional_variable.tga",
|
||||
"puae000-e0ff.tga",
|
||||
"cyrilic_bulgarian_variable.tga",
|
||||
"cyrilic_serbian_variable.tga",
|
||||
"tsalagi_variable.tga",
|
||||
"phonetic_extensions_variable.tga",
|
||||
"devanagari_variable.tga",
|
||||
"kartuli_allcaps_variable.tga",
|
||||
"diacritical_marks_variable.tga",
|
||||
"greek_polytonic_xyswap_variable.tga",
|
||||
"latinExtC_variable.tga",
|
||||
"latinExtD_variable.tga",
|
||||
"currencies_variable.tga",
|
||||
"internal_variable.tga",
|
||||
"letterlike_symbols_variable.tga",
|
||||
"enclosed_alphanumeric_supplement_variable.tga",
|
||||
"tamil_extrawide_variable.tga",
|
||||
"bengali_variable.tga",
|
||||
"braille_variable.tga",
|
||||
"sundanese_variable.tga",
|
||||
"devanagari_internal_extrawide_variable.tga",
|
||||
"pua_codestyle_ascii_variable.tga",
|
||||
"alphabetic_presentation_forms_extrawide_variable.tga",
|
||||
"hentaigana_variable.tga",
|
||||
"control_pictures_variable.tga",
|
||||
"symbols_for_legacy_computing_variable.tga",
|
||||
]
|
||||
|
||||
CODE_RANGE = [
|
||||
list(range(0x00, 0x100)), # 0: ASCII
|
||||
list(range(0x1100, 0x1200)) + list(range(0xA960, 0xA980)) + list(range(0xD7B0, 0xD800)), # 1: Hangul Jamo
|
||||
list(range(0x100, 0x180)), # 2: Latin Ext A
|
||||
list(range(0x180, 0x250)), # 3: Latin Ext B
|
||||
list(range(0x3040, 0x3100)) + list(range(0x31F0, 0x3200)), # 4: Kana
|
||||
list(range(0x3000, 0x3040)), # 5: CJK Punct
|
||||
list(range(0x3400, 0xA000)), # 6: Unihan
|
||||
list(range(0x400, 0x530)), # 7: Cyrillic
|
||||
list(range(0xFF00, 0x10000)), # 8: Halfwidth/Fullwidth
|
||||
list(range(0x2000, 0x20A0)), # 9: Uni Punct
|
||||
list(range(0x370, 0x3CF)), # 10: Greek
|
||||
list(range(0xE00, 0xE60)), # 11: Thai
|
||||
list(range(0x530, 0x590)), # 12: Armenian
|
||||
list(range(0x10D0, 0x1100)), # 13: Georgian
|
||||
list(range(0x250, 0x300)), # 14: IPA
|
||||
list(range(0x16A0, 0x1700)), # 15: Runic
|
||||
list(range(0x1E00, 0x1F00)), # 16: Latin Ext Additional
|
||||
list(range(0xE000, 0xE100)), # 17: Custom Sym (PUA)
|
||||
list(range(0xF0000, 0xF0060)), # 18: Bulgarian
|
||||
list(range(0xF0060, 0xF00C0)), # 19: Serbian
|
||||
list(range(0x13A0, 0x13F6)), # 20: Cherokee
|
||||
list(range(0x1D00, 0x1DC0)), # 21: Phonetic Ext
|
||||
list(range(0x900, 0x980)) + list(range(0xF0100, 0xF0500)), # 22: Devanagari
|
||||
list(range(0x1C90, 0x1CC0)), # 23: Georgian Caps
|
||||
list(range(0x300, 0x370)), # 24: Diacritical Marks
|
||||
list(range(0x1F00, 0x2000)), # 25: Greek Polytonic
|
||||
list(range(0x2C60, 0x2C80)), # 26: Latin Ext C
|
||||
list(range(0xA720, 0xA800)), # 27: Latin Ext D
|
||||
list(range(0x20A0, 0x20D0)), # 28: Currencies
|
||||
list(range(0xFFE00, 0xFFFA0)), # 29: Internal
|
||||
list(range(0x2100, 0x2150)), # 30: Letterlike
|
||||
list(range(0x1F100, 0x1F200)), # 31: Enclosed Alphanum Supl
|
||||
list(range(0x0B80, 0x0C00)) + list(range(0xF00C0, 0xF0100)), # 32: Tamil
|
||||
list(range(0x980, 0xA00)), # 33: Bengali
|
||||
list(range(0x2800, 0x2900)), # 34: Braille
|
||||
list(range(0x1B80, 0x1BC0)) + list(range(0x1CC0, 0x1CD0)) + list(range(0xF0500, 0xF0510)), # 35: Sundanese
|
||||
list(range(0xF0110, 0xF0130)), # 36: Devanagari2 Internal
|
||||
list(range(0xF0520, 0xF0580)), # 37: Codestyle ASCII
|
||||
list(range(0xFB00, 0xFB18)), # 38: Alphabetic Presentation
|
||||
list(range(0x1B000, 0x1B170)), # 39: Hentaigana
|
||||
list(range(0x2400, 0x2440)), # 40: Control Pictures
|
||||
list(range(0x1FB00, 0x1FC00)), # 41: Legacy Computing
|
||||
]
|
||||
|
||||
CODE_RANGE_HANGUL_COMPAT = range(0x3130, 0x3190)
|
||||
|
||||
ALT_CHARSET_CODEPOINT_OFFSETS = [
|
||||
0,
|
||||
0xF0000 - 0x400, # Bulgarian
|
||||
0xF0060 - 0x400, # Serbian
|
||||
0xF0520 - 0x20, # Codestyle
|
||||
]
|
||||
|
||||
ALT_CHARSET_CODEPOINT_DOMAINS = [
|
||||
range(0, 0x10FFFF + 1),
|
||||
range(0x400, 0x460),
|
||||
range(0x400, 0x460),
|
||||
range(0x20, 0x80),
|
||||
]
|
||||
|
||||
# Unicode spacing characters
|
||||
NQSP = 0x2000
|
||||
MQSP = 0x2001
|
||||
ENSP = 0x2002
|
||||
EMSP = 0x2003
|
||||
THREE_PER_EMSP = 0x2004
|
||||
QUARTER_EMSP = 0x2005
|
||||
SIX_PER_EMSP = 0x2006
|
||||
FSP = 0x2007
|
||||
PSP = 0x2008
|
||||
THSP = 0x2009
|
||||
HSP = 0x200A
|
||||
ZWSP = 0x200B
|
||||
ZWNJ = 0x200C
|
||||
ZWJ = 0x200D
|
||||
SHY = 0xAD
|
||||
NBSP = 0xA0
|
||||
OBJ = 0xFFFC
|
||||
|
||||
FIXED_BLOCK_1 = 0xFFFD0
|
||||
MOVABLE_BLOCK_M1 = 0xFFFE0
|
||||
MOVABLE_BLOCK_1 = 0xFFFF0
|
||||
|
||||
CHARSET_OVERRIDE_DEFAULT = 0xFFFC0
|
||||
CHARSET_OVERRIDE_BG_BG = 0xFFFC1
|
||||
CHARSET_OVERRIDE_SR_SR = 0xFFFC2
|
||||
CHARSET_OVERRIDE_CODESTYLE = 0xFFFC3
|
||||
|
||||
# Alignment constants
|
||||
ALIGN_LEFT = 0
|
||||
ALIGN_RIGHT = 1
|
||||
ALIGN_CENTRE = 2
|
||||
ALIGN_BEFORE = 3
|
||||
|
||||
# Stack constants
|
||||
STACK_UP = 0
|
||||
STACK_DOWN = 1
|
||||
STACK_BEFORE_N_AFTER = 2
|
||||
STACK_UP_N_DOWN = 3
|
||||
STACK_DONT = 4
|
||||
|
||||
|
||||
def is_variable(filename):
    """Report whether the sheet filename denotes a variable-width sprite sheet."""
    suffix = "_variable.tga"
    return filename.endswith(suffix)
|
||||
|
||||
|
||||
def is_xy_swapped(filename):
    """Report whether the sheet stores glyphs with X and Y axes swapped (name contains 'xyswap')."""
    lowered = filename.lower()
    return "xyswap" in lowered
|
||||
|
||||
|
||||
def is_extra_wide(filename):
    """Report whether the sheet uses extra-wide cells (name contains 'extrawide')."""
    lowered = filename.lower()
    return "extrawide" in lowered
|
||||
|
||||
|
||||
def get_cell_width(sheet_index):
    """Returns the cell pitch in the sprite sheet (includes HGAP_VAR for variable sheets)."""
    name = FILE_LIST[sheet_index]
    # Filename-driven cases take precedence over sheet-index cases.
    if is_extra_wide(name):
        return W_WIDEVAR_INIT + HGAP_VAR  # 32
    if is_variable(name):
        return W_VAR_INIT + HGAP_VAR  # 16
    fixed_widths = {
        SHEET_UNIHAN: W_UNIHAN,
        SHEET_HANGUL: W_HANGUL_BASE,
        SHEET_CUSTOM_SYM: SIZE_CUSTOM_SYM,
        SHEET_RUNIC: W_LATIN_WIDE,
    }
    return fixed_widths.get(sheet_index, W_VAR_INIT + HGAP_VAR)
|
||||
|
||||
|
||||
def get_cell_height(sheet_index):
    """Cell height in pixels for the given sheet; most sheets use the global H."""
    special = {SHEET_UNIHAN: H_UNIHAN, SHEET_CUSTOM_SYM: SIZE_CUSTOM_SYM}
    return special.get(sheet_index, H)
|
||||
|
||||
|
||||
def get_columns(sheet_index):
    """Number of glyph columns per sheet row (Unihan packs 256, all others 16)."""
    return 256 if sheet_index == SHEET_UNIHAN else 16
|
||||
|
||||
|
||||
# Hangul constants
JUNG_COUNT = 21  # jungseong (vowel) count per syllable block
JONG_COUNT = 28  # jongseong slot count (slot 0 = no final consonant)

# Hangul shape arrays (sorted sets)
# These sets classify jungseong indices by glyph shape, driving which row
# of the sprite sheet supplies the choseong/jungseong/jongseong variants.
JUNGSEONG_I = frozenset([21, 61])
JUNGSEONG_OU = frozenset([9, 13, 14, 18, 34, 35, 39, 45, 51, 53, 54, 64, 73, 80, 83])
JUNGSEONG_OU_COMPLEX = frozenset(
    [10, 11, 16] + list(range(22, 34)) + [36, 37, 38] + list(range(41, 45)) +
    list(range(46, 51)) + list(range(56, 60)) + [63] + list(range(67, 73)) +
    list(range(74, 80)) + list(range(81, 84)) + list(range(85, 92)) + [93, 94]
)
JUNGSEONG_RIGHTIE = frozenset([2, 4, 6, 8, 11, 16, 32, 33, 37, 42, 44, 48, 50, 71, 72, 75, 78, 79, 83, 86, 87, 88, 94])
JUNGSEONG_OEWI = frozenset([12, 15, 17, 40, 52, 55, 89, 90, 91])
JUNGSEONG_EU = frozenset([19, 62, 66])
JUNGSEONG_YI = frozenset([20, 60, 65])
JUNGSEONG_UU = frozenset([14, 15, 16, 17, 18, 27, 30, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 59, 67, 68, 73, 77, 78, 79, 80, 81, 82, 83, 84, 91])
# "Wide" peaks: union of the OU- and EU-shaped vowels.
JUNGSEONG_WIDE = frozenset(list(JUNGSEONG_OU) + list(JUNGSEONG_EU))
CHOSEONG_GIYEOKS = frozenset([0, 1, 15, 23, 30, 34, 45, 51, 56, 65, 82, 90, 100, 101, 110, 111, 115])
HANGUL_PEAKS_WITH_EXTRA_WIDTH = frozenset([2, 4, 6, 8, 11, 16, 32, 33, 37, 42, 44, 48, 50, 71, 75, 78, 79, 83, 86, 87, 88, 94])

# Row remapping applied when a giyeok-like initial meets a UU-type vowel
# (see get_han_initial_row).
GIYEOK_REMAPPING = {5: 19, 6: 20, 7: 21, 8: 22, 11: 23, 12: 24}
|
||||
|
||||
|
||||
def is_hangul_choseong(c):
    """True when c is a Hangul leading consonant (Jamo or Jamo Extended-A)."""
    return c in range(0x1100, 0x1160) or c in range(0xA960, 0xA980)
|
||||
|
||||
|
||||
def is_hangul_jungseong(c):
    """True when c is a Hangul vowel jamo (Jamo or Jamo Extended-B vowels)."""
    return c in range(0x1160, 0x11A8) or c in range(0xD7B0, 0xD7C7)
|
||||
|
||||
|
||||
def is_hangul_jongseong(c):
    """True when c is a Hangul trailing consonant (Jamo or Jamo Extended-B finals)."""
    return c in range(0x11A8, 0x1200) or c in range(0xD7CB, 0xD7FC)
|
||||
|
||||
|
||||
def is_hangul_compat(c):
    """True when c falls in the Hangul Compatibility Jamo block (U+3130..U+318F)."""
    return c in range(0x3130, 0x3190)
|
||||
|
||||
|
||||
def to_hangul_choseong_index(c):
    """Map a choseong codepoint to its sheet index; Extended-A jamo start at 96.

    Raises ValueError for codepoints outside both choseong ranges.
    """
    for base, offset, last in ((0x1100, 0, 0x115F), (0xA960, 96, 0xA97F)):
        if base <= c <= last:
            return offset + (c - base)
    raise ValueError(f"Not a choseong: U+{c:04X}")
|
||||
|
||||
|
||||
def to_hangul_jungseong_index(c):
    """Sheet index of a jungseong codepoint, or None when c is not a jungseong.

    Extended-B vowels (U+D7B0..) continue at index 72.
    """
    result = None
    if 0x1160 <= c <= 0x11A7:
        result = c - 0x1160
    elif 0xD7B0 <= c <= 0xD7C6:
        result = 72 + (c - 0xD7B0)
    return result
|
||||
|
||||
|
||||
def to_hangul_jongseong_index(c):
    """Sheet index of a jongseong codepoint (1-based; 0 means no final),
    or None when c is not a jongseong. Extended-B finals continue at 89.
    """
    result = None
    if 0x11A8 <= c <= 0x11FF:
        result = 1 + (c - 0x11A8)
    elif 0xD7CB <= c <= 0xD7FB:
        result = 1 + 88 + (c - 0xD7CB)
    return result
|
||||
|
||||
|
||||
def get_han_initial_row(i, p, f):
    """Sprite-sheet row for the initial (choseong) glyph of a Hangul syllable.

    i, p, f: choseong / jungseong / jongseong indices. Base rows come in
    pairs; the second row of each pair (+1) is used when a final consonant
    is present (f != 0). When the vowel is UU-shaped and the initial is a
    giyeok-like consonant, the row is remapped to a dedicated variant row.

    Raises ValueError when the giyeok remapping has no entry for the row.
    """
    shape_rows = (
        (JUNGSEONG_I, 3),
        (JUNGSEONG_OEWI, 11),
        (JUNGSEONG_OU_COMPLEX, 7),
        (JUNGSEONG_OU, 5),
        (JUNGSEONG_EU, 9),
        (JUNGSEONG_YI, 13),
    )
    row = 1
    # First match wins; the order of the shape sets is significant.
    for jung_set, base_row in shape_rows:
        if p in jung_set:
            row = base_row
            break

    if f != 0:
        row += 1

    if p in JUNGSEONG_UU and i in CHOSEONG_GIYEOKS:
        remapped = GIYEOK_REMAPPING.get(row)
        if remapped is None:
            raise ValueError(f"Giyeok remapping failed: i={i} p={p} f={f} ret={row}")
        return remapped
    return row
|
||||
|
||||
|
||||
def get_han_medial_row(i, p, f):
    """Sprite-sheet row for the medial (jungseong) glyph: 15, or 16 when a final exists."""
    return 15 + (f != 0)
|
||||
|
||||
|
||||
def get_han_final_row(i, p, f):
    """Sprite-sheet row for the final (jongseong) glyph: 18 for right-leaning vowels, else 17."""
    return 18 if p in JUNGSEONG_RIGHTIE else 17
|
||||
|
||||
|
||||
# Kerning constants
# Bit masks, in read order, for unpacking the keming-machine shape bits
# (blue byte bits 7..0, then green byte bits 15 and 14).
KEMING_BIT_MASK = [1 << b for b in [7, 6, 5, 4, 3, 2, 1, 0, 15, 14]]

# Special characters for r+dot kerning
# Codepoints rendering as a lowercase r, including accented/variant forms.
LOWERCASE_RS = frozenset([0x72, 0x155, 0x157, 0x159, 0x211, 0x213, 0x27c, 0x1e59, 0x1e58, 0x1e5f])
# Comma and full stop.
DOTS = frozenset([0x2c, 0x2e])

# Devanagari internal encoding
# Internal PUA forms for the nuqta consonants U+0958..U+095F, indexed by (c - 0x0958).
DEVANAGARI_UNICODE_NUQTA_TABLE = [0xF0170, 0xF0171, 0xF0172, 0xF0177, 0xF017C, 0xF017D, 0xF0186, 0xF018A]
|
||||
|
||||
|
||||
def to_deva_internal(c):
    """Map a Devanagari consonant to the font's internal PUA form.

    Plain consonants U+0915..U+0939 map linearly onto 0xF0140..;
    nuqta consonants U+0958..U+095F go through a lookup table.

    Raises ValueError for any other codepoint.
    """
    if 0x0915 <= c <= 0x0939:
        return 0xF0140 + (c - 0x0915)
    if 0x0958 <= c <= 0x095F:
        return DEVANAGARI_UNICODE_NUQTA_TABLE[c - 0x0958]
    raise ValueError(f"No internal form for U+{c:04X}")
|
||||
|
||||
|
||||
# Every codepoint treated as a Devanagari consonant: Unicode consonants,
# nuqta consonants, additional consonants, plus the internal PUA forms.
DEVANAGARI_CONSONANTS = frozenset(
    list(range(0x0915, 0x093A)) + list(range(0x0958, 0x0960)) +
    list(range(0x0978, 0x0980)) + list(range(0xF0140, 0xF0500)) +
    list(range(0xF0106, 0xF010A))
)

# Sundanese internal forms
SUNDANESE_ING = 0xF0500
SUNDANESE_ENG = 0xF0501
SUNDANESE_EUNG = 0xF0502
SUNDANESE_IR = 0xF0503
SUNDANESE_ER = 0xF0504
SUNDANESE_EUR = 0xF0505
SUNDANESE_LU = 0xF0506
|
||||
|
||||
# Tamil constants
TAMIL_KSSA = 0xF00ED  # internal form of the KSSA ligature
TAMIL_SHRII = 0xF00EE  # internal form of the SHRII ligature
TAMIL_I = 0xBBF  # TAMIL VOWEL SIGN I
# Consonants that form ligatures with following signs.
TAMIL_LIGATING_CONSONANTS = [
    0x0B95, 0x0B99, 0x0B9A, 0x0B9E, 0x0B9F, 0x0BA3, 0x0BA4, 0x0BA8,
    0x0BA9, 0x0BAA, 0x0BAE, 0x0BAF, 0x0BB0, 0x0BB1, 0x0BB2, 0x0BB3,
    0x0BB4, 0x0BB5,
]
|
||||
|
||||
# Devanagari special codepoints
DEVANAGARI_VIRAMA = 0x94D
DEVANAGARI_NUQTA = 0x93C
# Internal PUA forms of frequently special-cased consonants.
DEVANAGARI_RA = to_deva_internal(0x930)
DEVANAGARI_YA = to_deva_internal(0x92F)
DEVANAGARI_RRA = to_deva_internal(0x931)
DEVANAGARI_VA = to_deva_internal(0x935)
DEVANAGARI_HA = to_deva_internal(0x939)
DEVANAGARI_U = 0x941
DEVANAGARI_UU = 0x942
DEVANAGARI_I_VOWEL = 0x093F
DEVANAGARI_II_VOWEL = 0x0940
DEVANAGARI_RYA = 0xF0106
DEVANAGARI_HALF_RYA = 0xF0107
DEVANAGARI_OPEN_YA = 0xF0108
DEVANAGARI_OPEN_HALF_YA = 0xF0109
DEVANAGARI_ALT_HALF_SHA = 0xF010F
DEVANAGARI_RA_SUB = 0xF010A # below-base RA (rakaar); transient glyph for blwf/cjct
DEVANAGARI_EYELASH_RA = 0xF010B
DEVANAGARI_RA_SUPER = 0xF010C
DEVANAGARI_RA_SUPER_COMPLEX = 0xF010D
MARWARI_DD = 0x978
MARWARI_LIG_DD_R = 0xF010E

# Precomposed RA/HA + U/UU syllable forms (internal).
DEVANAGARI_SYLL_RU = 0xF0100
DEVANAGARI_SYLL_RUU = 0xF0101
DEVANAGARI_SYLL_RRU = 0xF0102
DEVANAGARI_SYLL_RRUU = 0xF0103
DEVANAGARI_SYLL_HU = 0xF0104
DEVANAGARI_SYLL_HUU = 0xF0105
|
||||
|
||||
# Devanagari ligature codepoints (internal PUA forms)
DEVANAGARI_LIG_K_T = 0xF01BC
DEVANAGARI_LIG_K_SS = 0xF01A1
DEVANAGARI_LIG_J_NY = 0xF01A2
DEVANAGARI_LIG_T_T = 0xF01A3
DEVANAGARI_LIG_N_T = 0xF01A4
DEVANAGARI_LIG_N_N = 0xF01A5
DEVANAGARI_LIG_S_V = 0xF01A6
DEVANAGARI_LIG_SS_P = 0xF01A7
DEVANAGARI_LIG_SH_C = 0xF01A8
DEVANAGARI_LIG_SH_N = 0xF01A9
DEVANAGARI_LIG_SH_V = 0xF01AA
DEVANAGARI_LIG_J_Y = 0xF01AB
DEVANAGARI_LIG_J_J_Y = 0xF01AC

MARWARI_LIG_DD_DD = 0xF01BA
MARWARI_LIG_DD_DDH = 0xF01BB
DEVANAGARI_ANUSVARA_UPPER = 0xF016C
MARWARI_LIG_DD_Y = 0xF016E
MARWARI_HALFLIG_DD_Y = 0xF016F

# Devanagari range sets for feature generation
# Each presentation block is 0xF0 codepoints wide, in the order:
# full forms, half forms, forms with RA, half forms with RA.
DEVANAGARI_PRESENTATION_CONSONANTS = range(0xF0140, 0xF0230)
DEVANAGARI_PRESENTATION_CONSONANTS_HALF = range(0xF0230, 0xF0320)
DEVANAGARI_PRESENTATION_CONSONANTS_WITH_RA = range(0xF0320, 0xF0410)
DEVANAGARI_PRESENTATION_CONSONANTS_WITH_RA_HALF = range(0xF0410, 0xF0500)
|
||||
|
||||
# Index functions
|
||||
def _kana_index_y(c):
|
||||
return 12 if 0x31F0 <= c <= 0x31FF else (c - 0x3040) // 16
|
||||
|
||||
def _unihan_index_y(c):
|
||||
return (c - 0x3400) // 256
|
||||
|
||||
def _devanagari_index_y(c):
|
||||
return ((c - 0x0900) if c < 0xF0000 else (c - 0xF0080)) // 16
|
||||
|
||||
def _tamil_index_y(c):
|
||||
return ((c - 0x0B80) if c < 0xF0000 else (c - 0xF0040)) // 16
|
||||
|
||||
def _sundanese_index_y(c):
|
||||
if c >= 0xF0500:
|
||||
return (c - 0xF04B0) // 16
|
||||
if c < 0x1BC0:
|
||||
return (c - 0x1B80) // 16
|
||||
return (c - 0x1C80) // 16
|
||||
|
||||
|
||||
def index_x(c):
    """Column index within a 16-wide sheet row."""
    return c & 0x0F
|
||||
|
||||
def unihan_index_x(c):
    """Column index within a 256-wide Unihan sheet row (base U+3400)."""
    return (c - 0x3400) & 0xFF
|
||||
|
||||
def index_y(sheet_index, c):
    """Y-index (row) for codepoint c in the given sheet.

    Dispatches on sheet_index: most sheets are 16 columns wide, so the row
    is (c - block_base) // 16 with each sheet's own Unicode block base.
    Sheets with irregular layouts (Unihan, kana, Devanagari, Tamil,
    Sundanese) delegate to their dedicated helpers, and the single-row
    Hangul jamo sheet always returns 0. Unknown sheet indices fall back
    to c // 16.
    """
    return {
        SHEET_ASCII_VARW: lambda: c // 16,
        SHEET_UNIHAN: lambda: _unihan_index_y(c),
        SHEET_EXTA_VARW: lambda: (c - 0x100) // 16,
        SHEET_EXTB_VARW: lambda: (c - 0x180) // 16,
        SHEET_KANA: lambda: _kana_index_y(c),
        SHEET_CJK_PUNCT: lambda: (c - 0x3000) // 16,
        SHEET_CYRILIC_VARW: lambda: (c - 0x400) // 16,
        SHEET_HALFWIDTH_FULLWIDTH_VARW: lambda: (c - 0xFF00) // 16,
        SHEET_UNI_PUNCT_VARW: lambda: (c - 0x2000) // 16,
        SHEET_GREEK_VARW: lambda: (c - 0x370) // 16,
        SHEET_THAI_VARW: lambda: (c - 0xE00) // 16,
        SHEET_CUSTOM_SYM: lambda: (c - 0xE000) // 16,
        SHEET_HAYEREN_VARW: lambda: (c - 0x530) // 16,
        SHEET_KARTULI_VARW: lambda: (c - 0x10D0) // 16,
        SHEET_IPA_VARW: lambda: (c - 0x250) // 16,
        SHEET_RUNIC: lambda: (c - 0x16A0) // 16,
        SHEET_LATIN_EXT_ADD_VARW: lambda: (c - 0x1E00) // 16,
        SHEET_BULGARIAN_VARW: lambda: (c - 0xF0000) // 16,
        SHEET_SERBIAN_VARW: lambda: (c - 0xF0060) // 16,
        SHEET_TSALAGI_VARW: lambda: (c - 0x13A0) // 16,
        SHEET_PHONETIC_EXT_VARW: lambda: (c - 0x1D00) // 16,
        SHEET_DEVANAGARI_VARW: lambda: _devanagari_index_y(c),
        SHEET_KARTULI_CAPS_VARW: lambda: (c - 0x1C90) // 16,
        SHEET_DIACRITICAL_MARKS_VARW: lambda: (c - 0x300) // 16,
        SHEET_GREEK_POLY_VARW: lambda: (c - 0x1F00) // 16,
        SHEET_EXTC_VARW: lambda: (c - 0x2C60) // 16,
        SHEET_EXTD_VARW: lambda: (c - 0xA720) // 16,
        SHEET_CURRENCIES_VARW: lambda: (c - 0x20A0) // 16,
        SHEET_INTERNAL_VARW: lambda: (c - 0xFFE00) // 16,
        SHEET_LETTERLIKE_MATHS_VARW: lambda: (c - 0x2100) // 16,
        SHEET_ENCLOSED_ALPHNUM_SUPL_VARW: lambda: (c - 0x1F100) // 16,
        SHEET_TAMIL_VARW: lambda: _tamil_index_y(c),
        SHEET_BENGALI_VARW: lambda: (c - 0x980) // 16,
        SHEET_BRAILLE_VARW: lambda: (c - 0x2800) // 16,
        SHEET_SUNDANESE_VARW: lambda: _sundanese_index_y(c),
        SHEET_DEVANAGARI2_INTERNAL_VARW: lambda: (c - 0xF0110) // 16,
        SHEET_CODESTYLE_ASCII_VARW: lambda: (c - 0xF0520) // 16,
        SHEET_ALPHABETIC_PRESENTATION_FORMS: lambda: (c - 0xFB00) // 16,
        SHEET_HENTAIGANA_VARW: lambda: (c - 0x1B000) // 16,
        SHEET_CONTROL_PICTURES_VARW: lambda: (c - 0x2400) // 16,
        SHEET_LEGACY_COMPUTING_VARW: lambda: (c - 0x1FB00) // 16,
        SHEET_HANGUL: lambda: 0,
    }.get(sheet_index, lambda: c // 16)()
|
||||
90
OTFbuild/tga_reader.py
Normal file
90
OTFbuild/tga_reader.py
Normal file
@@ -0,0 +1,90 @@
|
||||
"""
|
||||
TGA reader for uncompressed true-colour images (Type 2).
|
||||
Stores pixels as RGBA8888: (R<<24 | G<<16 | B<<8 | A).
|
||||
|
||||
Matches the convention in TerrarumSansBitmap.kt where .and(255) checks
|
||||
the alpha channel (lowest byte).
|
||||
"""
|
||||
|
||||
import struct
|
||||
from typing import List
|
||||
|
||||
|
||||
class TgaImage:
    """In-memory image decoded from a TGA file.

    Pixels are stored as RGBA8888 ints (R<<24 | G<<16 | B<<8 | A)
    in a flat, row-major list.
    """

    __slots__ = ('width', 'height', 'pixels')

    def __init__(self, width: int, height: int, pixels: List[int]):
        self.width = width
        self.height = height
        self.pixels = pixels  # flat array, row-major

    def get_pixel(self, x: int, y: int) -> int:
        """Get pixel at (x, y) as RGBA8888 (R in bits 31-24, A in bits 7-0).

        Out-of-bounds coordinates read as 0 (fully transparent black).
        """
        inside = 0 <= x < self.width and 0 <= y < self.height
        if not inside:
            return 0
        return self.pixels[x + y * self.width]
|
||||
|
||||
|
||||
def read_tga(path: str) -> TgaImage:
    """Read an uncompressed true-colour TGA file (image type 2).

    Supports 24-bit (BGR) and 32-bit (BGRA) pixels. Returns a TgaImage
    whose pixels are RGBA8888 ints (R in bits 31-24, A in bits 7-0),
    matching the convention checked elsewhere with .and(255).

    Raises ValueError on unsupported formats or truncated files
    (previously a truncated file surfaced as a bare IndexError).
    """
    with open(path, 'rb') as f:
        data = f.read()

    HEADER_SIZE = 18
    if len(data) < HEADER_SIZE:
        raise ValueError(f"Truncated TGA header: {len(data)} bytes")

    # Fixed 18-byte TGA header, little-endian.
    (id_length, colour_map_type, image_type,
     _cmap_first, _cmap_len, _cmap_entry_bits,
     _x_origin, _y_origin, width, height,
     bits_per_pixel, descriptor) = struct.unpack_from('<BBBHHBHHHHBB', data, 0)

    # Descriptor bit 5: rows stored top-to-bottom (else bottom-to-top).
    top_to_bottom = (descriptor & 0x20) != 0
    bytes_per_pixel = bits_per_pixel // 8

    if colour_map_type != 0:
        raise ValueError("Colour-mapped TGA not supported")
    if image_type != 2:
        raise ValueError(f"Only uncompressed true-colour TGA supported (type 2), got type {image_type}")
    if bytes_per_pixel not in (3, 4):
        raise ValueError(f"Only 24-bit or 32-bit TGA supported, got {bits_per_pixel}-bit")

    # Pixel data starts after the header and the optional image-ID field.
    pos = HEADER_SIZE + id_length
    needed = pos + width * height * bytes_per_pixel
    if len(data) < needed:
        raise ValueError(f"Truncated TGA pixel data: need {needed} bytes, have {len(data)}")

    pixels = [0] * (width * height)

    for row in range(height):
        y = row if top_to_bottom else (height - 1 - row)
        base = y * width
        for x in range(width):
            # TGA stores pixels as BGR(A).
            b = data[pos]
            g = data[pos + 1]
            r = data[pos + 2]
            a = data[pos + 3] if bytes_per_pixel == 4 else 0xFF
            pos += bytes_per_pixel

            # Store as RGBA8888: R in high byte, A in low byte.
            pixels[base + x] = (r << 24) | (g << 16) | (b << 8) | a

    return TgaImage(width, height, pixels)
|
||||
Binary file not shown.
Binary file not shown.
BIN
assets/ascii_variable.tga
LFS
BIN
assets/ascii_variable.tga
LFS
Binary file not shown.
BIN
assets/currencies_variable.tga
LFS
BIN
assets/currencies_variable.tga
LFS
Binary file not shown.
Binary file not shown.
BIN
assets/cyrilic_variable.tga
LFS
BIN
assets/cyrilic_variable.tga
LFS
Binary file not shown.
BIN
assets/devanagari_variable.tga
LFS
BIN
assets/devanagari_variable.tga
LFS
Binary file not shown.
Binary file not shown.
Binary file not shown.
BIN
assets/hangul_johab.tga
LFS
BIN
assets/hangul_johab.tga
LFS
Binary file not shown.
BIN
assets/hayeren_variable.tga
LFS
BIN
assets/hayeren_variable.tga
LFS
Binary file not shown.
BIN
assets/hentaigana_variable.tga
LFS
BIN
assets/hentaigana_variable.tga
LFS
Binary file not shown.
BIN
assets/latinExtA_variable.tga
LFS
BIN
assets/latinExtA_variable.tga
LFS
Binary file not shown.
BIN
assets/latinExtB_variable.tga
LFS
BIN
assets/latinExtB_variable.tga
LFS
Binary file not shown.
BIN
assets/latinExtD_variable.tga
LFS
BIN
assets/latinExtD_variable.tga
LFS
Binary file not shown.
BIN
assets/puae000-e0ff.tga
LFS
BIN
assets/puae000-e0ff.tga
LFS
Binary file not shown.
Binary file not shown.
Binary file not shown.
|
Before Width: | Height: | Size: 320 KiB |
Binary file not shown.
|
Before Width: | Height: | Size: 320 KiB |
Binary file not shown.
BIN
demo.PNG
BIN
demo.PNG
Binary file not shown.
|
Before Width: | Height: | Size: 167 KiB After Width: | Height: | Size: 168 KiB |
@@ -112,12 +112,13 @@ How multilingual? Real multilingual!
|
||||
⁃ Latin Extended-A/B/C/D
|
||||
⁃ Armenian
|
||||
⁃ Bengaliᶠⁱ
|
||||
⁃ Braile Patterns
|
||||
⁃ Braille Patterns
|
||||
⁃ Cherokee⁷
|
||||
⁃ CJK Symbols and Punctuation
|
||||
⁃ CJK Unified Ideographs⁶
|
||||
⁃ CJK Unified Ideographs Extension A¹²·¹
|
||||
⁃ Combining Diacritical Marks
|
||||
⁃ Control Pictures
|
||||
⁃ Currency Symbols
|
||||
⁃ Cyrillicᴭ
|
||||
⁃ Cyrillic Supplementᴭ
|
||||
@@ -148,6 +149,7 @@ How multilingual? Real multilingual!
|
||||
⁃ Sundanese
|
||||
⁃ Sundanese Supplement
|
||||
⁃ Superscripts and Subscripts
|
||||
⁃ Symbols for Legacy Computing
|
||||
⁃ Tamil
|
||||
⁃ Thai
|
||||
|
||||
|
||||
550
keming_calculator.html
Normal file
550
keming_calculator.html
Normal file
@@ -0,0 +1,550 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<title>Keming Machine Tag Calculator</title>
|
||||
<style>
|
||||
* { box-sizing: border-box; margin: 0; padding: 0; }
|
||||
body { font-family: 'Segoe UI', system-ui, sans-serif; background: #f5f5f5; color: #222; padding: 24px; min-height: 100vh; }
|
||||
h1 { font-size: 1.4em; margin-bottom: 4px; color: #111; }
|
||||
.subtitle { color: #666; font-size: 0.85em; margin-bottom: 20px; }
|
||||
|
||||
.main { display: flex; gap: 24px; flex-wrap: wrap; align-items: flex-start; }
|
||||
.panel { background: #fff; border-radius: 8px; padding: 20px; border: 1px solid #ddd; }
|
||||
.panel h2 { font-size: 1em; margin-bottom: 12px; color: #1a5fb4; }
|
||||
.col-left { display: flex; flex-direction: column; gap: 20px; }
|
||||
|
||||
/* Lowheight toggle */
|
||||
.lowheight-section { min-width: 300px; }
|
||||
.lowheight-row { display: flex; align-items: center; gap: 12px; }
|
||||
.lowheight-btn {
|
||||
width: 140px; height: 36px; border: 2px solid #bbb; border-radius: 4px;
|
||||
background: #f0f0f0; color: #666; font-weight: bold; font-size: 0.9em;
|
||||
cursor: pointer; transition: all 0.15s; user-select: none;
|
||||
}
|
||||
.lowheight-btn:hover { border-color: #888; background: #e8e8e8; }
|
||||
.lowheight-btn.active { background: #2a5a8a; border-color: #1a4a7a; color: #fff; }
|
||||
.lowheight-hint { font-size: 0.8em; color: #888; margin-top: 8px; }
|
||||
|
||||
/* Shape grid */
|
||||
.shape-section { min-width: 300px; }
|
||||
.grid-wrapper { display: flex; gap: 20px; align-items: flex-start; }
|
||||
.shape-grid { display: grid; grid-template-columns: auto 56px 10px 56px auto; grid-template-rows: repeat(9, auto); align-items: center; gap: 2px 0; }
|
||||
.zone-btn {
|
||||
width: 52px; height: 32px; border: 2px solid #bbb; border-radius: 4px;
|
||||
background: #f0f0f0; color: #666; font-weight: bold; font-size: 0.9em;
|
||||
cursor: pointer; transition: all 0.15s; display: flex; align-items: center; justify-content: center;
|
||||
user-select: none;
|
||||
}
|
||||
.zone-btn:hover { border-color: #888; background: #e8e8e8; }
|
||||
.zone-btn.active { background: #2a5a8a; border-color: #1a4a7a; color: #fff; }
|
||||
.zone-btn.active.wye { background: #7b3f9e; border-color: #5a2d75; }
|
||||
.grid-label { color: #888; font-size: 0.75em; text-align: center; padding: 0 4px; white-space: nowrap; }
|
||||
.grid-label-left { text-align: right; }
|
||||
.grid-label-right { text-align: left; }
|
||||
.grid-sep { grid-column: 1 / -1; height: 3px; background: #999; margin: 2px 0; border-radius: 1px; }
|
||||
.grid-dot { text-align: center; color: #ccc; font-size: 0.7em; }
|
||||
.grid-spacer { height: 12px; }
|
||||
|
||||
/* Y toggle */
|
||||
.y-toggle { margin-top: 16px; }
|
||||
.y-toggle label { display: flex; align-items: center; gap: 10px; cursor: pointer; font-size: 0.9em; }
|
||||
.y-toggle input { display: none; }
|
||||
.toggle-track {
|
||||
width: 48px; height: 24px; background: #2a5a8a; border-radius: 12px;
|
||||
position: relative; transition: background 0.2s;
|
||||
}
|
||||
.toggle-track::after {
|
||||
content: ''; position: absolute; top: 2px; left: 2px;
|
||||
width: 20px; height: 20px; background: #fff; border-radius: 50%;
|
||||
transition: transform 0.2s; box-shadow: 0 1px 3px rgba(0,0,0,0.3);
|
||||
}
|
||||
.y-toggle input:checked + .toggle-track { background: #7b3f9e; }
|
||||
.y-toggle input:checked + .toggle-track::after { transform: translateX(24px); }
|
||||
.toggle-labels { display: flex; gap: 4px; font-size: 0.8em; }
|
||||
.toggle-labels span { padding: 2px 6px; border-radius: 3px; }
|
||||
.toggle-labels .active-label { background: #2a5a8a; color: #fff; }
|
||||
.toggle-labels .active-label.wye { background: #7b3f9e; }
|
||||
|
||||
/* Codepoint input */
|
||||
.cp-section { min-width: 300px; }
|
||||
.cp-input-row { display: flex; gap: 8px; align-items: center; flex-wrap: wrap; }
|
||||
.cp-input {
|
||||
width: 180px; height: 34px; border: 2px solid #bbb; border-radius: 4px;
|
||||
background: #fafafa; padding: 0 8px; font-family: 'Consolas', 'Fira Code', monospace;
|
||||
font-size: 0.95em; color: #222; outline: none;
|
||||
}
|
||||
.cp-input:focus { border-color: #1a5fb4; }
|
||||
.cp-input.error { border-color: #c00; background: #fff0f0; }
|
||||
.cp-formats { font-size: 0.75em; color: #888; margin-top: 6px; line-height: 1.5; }
|
||||
.cp-formats code { background: #eee; padding: 1px 4px; border-radius: 3px; font-family: 'Consolas', monospace; color: #333; }
|
||||
.cp-resolved { margin-top: 8px; font-size: 0.85em; color: #444; }
|
||||
.cp-resolved .cp-char { font-size: 1.3em; }
|
||||
|
||||
/* Output */
|
||||
.output-section { min-width: 280px; }
|
||||
.pixel-row { display: flex; align-items: center; gap: 12px; margin-bottom: 12px; padding: 10px; background: #f8f8f8; border-radius: 6px; border: 1px solid #e0e0e0; }
|
||||
.colour-swatch {
|
||||
width: 48px; height: 48px; border-radius: 6px; border: 2px solid #ccc;
|
||||
flex-shrink: 0; image-rendering: pixelated;
|
||||
}
|
||||
.pixel-info { font-size: 0.85em; line-height: 1.6; }
|
||||
.pixel-info .hex { font-family: 'Consolas', 'Fira Code', monospace; font-size: 1.1em; color: #111; }
|
||||
.pixel-info .channels { color: #555; }
|
||||
.pixel-info .channel-r { color: #c00; }
|
||||
.pixel-info .channel-g { color: #070; }
|
||||
.pixel-info .channel-b { color: #00c; }
|
||||
.pixel-label { font-size: 0.8em; color: #1a5fb4; margin-bottom: 4px; font-weight: 600; }
|
||||
.pixel-inactive { font-size: 0.85em; color: #999; }
|
||||
.bit-display { font-family: 'Consolas', 'Fira Code', monospace; font-size: 0.8em; color: #777; margin-top: 2px; }
|
||||
|
||||
/* Mask display */
|
||||
.mask-section { margin-top: 16px; padding: 10px; background: #f8f8f8; border-radius: 6px; border: 1px solid #e0e0e0; }
|
||||
.mask-section .label { font-size: 0.8em; color: #1a5fb4; margin-bottom: 4px; }
|
||||
.mask-val { font-family: 'Consolas', 'Fira Code', monospace; font-size: 0.95em; color: #111; }
|
||||
|
||||
/* Examples */
|
||||
.examples-section { margin-top: 20px; }
|
||||
.examples-section h2 { font-size: 1em; margin-bottom: 8px; color: #1a5fb4; }
|
||||
.example-grid { display: grid; grid-template-columns: repeat(auto-fill, minmax(200px, 1fr)); gap: 4px; }
|
||||
.example-item {
|
||||
font-size: 0.8em; padding: 4px 8px; border-radius: 4px;
|
||||
background: #f0f0f0; cursor: pointer; transition: background 0.15s;
|
||||
display: flex; justify-content: space-between; align-items: center;
|
||||
border: 1px solid #e0e0e0;
|
||||
}
|
||||
.example-item:hover { background: #e0ecf8; border-color: #b0c8e8; }
|
||||
.example-item .ex-code { color: #555; }
|
||||
.example-item .ex-char { font-size: 1.2em; min-width: 24px; text-align: center; }
|
||||
|
||||
/* Notes */
|
||||
.notes { margin-top: 20px; font-size: 0.8em; color: #666; line-height: 1.5; }
|
||||
.notes code { background: #eee; padding: 1px 5px; border-radius: 3px; font-family: 'Consolas', monospace; color: #333; }
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
|
||||
<h1>Keming Machine Tag Calculator</h1>
|
||||
<p class="subtitle">Calculate pixel colour values for the three Keming Machine tag pixels (K at Y+6, Y+7, Y+8)</p>
|
||||
|
||||
<div class="main">
|
||||
<div class="col-left">
|
||||
|
||||
<!-- Pixel 1: Lowheight -->
|
||||
<div class="panel lowheight-section">
|
||||
<h2>Pixel 1 — Low Height (Y+6)</h2>
|
||||
<div class="lowheight-row">
|
||||
<button class="lowheight-btn" id="lowheightBtn" onclick="toggleLowheight()">Low Height: OFF</button>
|
||||
</div>
|
||||
<p class="lowheight-hint">
|
||||
Set for lowercase-height characters (a, b, c, d, e, etc.).<br>
|
||||
Set if above-diacritics should be lowered.
|
||||
</p>
|
||||
</div>
|
||||
|
||||
<!-- Pixel 2: Shape Grid -->
|
||||
<div class="panel shape-section">
|
||||
<h2>Pixel 2 — Glyph Shape (Y+7)</h2>
|
||||
<p style="font-size:0.8em; color:#666; margin-bottom:12px;">Click zones to mark which parts of the glyph are occupied.</p>
|
||||
|
||||
<div class="grid-wrapper">
|
||||
<div class="shape-grid" id="shapeGrid">
|
||||
<!-- Row: A B (top / ascenders) -->
|
||||
<span class="grid-label grid-label-left">top</span>
|
||||
<button class="zone-btn" data-zone="A" onclick="toggleZone(this)">A</button>
|
||||
<span class="grid-dot">·</span>
|
||||
<button class="zone-btn" data-zone="B" onclick="toggleZone(this)">B</button>
|
||||
<span class="grid-label grid-label-right">ascender</span>
|
||||
|
||||
<!-- Spacer row -->
|
||||
<span></span><span class="grid-spacer"></span><span></span><span class="grid-spacer"></span><span></span>
|
||||
|
||||
<!-- Row: C D -->
|
||||
<span class="grid-label grid-label-left">mid</span>
|
||||
<button class="zone-btn" data-zone="C" onclick="toggleZone(this)">C</button>
|
||||
<span class="grid-dot">·</span>
|
||||
<button class="zone-btn" data-zone="D" onclick="toggleZone(this)">D</button>
|
||||
<span class="grid-label grid-label-right">cap hole</span>
|
||||
|
||||
<!-- Row: E F -->
|
||||
<span class="grid-label grid-label-left"></span>
|
||||
<button class="zone-btn" data-zone="E" onclick="toggleZone(this)">E</button>
|
||||
<span class="grid-dot">·</span>
|
||||
<button class="zone-btn" data-zone="F" onclick="toggleZone(this)">F</button>
|
||||
<span class="grid-label grid-label-right">lc hole</span>
|
||||
|
||||
<!-- Row: G H -->
|
||||
<span class="grid-label grid-label-left">btm</span>
|
||||
<button class="zone-btn" data-zone="G" onclick="toggleZone(this)">G</button>
|
||||
<span class="grid-dot">·</span>
|
||||
<button class="zone-btn" data-zone="H" onclick="toggleZone(this)">H</button>
|
||||
<span class="grid-label grid-label-right">baseline</span>
|
||||
|
||||
<!-- Baseline separator -->
|
||||
<div class="grid-sep"></div>
|
||||
|
||||
<!-- Spacer row -->
|
||||
<span></span><span class="grid-spacer"></span><span></span><span class="grid-spacer"></span><span></span>
|
||||
|
||||
<!-- Row: J K (below baseline) -->
|
||||
<span class="grid-label grid-label-left">desc</span>
|
||||
<button class="zone-btn" data-zone="J" onclick="toggleZone(this)">J</button>
|
||||
<span class="grid-dot">·</span>
|
||||
<button class="zone-btn" data-zone="K" onclick="toggleZone(this)">K</button>
|
||||
<span class="grid-label grid-label-right">descender</span>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Y (Bar/Wye) toggle -->
|
||||
<div class="y-toggle">
|
||||
<label>
|
||||
<input type="checkbox" id="yToggle" onchange="recalc()">
|
||||
<span class="toggle-track"></span>
|
||||
<span class="toggle-labels">
|
||||
<span id="barLabel" class="active-label">Bar (B-type, 2px kern)</span>
|
||||
<span id="wyeLabel">Wye (Y-type, 1px kern)</span>
|
||||
</span>
|
||||
</label>
|
||||
<p style="font-size:0.75em; color:#888; margin-top:6px; margin-left:58px;">
|
||||
Set Wye when top/bottom of glyph tapers to a point (V, Y, A, v, etc.)
|
||||
</p>
|
||||
</div>
|
||||
|
||||
<!-- Kerning mask output -->
|
||||
<div class="mask-section">
|
||||
<div class="label">Kerning Mask (24-bit, used by rules)</div>
|
||||
<div id="maskVal" class="mask-val">0x0000FF</div>
|
||||
<div id="maskBin" class="bit-display">00000000 00000000 11111111</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Pixel 3: Dot Removal -->
|
||||
<div class="panel cp-section">
|
||||
<h2>Pixel 3 — Dot Removal (Y+8)</h2>
|
||||
<p style="font-size:0.8em; color:#666; margin-bottom:12px;">Replacement character for diacritics dot removal. All 24 bits encode the codepoint.</p>
|
||||
|
||||
<div class="cp-input-row">
|
||||
<input type="text" class="cp-input" id="cpInput" placeholder="e.g. U+0041, 65, A" oninput="updateCodepoint()">
|
||||
</div>
|
||||
<p class="cp-formats">
|
||||
Accepts: <code>U+0041</code> or <code>0x41</code> (hex), <code>65</code> (decimal), or a literal character <code>A</code>
|
||||
</p>
|
||||
<div class="cp-resolved" id="cpResolved"></div>
|
||||
</div>
|
||||
|
||||
</div><!-- col-left -->
|
||||
|
||||
<!-- Output -->
|
||||
<div class="panel output-section">
|
||||
<h2>Pixel Colour Values</h2>
|
||||
|
||||
<div class="pixel-label">Pixel 1: Low Height (Y+6)</div>
|
||||
<div class="pixel-row">
|
||||
<canvas id="swatch1" class="colour-swatch" width="48" height="48"></canvas>
|
||||
<div class="pixel-info">
|
||||
<div class="hex" id="hex1">—</div>
|
||||
<div id="p1desc" class="pixel-inactive">No pixel (not lowheight)</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="pixel-label" style="margin-top:12px;">Pixel 2: Glyph Shape (Y+7)</div>
|
||||
<div class="pixel-row">
|
||||
<canvas id="swatch2" class="colour-swatch" width="48" height="48"></canvas>
|
||||
<div class="pixel-info">
|
||||
<div class="hex" id="hex2">#000000</div>
|
||||
<div class="channels">
|
||||
R: <span class="channel-r" id="r2">0</span>
|
||||
G: <span class="channel-g" id="g2">0</span>
|
||||
B: <span class="channel-b" id="b2">0</span>
|
||||
</div>
|
||||
<div class="bit-display" id="bits2">00000000 00000000 00000000</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="pixel-label" style="margin-top:12px;">Pixel 3: Dot Removal (Y+8)</div>
|
||||
<div class="pixel-row">
|
||||
<canvas id="swatch3" class="colour-swatch" width="48" height="48"></canvas>
|
||||
<div class="pixel-info">
|
||||
<div class="hex" id="hex3">—</div>
|
||||
<div class="channels" id="p3channels" style="display:none">
|
||||
R: <span class="channel-r" id="r3">0</span>
|
||||
G: <span class="channel-g" id="g3">0</span>
|
||||
B: <span class="channel-b" id="b3">0</span>
|
||||
</div>
|
||||
<div id="p3desc" class="pixel-inactive">No replacement character set</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="notes" style="margin-top:16px;">
|
||||
<strong>Alpha channel:</strong> must be non-zero (1–254) for the pixel to be read as a tag.
|
||||
Set alpha to <code>1</code> (or any value &gt; 0 and &lt; 255).<br>
|
||||
A fully transparent pixel (alpha = 0) means “no data”.
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Examples -->
|
||||
<div class="panel examples-section" style="margin-top: 20px;">
|
||||
<h2>Examples — Glyph Shape (click to load)</h2>
|
||||
<div class="example-grid" id="exampleGrid"></div>
|
||||
</div>
|
||||
|
||||
<script>
|
||||
// The ten shape zones: A-H sit above the baseline (blue byte),
// J/K below it (green byte).
const ZONES = ['A','B','C','D','E','F','G','H','J','K'];
// Per-zone on/off state backing the shape-grid buttons.
const state = { A:0, B:0, C:0, D:0, E:0, F:0, G:0, H:0, J:0, K:0 };
// Whether the "Low Height" toggle (pixel 1) is currently active.
let isLowheight = false;

// Bit positions within kerning_mask (24-bit RGB):
// Blue byte: A=bit7, B=bit6, C=bit5, D=bit4, E=bit3, F=bit2, G=bit1, H=bit0
// Green byte: J=bit15(=green bit7), K=bit14(=green bit6)
// Red byte: Y=bit23(=red bit7) -- tracked separately as isKernYtype
const BIT_POS = { A:7, B:6, C:5, D:4, E:3, F:2, G:1, H:0, J:15, K:14 };
|
||||
|
||||
function toggleLowheight() {
    // Flip the Low Height flag and sync the button's appearance and label.
    isLowheight = !isLowheight;
    const btn = document.getElementById('lowheightBtn');
    btn.classList.toggle('active', isLowheight);
    // Fix: keep the wording consistent with the button's initial markup
    // ("Low Height: OFF"); previously toggling rewrote it as "Lowheight: …".
    btn.textContent = isLowheight ? 'Low Height: ON' : 'Low Height: OFF';
    recalc();
}
|
||||
|
||||
function toggleZone(btn) {
    // Invert the on/off state for the zone bound to this button,
    // mirror it on the button, then refresh all outputs.
    const zoneName = btn.dataset.zone;
    const nowActive = !state[zoneName];
    state[zoneName] = nowActive ? 1 : 0;
    btn.classList.toggle('active', nowActive);
    recalc();
}
|
||||
|
||||
// Recompute pixel 1 (lowheight) and pixel 2 (shape data) from the current
// zone states and the bar/wye toggle, then refresh every on-page readout.
function recalc() {
  const isWye = document.getElementById('yToggle').checked;

  // Active zone buttons pick up the "wye" styling while wye mode is on.
  document.querySelectorAll('.zone-btn.active').forEach((btn) => {
    btn.classList.toggle('wye', isWye);
  });

  // Highlight whichever toggle label is currently in effect.
  document.getElementById('barLabel').className = isWye ? '' : 'active-label';
  document.getElementById('wyeLabel').className = isWye ? 'active-label wye' : '';

  // --- Pixel 1: Lowheight ---
  const p1desc = document.getElementById('p1desc');
  if (isLowheight) {
    // Any non-zero pixel qualifies; white is used for visibility in editors.
    drawSwatchSolid('swatch1', 255, 255, 255);
    document.getElementById('hex1').textContent = '#FFFFFF';
    p1desc.textContent = 'Any pixel with alpha > 0';
    p1desc.className = 'channels';
  } else {
    drawSwatchEmpty('swatch1');
    document.getElementById('hex1').innerHTML = '—';
    p1desc.textContent = 'No pixel (not lowheight)';
    p1desc.className = 'pixel-inactive';
  }

  // --- Pixel 2: Shape Data ---
  // Red carries the Y flag in its MSB (bit 7).
  const r = isWye ? 0x80 : 0x00;
  // Green carries J in bit 7 and K in bit 6.
  const g = (state.J ? 0x80 : 0) | (state.K ? 0x40 : 0);
  // Blue packs zones A..H, MSB first.
  let b = 0;
  if (state.A) b |= 0x80;
  if (state.B) b |= 0x40;
  if (state.C) b |= 0x20;
  if (state.D) b |= 0x10;
  if (state.E) b |= 0x08;
  if (state.F) b |= 0x04;
  if (state.G) b |= 0x02;
  if (state.H) b |= 0x01;

  // Full 24-bit mask, matching what the consuming code extracts.
  const fullMask = (r << 16) | (g << 8) | b;

  document.getElementById('hex2').textContent = '#' + hex2(r) + hex2(g) + hex2(b);
  document.getElementById('r2').textContent = r;
  document.getElementById('g2').textContent = g;
  document.getElementById('b2').textContent = b;
  document.getElementById('bits2').textContent = bin8(r) + ' ' + bin8(g) + ' ' + bin8(b);

  document.getElementById('maskVal').textContent = '0x' + fullMask.toString(16).toUpperCase().padStart(6, '0');
  document.getElementById('maskBin').textContent = bin8((fullMask >> 16) & 0xFF) + ' ' + bin8((fullMask >> 8) & 0xFF) + ' ' + bin8(fullMask & 0xFF);

  drawSwatchSolid('swatch2', r, g, b);
}
// Read the replacement-codepoint input (pixel 3), validate it, and update
// the swatch, hex/channel readouts and the resolved-character line.
// Improvement: the "clear pixel 3" reset sequence was duplicated verbatim in
// the empty-input and invalid-input branches; it is now a single local helper.
function updateCodepoint() {
  const input = document.getElementById('cpInput');
  const raw = input.value.trim();

  // Reset every pixel-3 display to the "no pixel" state with the given message.
  const clearPixel3 = (message) => {
    drawSwatchEmpty('swatch3');
    document.getElementById('hex3').innerHTML = '—';
    document.getElementById('p3channels').style.display = 'none';
    const desc = document.getElementById('p3desc');
    desc.textContent = message;
    desc.style.display = '';
    document.getElementById('cpResolved').textContent = '';
  };

  if (raw === '') {
    input.classList.remove('error');
    clearPixel3('No replacement character set');
    return;
  }

  const cp = parseCodepoint(raw);

  if (cp === null || cp < 0 || cp > 0xFFFFFF) {
    input.classList.add('error');
    clearPixel3(cp !== null ? 'Codepoint out of 24-bit range' : 'Invalid input');
    return;
  }

  input.classList.remove('error');

  // Split the codepoint across the RGB channels, big-endian.
  const r3 = (cp >> 16) & 0xFF;
  const g3 = (cp >> 8) & 0xFF;
  const b3 = cp & 0xFF;

  document.getElementById('hex3').textContent = '#' + hex2(r3) + hex2(g3) + hex2(b3);
  document.getElementById('r3').textContent = r3;
  document.getElementById('g3').textContent = g3;
  document.getElementById('b3').textContent = b3;
  document.getElementById('p3channels').style.display = '';
  document.getElementById('p3desc').style.display = 'none';

  drawSwatchSolid('swatch3', r3, g3, b3);

  // Show the resolved character; String.fromCodePoint throws RangeError for
  // values that are not valid Unicode scalar inputs, so the display is optional.
  let charDisplay = '';
  try { charDisplay = String.fromCodePoint(cp); } catch(e) {}
  document.getElementById('cpResolved').innerHTML =
    'U+' + cp.toString(16).toUpperCase().padStart(4, '0') +
    ' (decimal ' + cp + ')' +
    (charDisplay ? ' — <span class="cp-char">' + escapeHtml(charDisplay) + '</span>' : '');
}
// Parse the user's codepoint input. Accepts "U+XXXX", "0xXXXX", a pure
// decimal number, or a single literal character (surrogate pairs count as
// one character). Returns the numeric codepoint, or null if unrecognised.
// Fix: replaced the deprecated non-standard RegExp.$1 legacy static
// property with standard exec() match captures.
function parseCodepoint(s) {
  // U+XXXX or u+XXXX
  let m = /^[Uu]\+([0-9A-Fa-f]+)$/.exec(s);
  if (m) {
    return parseInt(m[1], 16);
  }
  // 0xXXXX
  m = /^0[xX]([0-9A-Fa-f]+)$/.exec(s);
  if (m) {
    return parseInt(m[1], 16);
  }
  // Pure decimal number
  if (/^[0-9]+$/.test(s)) {
    return parseInt(s, 10);
  }
  // Literal character (single grapheme — could be a surrogate pair)
  const codepoints = [...s];
  if (codepoints.length === 1) {
    return codepoints[0].codePointAt(0);
  }
  return null;
}
// HTML-escape arbitrary text by round-tripping it through a detached DOM
// node: textContent stores it raw, innerHTML serialises it escaped.
function escapeHtml(s) {
  const holder = document.createElement('span');
  holder.textContent = s;
  return holder.innerHTML;
}
// Paint a solid RGB colour swatch, inset 4px over a checkerboard backdrop,
// onto the 48×48 canvas with the given element id.
function drawSwatchSolid(id, r, g, b) {
  const ctx = document.getElementById(id).getContext('2d');

  // Checkerboard backdrop: 8px cells alternating #ddd / #fff.
  ctx.fillStyle = '#ddd';
  ctx.fillRect(0, 0, 48, 48);
  ctx.fillStyle = '#fff';
  for (let row = 0; row < 48; row += 8) {
    const firstCol = (row % 16 === 0) ? 8 : 0;
    for (let col = firstCol; col < 48; col += 16) {
      ctx.fillRect(col, row, 8, 8);
    }
  }

  // The colour itself.
  ctx.fillStyle = `rgb(${r},${g},${b})`;
  ctx.fillRect(4, 4, 40, 40);
}
// Paint the "no pixel" swatch: a checkerboard backdrop with a centred em
// dash to indicate that the pixel is absent.
function drawSwatchEmpty(id) {
  const ctx = document.getElementById(id).getContext('2d');

  // Checkerboard backdrop: 8px cells alternating #ddd / #fff.
  ctx.fillStyle = '#ddd';
  ctx.fillRect(0, 0, 48, 48);
  ctx.fillStyle = '#fff';
  for (let row = 0; row < 48; row += 8) {
    const firstCol = (row % 16 === 0) ? 8 : 0;
    for (let col = firstCol; col < 48; col += 16) {
      ctx.fillRect(col, row, 8, 8);
    }
  }

  // Em dash marks the swatch as empty.
  ctx.fillStyle = '#aaa';
  ctx.font = '20px sans-serif';
  ctx.textAlign = 'center';
  ctx.textBaseline = 'middle';
  ctx.fillText('\u2014', 24, 24);
}
// Render a byte (0–255) as two uppercase hexadecimal digits.
function hex2(v) {
  const digits = v.toString(16).toUpperCase();
  return digits.padStart(2, '0');
}
// Render a byte (0–255) as eight binary digits.
function bin8(v) {
  return v.toString(2).padStart(8, '0');
}
// Load a preset into the shape grid: activate exactly the given zones, set
// the bar/wye toggle, sync the zone buttons, and refresh all readouts.
function loadPreset(zones, wye) {
  ZONES.forEach((z) => { state[z] = 0; });
  for (const z of zones) {
    state[z] = 1;
  }
  document.getElementById('yToggle').checked = wye;
  document.querySelectorAll('.zone-btn').forEach((btn) => {
    btn.classList.toggle('active', Boolean(state[btn.dataset.zone]));
  });
  recalc();
}
// Examples from keming_machine.txt
// Each entry describes one glyph-shape preset for the example grid:
//   zones — string of active zone letters (subset of A–H, J, K)
//   wye   — whether the Y-type flag (red MSB) is set
//   chars — sample characters that have this shape (shown in the grid)
//   desc  — label displayed for the example
const EXAMPLES = [
  { zones: 'AB', wye: false, chars: 'T', desc: 'AB(B)' },
  { zones: 'ABCEGH', wye: false, chars: 'C', desc: 'ABCEGH(B)' },
  { zones: 'ABCEFGH', wye: true, chars: 'K', desc: 'ABCEFGH(Y)' },
  { zones: 'ABCDEG', wye: false, chars: 'P', desc: 'ABCDEG' },
  { zones: 'ABCDEFGH', wye: false, chars: 'B,D,O', desc: 'ABCDEFGH' },
  { zones: 'ABCDFH', wye: false, chars: '\u0427', desc: 'ABCDFH' },
  { zones: 'ABCEG', wye: false, chars: '\u0413', desc: 'ABCEG' },
  { zones: 'ABGH', wye: false, chars: '\u13C6', desc: 'ABGH' },
  { zones: 'ACDEG', wye: false, chars: '\u13B0', desc: 'ACDEG' },
  { zones: 'ACDEFGH', wye: false, chars: 'h,\u0184', desc: 'ACDEFGH' },
  { zones: 'ACDFH', wye: false, chars: '\u07C6', desc: 'ACDFH' },
  { zones: 'ACEGH', wye: false, chars: 'L', desc: 'ACEGH' },
  { zones: 'AH', wye: true, chars: '\\', desc: 'AH(Y)' },
  { zones: 'BDEFGH', wye: false, chars: 'J', desc: 'BDEFGH' },
  { zones: 'BDFGH', wye: false, chars: '\u027A', desc: 'BDFGH' },
  { zones: 'BG', wye: true, chars: '/', desc: 'BG(Y)' },
  { zones: 'CD', wye: false, chars: '\u10B5', desc: 'CD' },
  { zones: 'CDEF', wye: true, chars: '\u03A6', desc: 'CDEF(Y)' },
  { zones: 'CDEFGH', wye: false, chars: 'a,c,e', desc: 'CDEFGH' },
  { zones: 'CDEFGHJK', wye: false, chars: 'g', desc: 'CDEFGHJK' },
  { zones: 'CDEFGHK', wye: false, chars: '\u019E', desc: 'CDEFGHK' },
  { zones: 'AB', wye: true, chars: 'Y', desc: 'AB(Y)' },
  { zones: 'ABCD', wye: true, chars: 'V', desc: 'ABCD(Y)' },
  { zones: 'CDEF', wye: true, chars: 'v', desc: 'CDEF(Y)' },
  { zones: 'EFGH', wye: true, chars: '\u028C', desc: 'EFGH(Y)' },
  { zones: 'CDEFGH', wye: true, chars: 'A', desc: 'CDEFGH(Y)' },
];
// Populate the example grid with one clickable item per EXAMPLES entry;
// clicking an item loads that entry's preset into the shape grid.
function buildExamples() {
  const grid = document.getElementById('exampleGrid');
  EXAMPLES.forEach((ex) => {
    const item = document.createElement('div');
    item.className = 'example-item';
    item.innerHTML = `<span class="ex-code">${ex.desc}</span> <span class="ex-char">${ex.chars}</span>`;
    item.onclick = () => loadPreset(ex.zones.split(''), ex.wye);
    grid.appendChild(item);
  });
}
// Init
// Build the example grid, draw the optional swatches (pixel 1 and pixel 3)
// in their empty state, then compute the initial pixel-2 values.
buildExamples();
drawSwatchEmpty('swatch1');
drawSwatchEmpty('swatch3');
recalc();
</script>

</body>
</html>
Binary file not shown.
|
Before Width: | Height: | Size: 75 KiB After Width: | Height: | Size: 130 B |
BIN
src/assets/alphabetic_presentation_forms_extrawide_variable.tga
LFS
Normal file
BIN
src/assets/alphabetic_presentation_forms_extrawide_variable.tga
LFS
Normal file
Binary file not shown.
BIN
src/assets/ascii_variable.tga
LFS
Executable file
BIN
src/assets/ascii_variable.tga
LFS
Executable file
Binary file not shown.
BIN
src/assets/currencies_variable.tga
LFS
Normal file
BIN
src/assets/currencies_variable.tga
LFS
Normal file
Binary file not shown.
BIN
src/assets/cyrilic_bulgarian_variable.tga
LFS
Executable file
BIN
src/assets/cyrilic_bulgarian_variable.tga
LFS
Executable file
Binary file not shown.
BIN
src/assets/cyrilic_variable.tga
LFS
Executable file
BIN
src/assets/cyrilic_variable.tga
LFS
Executable file
Binary file not shown.
BIN
src/assets/devanagari_variable.tga
LFS
Normal file
BIN
src/assets/devanagari_variable.tga
LFS
Normal file
Binary file not shown.
BIN
src/assets/diacritical_marks_variable.tga
LFS
Executable file
BIN
src/assets/diacritical_marks_variable.tga
LFS
Executable file
Binary file not shown.
BIN
src/assets/halfwidth_fullwidth_variable.tga
LFS
Normal file
BIN
src/assets/halfwidth_fullwidth_variable.tga
LFS
Normal file
Binary file not shown.
BIN
src/assets/hangul_johab.tga
LFS
Normal file
BIN
src/assets/hangul_johab.tga
LFS
Normal file
Binary file not shown.
BIN
src/assets/hayeren_variable.tga
LFS
Executable file
BIN
src/assets/hayeren_variable.tga
LFS
Executable file
Binary file not shown.
BIN
src/assets/hentaigana_variable.tga
LFS
Normal file
BIN
src/assets/hentaigana_variable.tga
LFS
Normal file
Binary file not shown.
BIN
src/assets/latinExtA_variable.tga
LFS
Executable file
BIN
src/assets/latinExtA_variable.tga
LFS
Executable file
Binary file not shown.
BIN
src/assets/latinExtB_variable.tga
LFS
Executable file
BIN
src/assets/latinExtB_variable.tga
LFS
Executable file
Binary file not shown.
BIN
src/assets/latinExtD_variable.tga
LFS
Normal file
BIN
src/assets/latinExtD_variable.tga
LFS
Normal file
Binary file not shown.
BIN
src/assets/puae000-e0ff.tga
LFS
Executable file
BIN
src/assets/puae000-e0ff.tga
LFS
Executable file
Binary file not shown.
BIN
src/assets/symbols_for_legacy_computing_variable.tga
LFS
Normal file
BIN
src/assets/symbols_for_legacy_computing_variable.tga
LFS
Normal file
Binary file not shown.
BIN
src/assets/tamil_extrawide_variable.tga
LFS
Normal file
BIN
src/assets/tamil_extrawide_variable.tga
LFS
Normal file
Binary file not shown.
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user