From fbb7fe94fd4119a6d298458a6b96d8071faacf2b Mon Sep 17 00:00:00 2001 From: Daniel Gary Date: Tue, 9 Sep 2025 08:00:19 -0500 Subject: [PATCH 1/3] initial work for sqlite exporter --- .../dbml-core/src/export/ModelExporter.js | 5 + .../dbml-core/src/export/SqliteExporter.js | 407 ++++++++++++++++++ 2 files changed, 412 insertions(+) create mode 100644 packages/dbml-core/src/export/SqliteExporter.js diff --git a/packages/dbml-core/src/export/ModelExporter.js b/packages/dbml-core/src/export/ModelExporter.js index 4a734db5f..6476a5bcb 100644 --- a/packages/dbml-core/src/export/ModelExporter.js +++ b/packages/dbml-core/src/export/ModelExporter.js @@ -4,6 +4,7 @@ import PostgresExporter from './PostgresExporter'; import JsonExporter from './JsonExporter'; import SqlServerExporter from './SqlServerExporter'; import OracleExporter from './OracleExporter'; +import SqliteExporter from './SqliteExporter'; class ModelExporter { static export (model = {}, format = '', isNormalized = true) { @@ -34,6 +35,10 @@ class ModelExporter { res = OracleExporter.export(normalizedModel); break; + case 'sqlite': + res = SqliteExporter.export(normalizedModel); + break; + default: break; } diff --git a/packages/dbml-core/src/export/SqliteExporter.js b/packages/dbml-core/src/export/SqliteExporter.js new file mode 100644 index 000000000..b8b3c182e --- /dev/null +++ b/packages/dbml-core/src/export/SqliteExporter.js @@ -0,0 +1,407 @@ +/* eslint-disable max-len */ +import _ from 'lodash'; +import { + buildJunctionFields1, + buildJunctionFields2, + buildNewTableName, +} from './utils'; +import { shouldPrintSchemaName } from '../model_structure/utils'; + +// SQLite doesn't have schemas or built-in rich types; we use affinities. +// Basic mapping from common Postgres types (and typical custom names) to SQLite affinities. 
+const SQLITE_TYPE_MAP = (() => { + const int = new Set(['SMALLINT', 'INT2', 'INTEGER', 'INT', 'INT4', 'BIGINT', 'INT8', + 'SMALLSERIAL', 'SERIAL', 'BIGSERIAL']); + const real = new Set(['REAL', 'FLOAT', 'DOUBLE', 'DOUBLE PRECISION', 'NUMERIC(.*)']); // NUMERIC often -> NUMERIC, but many use REAL for decimals + const num = new Set(['DECIMAL', 'NUMERIC']); + const text = new Set(['CHAR', 'CHARACTER', 'NCHAR', 'NVARCHAR', 'VARCHAR', 'CHARACTER VARYING', 'TEXT', 'NAME', 'BPCHAR', 'UUID', 'XML', 'JSON', 'JSONB', 'INET', 'CIDR', 'MACADDR', 'MACADDR8']); + const blob = new Set(['BYTEA', 'BLOB']); + const date = new Set(['DATE', 'TIME', 'TIMESTAMP', 'TIMESTAMP WITH TIME ZONE', 'TIMESTAMP WITHOUT TIME ZONE', 'TIME WITH TIME ZONE', 'TIME WITHOUT TIME ZONE', 'INTERVAL']); + const bool = new Set(['BOOLEAN', 'BOOL']); + return { int, real, num, text, blob, date, bool }; +})(); + +// Normalize incoming type name (strip params, collapse spaces) +function normalizeTypeName (tn) { + const t = (tn || '').trim().replace(/\s+/g, ' ').toUpperCase(); + return t.replace(/\(.*\)$/, ''); // drop length/precision for mapping +} + +function mapTypeToSQLite (originalTypeName) { + const t = normalizeTypeName(originalTypeName); + + if (SQLITE_TYPE_MAP.int.has(t)) return 'INTEGER'; + if (SQLITE_TYPE_MAP.bool.has(t)) return 'INTEGER'; // (optionally add CHECK later) + if (SQLITE_TYPE_MAP.blob.has(t)) return 'BLOB'; + if (SQLITE_TYPE_MAP.text.has(t)) return 'TEXT'; + if (SQLITE_TYPE_MAP.date.has(t)) return 'TEXT'; // store ISO strings; alternative: NUMERIC + if (SQLITE_TYPE_MAP.num.has(t)) return 'NUMERIC'; + if (t === 'REAL' || t === 'FLOAT' || t === 'DOUBLE' || t === 'DOUBLE PRECISION') return 'REAL'; + + // Fallback: try to infer by keywords + if (/INT/i.test(t)) return 'INTEGER'; + if (/CHAR|CLOB|TEXT/i.test(t)) return 'TEXT'; + if (/BLOB/i.test(t)) return 'BLOB'; + if (/REAL|FLOA|DOUB/i.test(t)) return 'REAL'; + if (/NUM|DEC|MONEY/i.test(t)) return 'NUMERIC'; + + // Unknown/custom → TEXT to be safe + return 'TEXT'; +} + +class SqliteExporter { + // Build enum name → value array map so we can add CHECK constraints + static buildEnumMap (model) { + const map = new Map(); + Object.keys(model.enums).forEach((enumId) => { + const e = model.enums[enumId]; + const schema = model.schemas[e.schemaId]; + // Fully qualified DBML enum name (schema may exist in model, but SQLite won’t use it) + const fq = `"${schema.name}"."${e.name}"`; + const local = `"${e.name}"`; + const vals = e.valueIds.map(id => model.enumValues[id].name); + map.set(fq, vals); + map.set(local, vals); + map.set(e.name, vals); // convenience + }); + return map; + } + + // Collect per-table FK clauses (since SQLite needs inline FKs) + static collectForeignKeysByTable (refIds, model) { + const fksByTableId = new Map(); // tableId -> array of fk clause strings + const junctionCreates = []; // CREATE TABLE strings for M:N junctions (with inline FKs) + + const usedTableNames = new Set(Object.values(model.tables).map(t => t.name)); + + (refIds || []).forEach((refId) => { + const ref = model.refs[refId]; + const refOneIndex = ref.endpointIds.findIndex(endpointId => model.endpoints[endpointId].relation === '1'); + const refEndpointIndex = refOneIndex === -1 ? 
0 : refOneIndex; + const foreignEndpointId = ref.endpointIds[1 - refEndpointIndex]; + const refEndpointId = ref.endpointIds[refEndpointIndex]; + const foreignEndpoint = model.endpoints[foreignEndpointId]; + const refEndpoint = model.endpoints[refEndpointId]; + + const refField = model.fields[refEndpoint.fieldIds[0]]; + const refTable = model.tables[refField.tableId]; + const foreignField = model.fields[foreignEndpoint.fieldIds[0]]; + const foreignTable = model.tables[foreignField.tableId]; + + const refCols = SqliteExporter.buildFieldName(refEndpoint.fieldIds, model); // "(colA, colB)" + const foreignCols = SqliteExporter.buildFieldName(foreignEndpoint.fieldIds, model); + + if (refOneIndex === -1) { + // Many-to-many: build a junction table with FKs inline + const firstTableFieldsMap = buildJunctionFields1(refEndpoint.fieldIds, model); + const secondTableFieldsMap = buildJunctionFields2(foreignEndpoint.fieldIds, model, firstTableFieldsMap); + const newTableName = buildNewTableName(refTable.name, foreignTable.name, usedTableNames); + + let line = `CREATE TABLE "${newTableName}" (\n`; + const key1s = [...firstTableFieldsMap.keys()].join('", "'); + const key2s = [...secondTableFieldsMap.keys()].join('", "'); + + // Columns + firstTableFieldsMap.forEach((fieldType, fieldName) => { + line += ` "${fieldName}" ${mapTypeToSQLite(fieldType)},\n`; + }); + secondTableFieldsMap.forEach((fieldType, fieldName) => { + line += ` "${fieldName}" ${mapTypeToSQLite(fieldType)},\n`; + }); + + // Composite PK + line += ` PRIMARY KEY ("${key1s}", "${key2s}"),\n`; + + // FKs (inline) + const refColsList = [...firstTableFieldsMap.keys()].map(k => `"${k}"`).join(', '); + const forColsList = [...secondTableFieldsMap.keys()].map(k => `"${k}"`).join(', '); + + line += ` FOREIGN KEY (${refColsList}) REFERENCES "${refTable.name}" ${refCols}`; + if (ref.onDelete) line += ` ON DELETE ${ref.onDelete.toUpperCase()}`; + if (ref.onUpdate) line += ` ON UPDATE ${ref.onUpdate.toUpperCase()}`; + line += ',\n'; + + line += ` FOREIGN KEY (${forColsList}) REFERENCES "${foreignTable.name}" ${foreignCols}`; + if (ref.onDelete) line += ` ON DELETE ${ref.onDelete.toUpperCase()}`; + if (ref.onUpdate) line += ` ON UPDATE ${ref.onUpdate.toUpperCase()}`; + line += '\n);\n'; + + junctionCreates.push(line); + } else { + // 1:N: attach FK to the "many" side (foreignEndpoint.table) + const fkClauseParts = []; + fkClauseParts.push(`FOREIGN KEY ${foreignCols} REFERENCES "${refTable.name}" ${refCols}`); + if (ref.onDelete) fkClauseParts.push(`ON DELETE ${ref.onDelete.toUpperCase()}`); + if (ref.onUpdate) fkClauseParts.push(`ON UPDATE ${ref.onUpdate.toUpperCase()}`); + + const fkLine = fkClauseParts.join(' '); + const tableId = foreignTable.id; + if (!fksByTableId.has(tableId)) fksByTableId.set(tableId, []); + fksByTableId.get(tableId).push(fkLine); + } + }); + + return { fksByTableId, junctionCreates }; + } + + static buildFieldName (fieldIds, model) { + const fieldNames = fieldIds.map(fieldId => `"${model.fields[fieldId].name}"`).join(', '); + return `(${fieldNames})`; + } + + static getFieldLines (tableId, model, enumMap) { + const table = model.tables[tableId]; + + const lines = table.fieldIds.map((fieldId) => { + const field = model.fields[fieldId]; + + // Type & affinity + let affinity; + let isBoolean = false; + let enumCheck = ''; + + if (!field.type.schemaName || !shouldPrintSchemaName(field.type.schemaName)) { + // Built-in or custom non-schema type string + const originalTypeName = field.type.type_name; + const upperType = 
normalizeTypeName(originalTypeName); + + // Enum detection (from enumMap): DBML enums appear as schema.type or type + // Build potential keys to probe: + const enumKeys = [ + `"${field.type.schemaName}"."${field.type.type_name}"`, + `"${field.type.type_name}"`, + field.type.type_name, + ].filter(Boolean); + + let enumVals = null; + enumKeys.some((k) => { + if (enumMap.has(k)) { + enumVals = enumMap.get(k); + return true; + } + return false; + }); + + if (enumVals && enumVals.length) { + affinity = 'TEXT'; + // CHECK uses the column name; applied after we build the base line + enumCheck = ` CHECK ("${field.name}" IN (${enumVals.map(v => `'${v.replace(/'/g, "''")}'`).join(', ')}))`; + } else { + affinity = mapTypeToSQLite(originalTypeName); + isBoolean = SQLITE_TYPE_MAP.bool.has(upperType); + } + } else { + // A custom namespaced type → TEXT (conservative) + affinity = 'TEXT'; + } + + // Start line with quoted identifier and affinity + let line = `"${field.name}" ${affinity}`; + + // AUTOINCREMENT/PK handling: + // In SQLite, AUTOINCREMENT only valid for a single-column INTEGER PRIMARY KEY. + // If field.increment is set, prefer the canonical pattern. + if (field.increment) { + if (affinity === 'INTEGER') { + // Make it the rowid PK. If user also marked pk, we’ll consume it here. + line = `"${field.name}" INTEGER PRIMARY KEY AUTOINCREMENT`; + // Note: do NOT add UNIQUE/NOT NULL/etc. here; SQLite implies NOT NULL for rowid PK. + } else { + // Non-integer increment requested → fallback to INTEGER (rowid) semantics not possible + // Keep original affinity but ignore increment, as SQLite cannot autoincrement non-integer. + // (Optionally: emit a comment) + line += ''; + } + } else { + // Regular constraints + if (field.unique) line += ' UNIQUE'; + // If PK on a non-increment column, allow either single or composite; for single, this will be a table-level or column-level PK. + if (field.pk) { + // Column-level PRIMARY KEY is valid (composite handled separately at table-level) + line += ' PRIMARY KEY'; + } + if (field.not_null) line += ' NOT NULL'; + } + + // Defaults + if (field.dbdefault) { + if (field.dbdefault.type === 'expression') { + line += ` DEFAULT (${field.dbdefault.value})`; + } else if (field.dbdefault.type === 'string') { + line += ` DEFAULT '${field.dbdefault.value.replace(/'/g, "''")}'`; + } else { + line += ` DEFAULT ${field.dbdefault.value}`; + } + } + + // Boolean CHECK (optional): enforce 0/1 if desired. + if (!field.increment && isBoolean) { + line += ` CHECK ("${field.name}" IN (0,1))`; + } + + // Enum CHECK + if (enumCheck) { + line += enumCheck; + } + + return line; + }); + + return lines; + } + + static getCompositePKs (tableId, model) { + const table = model.tables[tableId]; + const compositePkIds = table.indexIds ? 
table.indexIds.filter(indexId => model.indexes[indexId].pk) : []; + const lines = compositePkIds.map((keyId) => { + const key = model.indexes[keyId]; + const columnArr = []; + + key.columnIds.forEach((columnId) => { + const column = model.indexColumns[columnId]; + let columnStr = ''; + if (column.type === 'expression') { + columnStr = `(${column.value})`; + } else { + columnStr = `"${column.value}"`; + } + columnArr.push(columnStr); + }); + + return `PRIMARY KEY (${columnArr.join(', ')})`; + }); + return lines; + } + + static exportTables (tableIds, model, enumMap, fksByTableId) { + const tableStrs = (tableIds || []).map((tableId) => { + const fieldContents = SqliteExporter.getFieldLines(tableId, model, enumMap); + const compositePKs = SqliteExporter.getCompositePKs(tableId, model); + const fkClauses = fksByTableId.get(tableId) || []; + + const content = [...fieldContents, ...compositePKs, ...fkClauses]; + + const table = model.tables[tableId]; + // Ignore schemas in SQLite + const tableStr = `CREATE TABLE "${table.name}" (\n${content.map(line => ` ${line}`).join(',\n')}\n);\n`; + return tableStr; + }); + + return tableStrs; + } + + static exportIndexes (indexIds, model) { + // exclude composite pk index + const indexArr = (indexIds || []).filter((indexId) => !model.indexes[indexId].pk).map((indexId) => { + const index = model.indexes[indexId]; + const table = model.tables[index.tableId]; + + let line = 'CREATE'; + if (index.unique) line += ' UNIQUE'; + const indexName = index.name ? `"${index.name}"` : ''; + line += ' INDEX'; + if (indexName) line += ` ${indexName}`; + line += ` ON "${table.name}"`; + + // SQLite has no USING method + const columnArr = []; + index.columnIds.forEach((columnId) => { + const column = model.indexColumns[columnId]; + let columnStr = ''; + if (column.type === 'expression') { + columnStr = `(${column.value})`; + } else { + columnStr = `"${column.value}"`; + } + columnArr.push(columnStr); + }); + + line += ` (${columnArr.join(', ')})`; + line += ';\n'; + return line; + }); + + return indexArr; + } + + static exportComments (comments, model) { + // SQLite does not support COMMENT ON; emit as SQL comments + const commentArr = comments.map((comment) => { + const table = model.tables[comment.tableId]; + switch (comment.type) { + case 'table': { + const txt = (table.note || '').replace(/'/g, "''"); + return `-- TABLE "${table.name}" COMMENT: '${txt}'\n`; + } + case 'column': { + const field = model.fields[comment.fieldId]; + const txt = (field.note || '').replace(/'/g, "''"); + return `-- COLUMN "${table.name}"."${field.name}" COMMENT: '${txt}'\n`; + } + default: + return ''; + } + }); + return commentArr; + } + + static export (model) { + const database = model.database['1']; + + // Build enum map first + const enumMap = SqliteExporter.buildEnumMap(model); + + // Collect all refs once to produce inline FKs and any junction tables + const allRefIds = _.flatten(database.schemaIds.map(sid => model.schemas[sid].refIds || [])); + const { fksByTableId, junctionCreates } = SqliteExporter.collectForeignKeysByTable(allRefIds, model); + + const statements = database.schemaIds.reduce((prev, schemaId) => { + const schema = model.schemas[schemaId]; + const { tableIds } = schema; + + if (!_.isEmpty(tableIds)) { + prev.tables.push(...SqliteExporter.exportTables(tableIds, model, enumMap, fksByTableId)); + } + + const indexIds = _.flatten((tableIds || []).map((tableId) => model.tables[tableId].indexIds || [])); + if (!_.isEmpty(indexIds)) { + 
prev.indexes.push(...SqliteExporter.exportIndexes(indexIds, model)); + } + + const commentNodes = _.flatten((tableIds || []).map((tableId) => { + const { fieldIds, note } = model.tables[tableId]; + const fieldObjects = (fieldIds || []) + .filter((fieldId) => model.fields[fieldId].note) + .map((fieldId) => ({ type: 'column', fieldId, tableId })); + return note ? [{ type: 'table', tableId }].concat(fieldObjects) : fieldObjects; + })); + if (!_.isEmpty(commentNodes)) { + prev.comments.push(...SqliteExporter.exportComments(commentNodes, model)); + } + + return prev; + }, { + pragmas: [], + tables: [], + indexes: [], + comments: [], + junctions: [], + }); + + // Enable FK enforcement + const pragmas = ['PRAGMA foreign_keys = ON;']; + + const res = _.concat( + pragmas, + // No schemas or enums in SQLite + statements.tables, + junctionCreates, // junction tables created after base tables (they only reference them) + statements.indexes, + statements.comments, + ).join('\n'); + + return res; + } +} + +export default SqliteExporter; From 8c5f00ae5afa45a5f2fc9efb5f1b2e00eee5ea7b Mon Sep 17 00:00:00 2001 From: Daniel Gary Date: Tue, 9 Sep 2025 10:08:12 -0500 Subject: [PATCH 2/3] feat: adds support for exporting to sqlite --- packages/dbml-cli/src/cli/config.js | 3 + packages/dbml-cli/src/cli/index.js | 1 + packages/dbml-cli/src/cli/utils.js | 2 +- .../__tests__/exporter/exporter.spec.js | 5 + .../input/1_to_1_relations.in.dbml | 8 + .../input/general_schema.in.dbml | 81 ++++++ .../input/many_to_many_relationship.in.dbml | 86 ++++++ .../output/1_to_1_relations.out.sql | 11 + .../output/general_schema.out.sql | 58 ++++ .../output/many_to_many_relationship.out.sql | 141 ++++++++++ packages/dbml-core/jestHelpers.js | 2 +- .../dbml-core/src/export/SqliteExporter.js | 250 ++++++++---------- 12 files changed, 501 insertions(+), 147 deletions(-) create mode 100644 packages/dbml-core/__tests__/exporter/sqlite_exporter/input/1_to_1_relations.in.dbml create mode 100644 packages/dbml-core/__tests__/exporter/sqlite_exporter/input/general_schema.in.dbml create mode 100644 packages/dbml-core/__tests__/exporter/sqlite_exporter/input/many_to_many_relationship.in.dbml create mode 100644 packages/dbml-core/__tests__/exporter/sqlite_exporter/output/1_to_1_relations.out.sql create mode 100644 packages/dbml-core/__tests__/exporter/sqlite_exporter/output/general_schema.out.sql create mode 100644 packages/dbml-core/__tests__/exporter/sqlite_exporter/output/many_to_many_relationship.out.sql diff --git a/packages/dbml-cli/src/cli/config.js b/packages/dbml-cli/src/cli/config.js index 2bfe5b059..0426ae2e4 100644 --- a/packages/dbml-cli/src/cli/config.js +++ b/packages/dbml-cli/src/cli/config.js @@ -20,4 +20,7 @@ export default { snowflake: { name: 'Snowflake', }, + sqlite: { + name: 'SQLite' + } }; diff --git a/packages/dbml-cli/src/cli/index.js b/packages/dbml-cli/src/cli/index.js index 0574ca294..1706bbf6d 100644 --- a/packages/dbml-cli/src/cli/index.js +++ b/packages/dbml-cli/src/cli/index.js @@ -18,6 +18,7 @@ function dbml2sql (args) { .option('--postgres') .option('--mssql') .option('--oracle') + .option('--sqlite') .option('-o, --out-file ', 'compile all input files into a single files'); // .option('-d, --out-dir ', 'compile an input directory of dbml files into an output directory'); diff --git a/packages/dbml-cli/src/cli/utils.js b/packages/dbml-cli/src/cli/utils.js index 1726c82a4..d0ea88de3 100644 --- a/packages/dbml-cli/src/cli/utils.js +++ b/packages/dbml-cli/src/cli/utils.js @@ -16,7 +16,7 @@ function 
validateInputFilePaths (paths, validatePlugin) { function getFormatOpt (opts) { const formatOpts = Object.keys(opts).filter((opt) => { - return ['postgres', 'mysql', 'mssql', 'postgresLegacy', 'mysqlLegacy', 'mssqlLegacy', 'oracle', 'snowflake'].includes(opt); + return ['postgres', 'mysql', 'mssql', 'postgresLegacy', 'mysqlLegacy', 'mssqlLegacy', 'oracle', 'snowflake', 'sqlite'].includes(opt); }); let format = 'postgres'; diff --git a/packages/dbml-core/__tests__/exporter/exporter.spec.js b/packages/dbml-core/__tests__/exporter/exporter.spec.js index 6962254d2..b412e71fe 100644 --- a/packages/dbml-core/__tests__/exporter/exporter.spec.js +++ b/packages/dbml-core/__tests__/exporter/exporter.spec.js @@ -31,5 +31,10 @@ describe('@dbml/core - exporter', () => { test.each(scanTestNames(__dirname, 'oracle_exporter/input'))('oracle_exporter/%s', (name) => { runTest(name, 'oracle_exporter', 'oracle'); }); + + test.each(scanTestNames(__dirname, 'sqlite_exporter/input'))('sqlite_exporter/%s', (name) => { + runTest(name, 'sqlite_exporter', 'sqlite'); + }); + /* eslint-enable */ }); diff --git a/packages/dbml-core/__tests__/exporter/sqlite_exporter/input/1_to_1_relations.in.dbml b/packages/dbml-core/__tests__/exporter/sqlite_exporter/input/1_to_1_relations.in.dbml new file mode 100644 index 000000000..9658a9fd8 --- /dev/null +++ b/packages/dbml-core/__tests__/exporter/sqlite_exporter/input/1_to_1_relations.in.dbml @@ -0,0 +1,8 @@ +Table father { + obj_id char(50) [primary key, unique] +} + +Table child { + obj_id char(50) [primary key, unique] + father_obj_id char(50) [ref: - father.obj_id] +} \ No newline at end of file diff --git a/packages/dbml-core/__tests__/exporter/sqlite_exporter/input/general_schema.in.dbml b/packages/dbml-core/__tests__/exporter/sqlite_exporter/input/general_schema.in.dbml new file mode 100644 index 000000000..86fe52493 --- /dev/null +++ b/packages/dbml-core/__tests__/exporter/sqlite_exporter/input/general_schema.in.dbml @@ -0,0 +1,81 @@ +Enum "orders_status_enum" { + "created" + "running" + "done" + "failure" +} + +Enum "products_status_enum" { + "Out of Stock" + "In Stock" +} + +Table "orders" { + "id" int [pk, increment] + "user_id" int [unique, not null] + "status" orders_status_enum + "created_at" varchar(255) [note: 'When order created'] + + Note: 'This is a note in table "orders"' +} + +Table "order_items" { + "order_id" int + "product_id" int + "quantity" int [default: 1] +} + +Table "products" { + "id" int + "name" varchar(255) + "merchant_id" int [not null] + "price" int + "status" products_status_enum + "created_at" datetime [default: `CURRENT_TIMESTAMP`] + + Indexes { + (id, name) [pk] + (merchant_id, status) [name: "product_status"] + id [type: hash, unique, name: "products_index_1"] + } + + Note: "This is a note in table 'products'" +} + +Table "users" { + "id" int [pk] + "full_name" varchar(255) + "email" varchar(255) [unique] + "gender" varchar(255) + "date_of_birth" varchar(255) + "created_at" varchar(255) + "country_code" int + + Note: 'This is a note in table "users"' +} + +Table "merchants" { + "id" int [pk] + "merchant_name" varchar(255) + "country_code" int + "created_at" varchar(255) + "admin_id" int +} + +Table "countries" { + "code" int [pk] + "name" varchar(255) + "continent_name" varchar(255) +} + +Ref:"orders"."id" < "order_items"."order_id" + +Ref:"products"."id" < "order_items"."product_id" + +Ref:"countries"."code" < "users"."country_code" + +Ref:"countries"."code" < "merchants"."country_code" + +Ref:"merchants"."id" < "products"."merchant_id" + 
+Ref:"users"."id" < "merchants"."admin_id" diff --git a/packages/dbml-core/__tests__/exporter/sqlite_exporter/input/many_to_many_relationship.in.dbml b/packages/dbml-core/__tests__/exporter/sqlite_exporter/input/many_to_many_relationship.in.dbml new file mode 100644 index 000000000..6ffc99457 --- /dev/null +++ b/packages/dbml-core/__tests__/exporter/sqlite_exporter/input/many_to_many_relationship.in.dbml @@ -0,0 +1,86 @@ +table "A"."a" { + "AB" integer [pk] + "BA" integer [pk] +} + +table "B"."b" { + "BC" integer [pk] + "CB" integer [pk] +} + +table "C"."c" { + "CD" integer [pk, ref: <> "D"."d"."DE"] + "DC" integer +} + +table "D"."d" { + "DE" integer [pk] + "ED" integer +} + +table "E"."e" { + "EF" integer [pk] + "FE" integer [pk] + "DE" integer + "ED" integer +} + +table "G"."g" { + "GH" integer [pk] + "HG" integer [pk] + "EH" integer + "HE" integer +} + +ref: "A"."a".("AB","BA") <> "B"."b".("BC","CB") +ref: "E"."e".("EF","FE") <> "G"."g".("GH","HG") + + +table t1 { + a int [pk] + b int [unique] +} + +table t2 { + a int [pk] + b int [unique] +} + +table t1_t2 { + a int +} + +ref: t1.a <> t2.a +ref: t1.b <> t2.b + +Table schema.image { + id integer [pk] + url varchar +} + +Table schema.content_item { + id integer [pk] + heading varchar + description varchar +} + +Ref: schema.image.id <> schema.content_item.id + +Table schema.footer_item { + id integer [pk] + left varchar + centre varchar + right varchar +} + +Table "schema1"."customers" { + "id" integer [pk] + "full_name" varchar +} + +Table "schema2"."orders" { + "id" integer [pk] + "total_price" integer +} + +Ref: "schema1"."customers"."id" <> "schema2"."orders"."id" diff --git a/packages/dbml-core/__tests__/exporter/sqlite_exporter/output/1_to_1_relations.out.sql b/packages/dbml-core/__tests__/exporter/sqlite_exporter/output/1_to_1_relations.out.sql new file mode 100644 index 000000000..a87553a9f --- /dev/null +++ b/packages/dbml-core/__tests__/exporter/sqlite_exporter/output/1_to_1_relations.out.sql @@ -0,0 +1,11 @@ +PRAGMA foreign_keys = ON; + +CREATE TABLE "father" ( + "obj_id" TEXT UNIQUE PRIMARY KEY +); + +CREATE TABLE "child" ( + "obj_id" TEXT UNIQUE PRIMARY KEY, + "father_obj_id" TEXT, + FOREIGN KEY ("father_obj_id") REFERENCES "father" ("obj_id") +); diff --git a/packages/dbml-core/__tests__/exporter/sqlite_exporter/output/general_schema.out.sql b/packages/dbml-core/__tests__/exporter/sqlite_exporter/output/general_schema.out.sql new file mode 100644 index 000000000..6d26a59c6 --- /dev/null +++ b/packages/dbml-core/__tests__/exporter/sqlite_exporter/output/general_schema.out.sql @@ -0,0 +1,58 @@ +PRAGMA foreign_keys = ON; + +CREATE TABLE "orders" ( + "id" INTEGER PRIMARY KEY AUTOINCREMENT, + "user_id" INTEGER UNIQUE NOT NULL, + "status" TEXT CHECK ("status" IN ('created','running','done','failure')), + "created_at" TEXT +); + +CREATE TABLE "countries" ( + "code" INTEGER PRIMARY KEY, + "name" TEXT, + "continent_name" TEXT +); + +CREATE TABLE "users" ( + "id" INTEGER PRIMARY KEY, + "full_name" TEXT, + "email" TEXT UNIQUE, + "gender" TEXT, + "date_of_birth" TEXT, + "created_at" TEXT, + "country_code" INTEGER, + FOREIGN KEY ("country_code") REFERENCES "countries" ("code") +); + +CREATE TABLE "merchants" ( + "id" INTEGER PRIMARY KEY, + "merchant_name" TEXT, + "country_code" INTEGER, + "created_at" TEXT, + "admin_id" INTEGER, + FOREIGN KEY ("country_code") REFERENCES "countries" ("code"), + FOREIGN KEY ("admin_id") REFERENCES "users" ("id") +); + +CREATE TABLE "products" ( + "id" INTEGER, + "name" TEXT, + "merchant_id" INTEGER NOT 
NULL, + "price" INTEGER, + "status" TEXT CHECK ("status" IN ('Out of Stock','In Stock')), + "created_at" TEXT DEFAULT (CURRENT_TIMESTAMP), + PRIMARY KEY ("id","name"), + FOREIGN KEY ("merchant_id") REFERENCES "merchants" ("id") +); + +CREATE TABLE "order_items" ( + "order_id" INTEGER, + "product_id" INTEGER, + "quantity" INTEGER DEFAULT 1, + FOREIGN KEY ("order_id") REFERENCES "orders" ("id"), + FOREIGN KEY ("product_id") REFERENCES "products" ("id") +); + +CREATE INDEX "product_status" ON "products" ("merchant_id","status"); + +CREATE UNIQUE INDEX "products_index_1" ON "products" ("id"); diff --git a/packages/dbml-core/__tests__/exporter/sqlite_exporter/output/many_to_many_relationship.out.sql b/packages/dbml-core/__tests__/exporter/sqlite_exporter/output/many_to_many_relationship.out.sql new file mode 100644 index 000000000..5b5eb18d1 --- /dev/null +++ b/packages/dbml-core/__tests__/exporter/sqlite_exporter/output/many_to_many_relationship.out.sql @@ -0,0 +1,141 @@ +PRAGMA foreign_keys = ON; + +CREATE TABLE "a" ( + "AB" INTEGER, + "BA" INTEGER, + PRIMARY KEY ("AB","BA") +); + +CREATE TABLE "b" ( + "BC" INTEGER, + "CB" INTEGER, + PRIMARY KEY ("BC","CB") +); + +CREATE TABLE "c" ( + "CD" INTEGER PRIMARY KEY, + "DC" INTEGER +); + +CREATE TABLE "d" ( + "DE" INTEGER PRIMARY KEY, + "ED" INTEGER +); + +CREATE TABLE "e" ( + "EF" INTEGER, + "FE" INTEGER, + "DE" INTEGER, + "ED" INTEGER, + PRIMARY KEY ("EF","FE") +); + +CREATE TABLE "g" ( + "GH" INTEGER, + "HG" INTEGER, + "EH" INTEGER, + "HE" INTEGER, + PRIMARY KEY ("GH","HG") +); + +CREATE TABLE "t1" ( + "a" INTEGER PRIMARY KEY, + "b" INTEGER UNIQUE +); + +CREATE TABLE "t2" ( + "a" INTEGER PRIMARY KEY, + "b" INTEGER UNIQUE +); + +CREATE TABLE "t1_t2" ( + "a" INTEGER +); + +CREATE TABLE "image" ( + "id" INTEGER PRIMARY KEY, + "url" TEXT +); + +CREATE TABLE "content_item" ( + "id" INTEGER PRIMARY KEY, + "heading" TEXT, + "description" TEXT +); + +CREATE TABLE "footer_item" ( + "id" INTEGER PRIMARY KEY, + "left" TEXT, + "centre" TEXT, + "right" TEXT +); + +CREATE TABLE "customers" ( + "id" INTEGER PRIMARY KEY, + "full_name" TEXT +); + +CREATE TABLE "orders" ( + "id" INTEGER PRIMARY KEY, + "total_price" INTEGER +); + +CREATE TABLE "d_c" ( + "d_DE" INTEGER, + "c_CD" INTEGER, + PRIMARY KEY ("d_DE","c_CD"), + FOREIGN KEY ("d_DE") REFERENCES "d" ("DE"), + FOREIGN KEY ("c_CD") REFERENCES "c" ("CD") +); + +CREATE TABLE "a_b" ( + "a_AB" INTEGER, + "a_BA" INTEGER, + "b_BC" INTEGER, + "b_CB" INTEGER, + PRIMARY KEY ("a_AB","a_BA","b_BC","b_CB"), + FOREIGN KEY ("a_AB","a_BA") REFERENCES "a" ("AB","BA"), + FOREIGN KEY ("b_BC","b_CB") REFERENCES "b" ("BC","CB") +); + +CREATE TABLE "e_g" ( + "e_EF" INTEGER, + "e_FE" INTEGER, + "g_GH" INTEGER, + "g_HG" INTEGER, + PRIMARY KEY ("e_EF","e_FE","g_GH","g_HG"), + FOREIGN KEY ("e_EF","e_FE") REFERENCES "e" ("EF","FE"), + FOREIGN KEY ("g_GH","g_HG") REFERENCES "g" ("GH","HG") +); + +CREATE TABLE "t1_t2(1)" ( + "t1_a" INTEGER, + "t2_a" INTEGER, + PRIMARY KEY ("t1_a","t2_a"), + FOREIGN KEY ("t1_a") REFERENCES "t1" ("a"), + FOREIGN KEY ("t2_a") REFERENCES "t2" ("a") +); + +CREATE TABLE "t1_t2(2)" ( + "t1_b" INTEGER, + "t2_b" INTEGER, + PRIMARY KEY ("t1_b","t2_b"), + FOREIGN KEY ("t1_b") REFERENCES "t1" ("b"), + FOREIGN KEY ("t2_b") REFERENCES "t2" ("b") +); + +CREATE TABLE "image_content_item" ( + "image_id" INTEGER, + "content_item_id" INTEGER, + PRIMARY KEY ("image_id","content_item_id"), + FOREIGN KEY ("image_id") REFERENCES "image" ("id"), + FOREIGN KEY ("content_item_id") REFERENCES "content_item" ("id") +); + +CREATE 
TABLE "customers_orders" ( + "customers_id" INTEGER, + "orders_id" INTEGER, + PRIMARY KEY ("customers_id","orders_id"), + FOREIGN KEY ("customers_id") REFERENCES "customers" ("id"), + FOREIGN KEY ("orders_id") REFERENCES "orders" ("id") +); diff --git a/packages/dbml-core/jestHelpers.js b/packages/dbml-core/jestHelpers.js index b6bf81000..e7893f05c 100644 --- a/packages/dbml-core/jestHelpers.js +++ b/packages/dbml-core/jestHelpers.js @@ -16,7 +16,7 @@ global.getFileExtension = (format) => { return 'rb'; } - const SQL_FORMATS = ['mysql', 'postgres', 'mssql', 'oracle', 'snowflake']; + const SQL_FORMATS = ['mysql', 'postgres', 'mssql', 'oracle', 'snowflake', 'sqlite']; if (SQL_FORMATS.includes(format)) { return 'sql'; } diff --git a/packages/dbml-core/src/export/SqliteExporter.js b/packages/dbml-core/src/export/SqliteExporter.js index b8b3c182e..f63658982 100644 --- a/packages/dbml-core/src/export/SqliteExporter.js +++ b/packages/dbml-core/src/export/SqliteExporter.js @@ -7,12 +7,14 @@ import { } from './utils'; import { shouldPrintSchemaName } from '../model_structure/utils'; -// SQLite doesn't have schemas or built-in rich types; we use affinities. -// Basic mapping from common Postgres types (and typical custom names) to SQLite affinities. +function escapeSingleQuotes(s) { + return String(s || '').replace(/'/g, "''"); +} + const SQLITE_TYPE_MAP = (() => { const int = new Set(['SMALLINT', 'INT2', 'INTEGER', 'INT', 'INT4', 'BIGINT', 'INT8', 'SMALLSERIAL', 'SERIAL', 'BIGSERIAL']); - const real = new Set(['REAL', 'FLOAT', 'DOUBLE', 'DOUBLE PRECISION', 'NUMERIC(.*)']); // NUMERIC often -> NUMERIC, but many use REAL for decimals + const real = new Set(['REAL', 'FLOAT', 'DOUBLE', 'DOUBLE PRECISION', 'NUMERIC(.*)']); const num = new Set(['DECIMAL', 'NUMERIC']); const text = new Set(['CHAR', 'CHARACTER', 'NCHAR', 'NVARCHAR', 'VARCHAR', 'CHARACTER VARYING', 'TEXT', 'NAME', 'BPCHAR', 'UUID', 'XML', 'JSON', 'JSONB', 'INET', 'CIDR', 'MACADDR', 'MACADDR8']); const blob = new Set(['BYTEA', 'BLOB']); @@ -21,56 +23,93 @@ const SQLITE_TYPE_MAP = (() => { return { int, real, num, text, blob, date, bool }; })(); -// Normalize incoming type name (strip params, collapse spaces) -function normalizeTypeName (tn) { +function normalizeTypeName(tn) { const t = (tn || '').trim().replace(/\s+/g, ' ').toUpperCase(); - return t.replace(/\(.*\)$/, ''); // drop length/precision for mapping + return t.replace(/\(.*\)$/, ''); } -function mapTypeToSQLite (originalTypeName) { +function mapTypeToSQLite(originalTypeName) { const t = normalizeTypeName(originalTypeName); if (SQLITE_TYPE_MAP.int.has(t)) return 'INTEGER'; - if (SQLITE_TYPE_MAP.bool.has(t)) return 'INTEGER'; // (optionally add CHECK later) + if (SQLITE_TYPE_MAP.bool.has(t)) return 'INTEGER'; if (SQLITE_TYPE_MAP.blob.has(t)) return 'BLOB'; if (SQLITE_TYPE_MAP.text.has(t)) return 'TEXT'; - if (SQLITE_TYPE_MAP.date.has(t)) return 'TEXT'; // store ISO strings; alternative: NUMERIC + if (SQLITE_TYPE_MAP.date.has(t)) return 'TEXT'; if (SQLITE_TYPE_MAP.num.has(t)) return 'NUMERIC'; if (t === 'REAL' || t === 'FLOAT' || t === 'DOUBLE' || t === 'DOUBLE PRECISION') return 'REAL'; - // Fallback: try to infer by keywords if (/INT/i.test(t)) return 'INTEGER'; if (/CHAR|CLOB|TEXT/i.test(t)) return 'TEXT'; if (/BLOB/i.test(t)) return 'BLOB'; if (/REAL|FLOA|DOUB/i.test(t)) return 'REAL'; if (/NUM|DEC|MONEY/i.test(t)) return 'NUMERIC'; - // Unknown/custom → TEXT to be safe return 'TEXT'; } +function topoSortTables(allTableIds, dependencyEdges) { + const parentsByChild = new Map(); + 
const childrenByParent = new Map(); + const indegree = new Map(); + + allTableIds.forEach(id => { + indegree.set(id, 0); + parentsByChild.set(id, new Set()); + childrenByParent.set(id, new Set()); + }); + + (dependencyEdges || []).forEach(([child, parent]) => { + if (!parentsByChild.has(child)) parentsByChild.set(child, new Set()); + if (!childrenByParent.has(parent)) childrenByParent.set(parent, new Set()); + if (!parentsByChild.get(child).has(parent)) { + parentsByChild.get(child).add(parent); + childrenByParent.get(parent).add(child); + indegree.set(child, (indegree.get(child) || 0) + 1); + } + }); + + const q = []; + indegree.forEach((deg, id) => { if (deg === 0) q.push(id); }); + + const ordered = []; + while (q.length) { + const node = q.shift(); + ordered.push(node); + (childrenByParent.get(node) || []).forEach(child => { + indegree.set(child, indegree.get(child) - 1); + if (indegree.get(child) === 0) q.push(child); + }); + } + + if (ordered.length !== allTableIds.length) { + return allTableIds; + } + return ordered; +} + + class SqliteExporter { - // Build enum name → value array map so we can add CHECK constraints - static buildEnumMap (model) { + static buildEnumMap(model) { const map = new Map(); Object.keys(model.enums).forEach((enumId) => { const e = model.enums[enumId]; const schema = model.schemas[e.schemaId]; - // Fully qualified DBML enum name (schema may exist in model, but SQLite won’t use it) const fq = `"${schema.name}"."${e.name}"`; const local = `"${e.name}"`; const vals = e.valueIds.map(id => model.enumValues[id].name); map.set(fq, vals); map.set(local, vals); - map.set(e.name, vals); // convenience + map.set(e.name, vals); }); return map; } // Collect per-table FK clauses (since SQLite needs inline FKs) - static collectForeignKeysByTable (refIds, model) { - const fksByTableId = new Map(); // tableId -> array of fk clause strings - const junctionCreates = []; // CREATE TABLE strings for M:N junctions (with inline FKs) + static collectForeignKeysByTable(refIds, model) { + const fksByTableId = new Map(); + const junctionCreates = []; + const dependencyEdges = []; const usedTableNames = new Set(Object.values(model.tables).map(t => t.name)); @@ -88,20 +127,18 @@ class SqliteExporter { const foreignField = model.fields[foreignEndpoint.fieldIds[0]]; const foreignTable = model.tables[foreignField.tableId]; - const refCols = SqliteExporter.buildFieldName(refEndpoint.fieldIds, model); // "(colA, colB)" + const refCols = SqliteExporter.buildFieldName(refEndpoint.fieldIds, model); const foreignCols = SqliteExporter.buildFieldName(foreignEndpoint.fieldIds, model); if (refOneIndex === -1) { - // Many-to-many: build a junction table with FKs inline const firstTableFieldsMap = buildJunctionFields1(refEndpoint.fieldIds, model); const secondTableFieldsMap = buildJunctionFields2(foreignEndpoint.fieldIds, model, firstTableFieldsMap); const newTableName = buildNewTableName(refTable.name, foreignTable.name, usedTableNames); let line = `CREATE TABLE "${newTableName}" (\n`; - const key1s = [...firstTableFieldsMap.keys()].join('", "'); - const key2s = [...secondTableFieldsMap.keys()].join('", "'); + const key1s = [...firstTableFieldsMap.keys()].join('","'); + const key2s = [...secondTableFieldsMap.keys()].join('","'); - // Columns firstTableFieldsMap.forEach((fieldType, fieldName) => { line += ` "${fieldName}" ${mapTypeToSQLite(fieldType)},\n`; }); @@ -109,12 +146,10 @@ class SqliteExporter { line += ` "${fieldName}" ${mapTypeToSQLite(fieldType)},\n`; }); - // Composite PK - line += ` 
PRIMARY KEY ("${key1s}", "${key2s}"),\n`; + line += ` PRIMARY KEY ("${key1s}","${key2s}"),\n`; - // FKs (inline) - const refColsList = [...firstTableFieldsMap.keys()].map(k => `"${k}"`).join(', '); - const forColsList = [...secondTableFieldsMap.keys()].map(k => `"${k}"`).join(', '); + const refColsList = [...firstTableFieldsMap.keys()].map(k => `"${k}"`).join(','); + const forColsList = [...secondTableFieldsMap.keys()].map(k => `"${k}"`).join(','); line += ` FOREIGN KEY (${refColsList}) REFERENCES "${refTable.name}" ${refCols}`; if (ref.onDelete) line += ` ON DELETE ${ref.onDelete.toUpperCase()}`; @@ -128,7 +163,6 @@ class SqliteExporter { junctionCreates.push(line); } else { - // 1:N: attach FK to the "many" side (foreignEndpoint.table) const fkClauseParts = []; fkClauseParts.push(`FOREIGN KEY ${foreignCols} REFERENCES "${refTable.name}" ${refCols}`); if (ref.onDelete) fkClauseParts.push(`ON DELETE ${ref.onDelete.toUpperCase()}`); @@ -138,35 +172,32 @@ class SqliteExporter { const tableId = foreignTable.id; if (!fksByTableId.has(tableId)) fksByTableId.set(tableId, []); fksByTableId.get(tableId).push(fkLine); + dependencyEdges.push([foreignTable.id, refTable.id]); } }); - return { fksByTableId, junctionCreates }; + return { fksByTableId, junctionCreates, dependencyEdges }; } - static buildFieldName (fieldIds, model) { - const fieldNames = fieldIds.map(fieldId => `"${model.fields[fieldId].name}"`).join(', '); + static buildFieldName(fieldIds, model) { + const fieldNames = fieldIds.map(fieldId => `"${model.fields[fieldId].name}"`).join(','); return `(${fieldNames})`; } - static getFieldLines (tableId, model, enumMap) { + static getFieldLines(tableId, model, enumMap) { const table = model.tables[tableId]; const lines = table.fieldIds.map((fieldId) => { const field = model.fields[fieldId]; - // Type & affinity let affinity; let isBoolean = false; let enumCheck = ''; if (!field.type.schemaName || !shouldPrintSchemaName(field.type.schemaName)) { - // Built-in or custom non-schema type string const originalTypeName = field.type.type_name; const upperType = normalizeTypeName(originalTypeName); - // Enum detection (from enumMap): DBML enums appear as schema.type or type - // Build potential keys to probe: const enumKeys = [ `"${field.type.schemaName}"."${field.type.type_name}"`, `"${field.type.type_name}"`, @@ -174,75 +205,48 @@ class SqliteExporter { ].filter(Boolean); let enumVals = null; - enumKeys.some((k) => { - if (enumMap.has(k)) { - enumVals = enumMap.get(k); - return true; - } - return false; - }); + for (const k of enumKeys) { + if (enumMap.has(k)) { enumVals = enumMap.get(k); break; } + } if (enumVals && enumVals.length) { affinity = 'TEXT'; - // CHECK uses the column name; applied after we build the base line - enumCheck = ` CHECK ("${field.name}" IN (${enumVals.map(v => `'${v.replace(/'/g, "''")}'`).join(', ')}))`; + enumCheck = ` CHECK ("${field.name}" IN (${enumVals.map(v => `'${escapeSingleQuotes(v)}'`).join(',')}))`; } else { affinity = mapTypeToSQLite(originalTypeName); isBoolean = SQLITE_TYPE_MAP.bool.has(upperType); } } else { - // A custom namespaced type → TEXT (conservative) affinity = 'TEXT'; } - // Start line with quoted identifier and affinity let line = `"${field.name}" ${affinity}`; - // AUTOINCREMENT/PK handling: - // In SQLite, AUTOINCREMENT only valid for a single-column INTEGER PRIMARY KEY. - // If field.increment is set, prefer the canonical pattern. if (field.increment) { if (affinity === 'INTEGER') { - // Make it the rowid PK. 
If user also marked pk, we’ll consume it here. line = `"${field.name}" INTEGER PRIMARY KEY AUTOINCREMENT`; - // Note: do NOT add UNIQUE/NOT NULL/etc. here; SQLite implies NOT NULL for rowid PK. - } else { - // Non-integer increment requested → fallback to INTEGER (rowid) semantics not possible - // Keep original affinity but ignore increment, as SQLite cannot autoincrement non-integer. - // (Optionally: emit a comment) - line += ''; } } else { - // Regular constraints if (field.unique) line += ' UNIQUE'; - // If PK on a non-increment column, allow either single or composite; for single, this will be a table-level or column-level PK. - if (field.pk) { - // Column-level PRIMARY KEY is valid (composite handled separately at table-level) - line += ' PRIMARY KEY'; - } + if (field.pk) line += ' PRIMARY KEY'; if (field.not_null) line += ' NOT NULL'; } - // Defaults if (field.dbdefault) { if (field.dbdefault.type === 'expression') { - line += ` DEFAULT (${field.dbdefault.value})`; + let expr = String(field.dbdefault.value || '').trim(); + line += ` DEFAULT (${expr})`; } else if (field.dbdefault.type === 'string') { - line += ` DEFAULT '${field.dbdefault.value.replace(/'/g, "''")}'`; + line += ` DEFAULT '${escapeSingleQuotes(field.dbdefault.value)}'`; } else { line += ` DEFAULT ${field.dbdefault.value}`; } } - // Boolean CHECK (optional): enforce 0/1 if desired. if (!field.increment && isBoolean) { line += ` CHECK ("${field.name}" IN (0,1))`; } - - // Enum CHECK - if (enumCheck) { - line += enumCheck; - } + if (enumCheck) line += enumCheck; return line; }); @@ -250,7 +254,7 @@ class SqliteExporter { return lines; } - static getCompositePKs (tableId, model) { + static getCompositePKs(tableId, model) { const table = model.tables[tableId]; const compositePkIds = table.indexIds ? 
table.indexIds.filter(indexId => model.indexes[indexId].pk) : []; const lines = compositePkIds.map((keyId) => { @@ -268,30 +272,33 @@ class SqliteExporter { columnArr.push(columnStr); }); - return `PRIMARY KEY (${columnArr.join(', ')})`; + return `PRIMARY KEY (${columnArr.join(',')})`; }); return lines; } - static exportTables (tableIds, model, enumMap, fksByTableId) { + static exportTables(tableIds, model, enumMap, fksByTableId) { const tableStrs = (tableIds || []).map((tableId) => { + const table = model.tables[tableId]; + const fieldContents = SqliteExporter.getFieldLines(tableId, model, enumMap); const compositePKs = SqliteExporter.getCompositePKs(tableId, model); const fkClauses = fksByTableId.get(tableId) || []; const content = [...fieldContents, ...compositePKs, ...fkClauses]; - const table = model.tables[tableId]; - // Ignore schemas in SQLite - const tableStr = `CREATE TABLE "${table.name}" (\n${content.map(line => ` ${line}`).join(',\n')}\n);\n`; + const tableStr = + `CREATE TABLE "${table.name}" (\n` + + `${content.map(line => ` ${line}`).join(',\n')}\n` + + `);\n`; + return tableStr; }); return tableStrs; } - static exportIndexes (indexIds, model) { - // exclude composite pk index + static exportIndexes(indexIds, model) { const indexArr = (indexIds || []).filter((indexId) => !model.indexes[indexId].pk).map((indexId) => { const index = model.indexes[indexId]; const table = model.tables[index.tableId]; @@ -303,7 +310,6 @@ class SqliteExporter { if (indexName) line += ` ${indexName}`; line += ` ON "${table.name}"`; - // SQLite has no USING method const columnArr = []; index.columnIds.forEach((columnId) => { const column = model.indexColumns[columnId]; @@ -316,7 +322,7 @@ class SqliteExporter { columnArr.push(columnStr); }); - line += ` (${columnArr.join(', ')})`; + line += ` (${columnArr.join(',')})`; line += ';\n'; return line; }); @@ -324,80 +330,34 @@ class SqliteExporter { return indexArr; } - static exportComments (comments, model) { - // SQLite does not support COMMENT ON; emit as SQL comments - const commentArr = comments.map((comment) => { - const table = model.tables[comment.tableId]; - switch (comment.type) { - case 'table': { - const txt = (table.note || '').replace(/'/g, "''"); - return `-- TABLE "${table.name}" COMMENT: '${txt}'\n`; - } - case 'column': { - const field = model.fields[comment.fieldId]; - const txt = (field.note || '').replace(/'/g, "''"); - return `-- COLUMN "${table.name}"."${field.name}" COMMENT: '${txt}'\n`; - } - default: - return ''; - } - }); - return commentArr; - } - - static export (model) { + static export(model) { const database = model.database['1']; - // Build enum map first const enumMap = SqliteExporter.buildEnumMap(model); - // Collect all refs once to produce inline FKs and any junction tables const allRefIds = _.flatten(database.schemaIds.map(sid => model.schemas[sid].refIds || [])); - const { fksByTableId, junctionCreates } = SqliteExporter.collectForeignKeysByTable(allRefIds, model); - - const statements = database.schemaIds.reduce((prev, schemaId) => { - const schema = model.schemas[schemaId]; - const { tableIds } = schema; + const { fksByTableId, junctionCreates, dependencyEdges } = SqliteExporter.collectForeignKeysByTable(allRefIds, model); - if (!_.isEmpty(tableIds)) { - prev.tables.push(...SqliteExporter.exportTables(tableIds, model, enumMap, fksByTableId)); - } + const allTableIds = _.flatten(database.schemaIds.map(sid => model.schemas[sid].tableIds || [])); + const orderedTableIds = topoSortTables(allTableIds, 
dependencyEdges); - const indexIds = _.flatten((tableIds || []).map((tableId) => model.tables[tableId].indexIds || [])); - if (!_.isEmpty(indexIds)) { - prev.indexes.push(...SqliteExporter.exportIndexes(indexIds, model)); - } + const tableCreates = SqliteExporter.exportTables(orderedTableIds, model, enumMap, fksByTableId); - const commentNodes = _.flatten((tableIds || []).map((tableId) => { - const { fieldIds, note } = model.tables[tableId]; - const fieldObjects = (fieldIds || []) - .filter((fieldId) => model.fields[fieldId].note) - .map((fieldId) => ({ type: 'column', fieldId, tableId })); - return note ? [{ type: 'table', tableId }].concat(fieldObjects) : fieldObjects; - })); - if (!_.isEmpty(commentNodes)) { - prev.comments.push(...SqliteExporter.exportComments(commentNodes, model)); - } - - return prev; - }, { - pragmas: [], - tables: [], - indexes: [], - comments: [], - junctions: [], - }); + const allIndexIds = _.flatten( + (database.schemaIds || []).map(sid => { + const tIds = model.schemas[sid].tableIds || []; + return _.flatten(tIds.map(tid => model.tables[tid].indexIds || [])); + }) + ); + const indexCreates = SqliteExporter.exportIndexes(allIndexIds, model); - // Enable FK enforcement - const pragmas = ['PRAGMA foreign_keys = ON;']; + const pragmas = ['PRAGMA foreign_keys = ON;\n']; const res = _.concat( pragmas, - // No schemas or enums in SQLite - statements.tables, - junctionCreates, // junction tables created after base tables (they only reference them) - statements.indexes, - statements.comments, + tableCreates, + junctionCreates, + indexCreates, ).join('\n'); return res; From 61a1b31c93ad3c1356c1ad13ed55c08f5e069178 Mon Sep 17 00:00:00 2001 From: Daniel Gary Date: Mon, 10 Nov 2025 15:02:55 -0600 Subject: [PATCH 3/3] removed typemapping, normalizeTypeName, and mapTypeToSQLite. 
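
SQLite derives a type affinity from whatever type name a column declares, so the
explicit mapping table was redundant: a declared type containing "INT" gets
INTEGER affinity, "CHAR"/"CLOB"/"TEXT" gets TEXT, "BLOB" (or no declared type)
gets BLOB, "REAL"/"FLOA"/"DOUB" gets REAL, and anything else falls back to
NUMERIC. The exporter now emits the original DBML type names for ordinary
columns and lets SQLite resolve them; enums and schema-qualified custom types
are still forced to TEXT. A minimal sketch of those affinity rules, for
reference only (this helper is not part of the exporter):

    // Mirrors SQLite's documented column-affinity rules (https://sqlite.org/datatype3.html)
    function inferAffinity(declaredType) {
      const t = (declaredType || '').toUpperCase();
      if (t.includes('INT')) return 'INTEGER';            // int, bigint, int4, ...
      if (/CHAR|CLOB|TEXT/.test(t)) return 'TEXT';        // varchar(255), text, nchar, ...
      if (t === '' || t.includes('BLOB')) return 'BLOB';  // untyped columns or blob
      if (/REAL|FLOA|DOUB/.test(t)) return 'REAL';        // real, float, double precision
      return 'NUMERIC';                                    // decimal, datetime, boolean, ...
    }

    // inferAffinity('varchar(255)') === 'TEXT'
    // inferAffinity('datetime')     === 'NUMERIC'
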
--- .../dbml-core/src/export/SqliteExporter.js | 61 +++++-------------- 1 file changed, 16 insertions(+), 45 deletions(-) diff --git a/packages/dbml-core/src/export/SqliteExporter.js b/packages/dbml-core/src/export/SqliteExporter.js index f63658982..a477ec412 100644 --- a/packages/dbml-core/src/export/SqliteExporter.js +++ b/packages/dbml-core/src/export/SqliteExporter.js @@ -11,41 +11,13 @@ function escapeSingleQuotes(s) { return String(s || '').replace(/'/g, "''"); } -const SQLITE_TYPE_MAP = (() => { - const int = new Set(['SMALLINT', 'INT2', 'INTEGER', 'INT', 'INT4', 'BIGINT', 'INT8', - 'SMALLSERIAL', 'SERIAL', 'BIGSERIAL']); - const real = new Set(['REAL', 'FLOAT', 'DOUBLE', 'DOUBLE PRECISION', 'NUMERIC(.*)']); - const num = new Set(['DECIMAL', 'NUMERIC']); - const text = new Set(['CHAR', 'CHARACTER', 'NCHAR', 'NVARCHAR', 'VARCHAR', 'CHARACTER VARYING', 'TEXT', 'NAME', 'BPCHAR', 'UUID', 'XML', 'JSON', 'JSONB', 'INET', 'CIDR', 'MACADDR', 'MACADDR8']); - const blob = new Set(['BYTEA', 'BLOB']); - const date = new Set(['DATE', 'TIME', 'TIMESTAMP', 'TIMESTAMP WITH TIME ZONE', 'TIMESTAMP WITHOUT TIME ZONE', 'TIME WITH TIME ZONE', 'TIME WITHOUT TIME ZONE', 'INTERVAL']); - const bool = new Set(['BOOLEAN', 'BOOL']); - return { int, real, num, text, blob, date, bool }; -})(); - -function normalizeTypeName(tn) { - const t = (tn || '').trim().replace(/\s+/g, ' ').toUpperCase(); - return t.replace(/\(.*\)$/, ''); -} - -function mapTypeToSQLite(originalTypeName) { - const t = normalizeTypeName(originalTypeName); - - if (SQLITE_TYPE_MAP.int.has(t)) return 'INTEGER'; - if (SQLITE_TYPE_MAP.bool.has(t)) return 'INTEGER'; - if (SQLITE_TYPE_MAP.blob.has(t)) return 'BLOB'; - if (SQLITE_TYPE_MAP.text.has(t)) return 'TEXT'; - if (SQLITE_TYPE_MAP.date.has(t)) return 'TEXT'; - if (SQLITE_TYPE_MAP.num.has(t)) return 'NUMERIC'; - if (t === 'REAL' || t === 'FLOAT' || t === 'DOUBLE' || t === 'DOUBLE PRECISION') return 'REAL'; - - if (/INT/i.test(t)) return 'INTEGER'; - if (/CHAR|CLOB|TEXT/i.test(t)) return 'TEXT'; - if (/BLOB/i.test(t)) return 'BLOB'; - if (/REAL|FLOA|DOUB/i.test(t)) return 'REAL'; - if (/NUM|DEC|MONEY/i.test(t)) return 'NUMERIC'; +// Minimal boolean type detection - only used to add CHECK (field IN (0,1)) constraints +// for fields that should be boolean in nature +const BOOLEAN_TYPES = new Set(['BOOLEAN', 'BOOL']); - return 'TEXT'; +function isBooleanType(typeName) { + const normalized = (typeName || '').trim().toUpperCase(); + return BOOLEAN_TYPES.has(normalized); } function topoSortTables(allTableIds, dependencyEdges) { @@ -140,10 +112,10 @@ class SqliteExporter { const key2s = [...secondTableFieldsMap.keys()].join('","'); firstTableFieldsMap.forEach((fieldType, fieldName) => { - line += ` "${fieldName}" ${mapTypeToSQLite(fieldType)},\n`; + line += ` "${fieldName}" ${fieldType},\n`; }); secondTableFieldsMap.forEach((fieldType, fieldName) => { - line += ` "${fieldName}" ${mapTypeToSQLite(fieldType)},\n`; + line += ` "${fieldName}" ${fieldType},\n`; }); line += ` PRIMARY KEY ("${key1s}","${key2s}"),\n`; @@ -190,13 +162,12 @@ class SqliteExporter { const lines = table.fieldIds.map((fieldId) => { const field = model.fields[fieldId]; - let affinity; + let typeName; let isBoolean = false; let enumCheck = ''; if (!field.type.schemaName || !shouldPrintSchemaName(field.type.schemaName)) { const originalTypeName = field.type.type_name; - const upperType = normalizeTypeName(originalTypeName); const enumKeys = [ `"${field.type.schemaName}"."${field.type.type_name}"`, @@ -210,20 +181,20 @@ class 
SqliteExporter { } if (enumVals && enumVals.length) { - affinity = 'TEXT'; + typeName = 'TEXT'; enumCheck = ` CHECK ("${field.name}" IN (${enumVals.map(v => `'${escapeSingleQuotes(v)}'`).join(',')}))`; } else { - affinity = mapTypeToSQLite(originalTypeName); - isBoolean = SQLITE_TYPE_MAP.bool.has(upperType); + typeName = originalTypeName; + isBoolean = isBooleanType(originalTypeName); } } else { - affinity = 'TEXT'; + typeName = 'TEXT'; } - let line = `"${field.name}" ${affinity}`; + let line = `"${field.name}" ${typeName}`; if (field.increment) { - if (affinity === 'INTEGER') { + if (typeName.toUpperCase().includes('INT')) { line = `"${field.name}" INTEGER PRIMARY KEY AUTOINCREMENT`; } } else { @@ -364,4 +335,4 @@ class SqliteExporter { } } -export default SqliteExporter; +export default SqliteExporter; \ No newline at end of file
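
Usage, once the series is applied: sqlite can be selected like any other target. From the CLI:

    dbml2sql schema.dbml --sqlite -o schema.sql

Programmatically, a rough sketch assuming the package's usual top-level exporter wrapper
(the same entry point the other SQL formats go through) rather than calling SqliteExporter
directly:

    // top-level wrapper assumed from the published @dbml/core package; it parses the DBML,
    // normalizes the model, and dispatches to ModelExporter.export(model, 'sqlite')
    const { exporter } = require('@dbml/core');

    const dbml = `
      Table users {
        id int [pk, increment]
        email varchar(255) [unique, not null]
      }
    `;

    const sql = exporter.export(dbml, 'sqlite');
    console.log(sql); // PRAGMA foreign_keys = ON; followed by CREATE TABLE "users" (...)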