Mirror of https://github.com/rbock/sqlpp11.git (synced 2024-11-15 12:29:41 +08:00)
ddl2cpp command line argument for custom types (#491)
* ddl2cpp command line argument for custom types
  - Updated the ddl2cpp script to allow custom/extended types through an external csv file
  - Had to re-order the script to allow the command line to be parsed before setting up the parser
  - Updated README
* Test for the command line argument
  - Script test only for now
* Test the custom type argument
  - First a negative test
  - Last a positive test and compile test against the generated output
* Expand the test
  - Ensure built-in types still work
  - Check capitalisation
  - Ensure more than one custom type works
  - Check a type with spaces

Co-authored-by: Carel Combrink <carel.combrink@vastech.co.za>
This commit is contained in:
parent eac9a6e5e3
commit babd420ecb
README.md — 36 changed lines

@@ -239,12 +239,46 @@ Create headers for them with provided Python script:

 ```
 %sqlpp11_dir%/scripts/ddl2cpp ~/temp/MyTable.ddl ~/temp/MyTable %DatabaseNamespaceForExample%
 ```
-(In case you're getting notes about unsupported column type take a look at the other datatypes in sqlpp11/data_types. They are not hard to implement.)
+In case you're getting notes about unsupported column types, consider:
+
+- Take a look at the other datatypes in sqlpp11/data_types. They are not hard to implement.
+- Use the `--datatype-file` command line argument as described below.
+
 Include generated header (MyTable.h), that's all.

 If you prefer Ruby over Python, you might want to take a look at https://github.com/douyw/sqlpp11gen

+Unsupported column types:
+-------------
+__Map unsupported column types to supported column types with a csv file__:
+
+One can use the `--datatype-file` command line argument for the ddl2cpp script to map unsupported column types to supported column types.
+
+The format of the csv file is:
+```
+<dataType>, <col_type1>, <col_type2>
+<dataType>, <col_type3>
+```
+
+Where `<dataType>` is one of the following internal types:
+
+- `Boolean`
+- `Integer`
+- `Serial`
+- `FloatingPoint`
+- `Text`
+- `Blob`
+- `Date`
+- `DateTime`
+- `Time`
+
+Example:
+
+```
+Boolean, one_or_zero
+Text, url, uuid
+```
+
 Contact:
 --------
 * Issues at https://github.com/rbock/sqlpp11/issues
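For orientation, combining the README's existing example invocation with the new option would look roughly like this; the option goes in front of the positional arguments, and the csv path here is a made-up placeholder, not a file from this commit:

```
%sqlpp11_dir%/scripts/ddl2cpp --datatype-file=~/temp/my_types.csv ~/temp/MyTable.ddl ~/temp/MyTable %DatabaseNamespaceForExample%
```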
scripts/ddl2cpp — 296 changed lines

The main change to the script is structural: the pyparsing objects that used to be built at module scope are removed there and rebuilt inside a new initDllParser() function, so that the command line (including any --datatype-file mappings) can be processed before the parser is set up. Only the plain type-name lists (ddlBooleanTypes … ddlTimeTypes) stay at module level; these are the lists the csv mappings extend.

@@ -71,16 +71,12 @@ ddlBracedExpression << ddlLeft + ddlExpression + ddlRight
 ddlArguments = pp.Suppress(pp.Group(pp.delimitedList(ddlExpression)))
 ddlFunctionCall << ddlName + ddlLeft + pp.Optional(ddlArguments) + ddlRight

-# Column and constraint parsers
+# Data types
 ddlBooleanTypes = [
     "bool",
     "boolean",
 ]

-ddlBoolean = pp.Or(
-    map(pp.CaselessLiteral, sorted(ddlBooleanTypes, reverse=True))
-).setParseAction(pp.replaceWith("boolean"))
-
 ddlIntegerTypes = [
     "bigint",
     "int",

@@ -92,20 +88,12 @@ ddlIntegerTypes = [
     "smallint",
     "tinyint",
 ]
-ddlInteger = pp.Or(
-    map(pp.CaselessLiteral, sorted(ddlIntegerTypes, reverse=True))
-).setParseAction(pp.replaceWith("integer"))
-
 ddlSerialTypes = [
     "bigserial",  # PostgreSQL
     "serial",  # PostgreSQL
     "smallserial",  # PostgreSQL
 ]
-ddlSerial = (
-    pp.Or(map(pp.CaselessLiteral, sorted(ddlSerialTypes, reverse=True)))
-    .setParseAction(pp.replaceWith("integer"))
-    .setResultsName("hasAutoValue")
-)
-
 ddlFloatingPointTypes = [
     "decimal",  # MYSQL

@@ -116,9 +104,6 @@ ddlFloatingPointTypes = [
     "numeric",  # PostgreSQL
     "real",
 ]
-ddlFloatingPoint = pp.Or(
-    map(pp.CaselessLiteral, sorted(ddlFloatingPointTypes, reverse=True))
-).setParseAction(pp.replaceWith("floating_point"))
-
 ddlTextTypes = [
     "char",

@@ -136,10 +121,6 @@ ddlTextTypes = [
     "rational",  # PostgreSQL pg_rationale extension
 ]

-ddlText = pp.Or(
-    map(pp.CaselessLiteral, sorted(ddlTextTypes, reverse=True))
-).setParseAction(pp.replaceWith("text"))
-
 ddlBlobTypes = [
     "bytea",
     "tinyblob",

@@ -150,21 +131,10 @@ ddlBlobTypes = [
     "varbinary",  # MYSQL
 ]

-ddlBlob = pp.Or(
-    map(pp.CaselessLiteral, sorted(ddlBlobTypes, reverse=True))
-).setParseAction(pp.replaceWith("blob"))
-
 ddlDateTypes = [
     "date",
 ]

-ddlDate = (
-    pp.Or(map(pp.CaselessLiteral, sorted(ddlDateTypes, reverse=True)))
-    .setParseAction(pp.replaceWith("day_point"))
-    .setResultsName("warnTimezone")
-)
-
 ddlDateTimeTypes = [
     "datetime",
     "timestamp",

@@ -173,118 +143,158 @@ ddlDateTimeTypes = [
     "timestamptz",  # PostgreSQL
 ]

 ddlTimeTypes = [
     "time",
     "time without time zone",  # PostgreSQL
     "time with time zone",  # PostgreSQL
 ]

The rest of this hunk removes the remaining module-level parser definitions (ddlDateTime, ddlTime, ddlUnknown, ddlType, ddlUnsigned, ddlDigits, ddlWidth, ddlTimezone, ddlNotNull, ddlDefaultValue, ddlAutoKeywords/ddlAutoValue, ddlConstraintKeywords/ddlConstraint, ddlColumn, the CREATE TABLE parser, ddl and ddlComment) and rebuilds them, unchanged apart from indentation, inside the new function:

+# Init the DLL parser
+def initDllParser():
+    global ddl
+    global ddlType
+    global ddlColumn
+    global ddlConstraint
+    global ddlCreateTable
+    # Column and constraint parsers
+
+    ddlBoolean = pp.Or(
+        map(pp.CaselessLiteral, sorted(ddlBooleanTypes, reverse=True))
+    ).setParseAction(pp.replaceWith("boolean"))
+
+    ddlInteger = pp.Or(
+        map(pp.CaselessLiteral, sorted(ddlIntegerTypes, reverse=True))
+    ).setParseAction(pp.replaceWith("integer"))
+
+    ddlSerial = (
+        pp.Or(map(pp.CaselessLiteral, sorted(ddlSerialTypes, reverse=True)))
+        .setParseAction(pp.replaceWith("integer"))
+        .setResultsName("hasAutoValue")
+    )
+
+    ddlFloatingPoint = pp.Or(
+        map(pp.CaselessLiteral, sorted(ddlFloatingPointTypes, reverse=True))
+    ).setParseAction(pp.replaceWith("floating_point"))
+
+    ddlText = pp.Or(
+        map(pp.CaselessLiteral, sorted(ddlTextTypes, reverse=True))
+    ).setParseAction(pp.replaceWith("text"))
+
+    ddlBlob = pp.Or(
+        map(pp.CaselessLiteral, sorted(ddlBlobTypes, reverse=True))
+    ).setParseAction(pp.replaceWith("blob"))
+
+    ddlDate = (
+        pp.Or(map(pp.CaselessLiteral, sorted(ddlDateTypes, reverse=True)))
+        .setParseAction(pp.replaceWith("day_point"))
+        .setResultsName("warnTimezone")
+    )
+
+    ddlDateTime = pp.Or(
+        map(pp.CaselessLiteral, sorted(ddlDateTimeTypes, reverse=True))
+    ).setParseAction(pp.replaceWith("time_point"))
+
+    ddlTime = pp.Or(
+        map(pp.CaselessLiteral, sorted(ddlTimeTypes, reverse=True))
+    ).setParseAction(pp.replaceWith("time_of_day"))
+
+    ddlUnknown = pp.Word(pp.alphanums).setParseAction(pp.replaceWith("UNKNOWN"))
+
+    ddlType = (
+        ddlBoolean
+        | ddlInteger
+        | ddlSerial
+        | ddlFloatingPoint
+        | ddlText
+        | ddlBlob
+        | ddlDateTime
+        | ddlDate
+        | ddlTime
+        | ddlUnknown
+    )
+
+    ddlUnsigned = pp.CaselessLiteral("UNSIGNED").setResultsName("isUnsigned")
+    ddlDigits = "," + pp.Word(pp.nums)
+    ddlWidth = ddlLeft + pp.Word(pp.nums) + pp.Optional(ddlDigits) + ddlRight
+    ddlTimezone = (
+        (pp.CaselessLiteral("with") | pp.CaselessLiteral("without"))
+        + pp.CaselessLiteral("time")
+        + pp.CaselessLiteral("zone")
+    )
+
+    ddlNotNull = pp.Group(
+        pp.CaselessLiteral("NOT") + pp.CaselessLiteral("NULL")
+    ).setResultsName("notNull")
+    ddlDefaultValue = pp.CaselessLiteral("DEFAULT").setResultsName("hasDefaultValue")
+
+    ddlAutoKeywords = [
+        "AUTO_INCREMENT",
+        "AUTOINCREMENT",
+        "SMALLSERIAL",
+        "SERIAL",
+        "BIGSERIAL",
+        "GENERATED",
+    ]
+    ddlAutoValue = pp.Or(map(pp.CaselessLiteral, sorted(ddlAutoKeywords, reverse=True)))
+
+    ddlConstraintKeywords = [
+        "CONSTRAINT",
+        "PRIMARY",
+        "FOREIGN",
+        "KEY",
+        "FULLTEXT",
+        "INDEX",
+        "UNIQUE",
+        "CHECK",
+        "PERIOD",
+    ]
+    ddlConstraint = pp.Group(
+        pp.Or(map(pp.CaselessLiteral, sorted(ddlConstraintKeywords, reverse=True)))
+        + ddlExpression
+    ).setResultsName("isConstraint")
+
+    ddlColumn = pp.Group(
+        ddlName("name")
+        + ddlType("type")
+        + pp.Suppress(pp.Optional(ddlWidth))
+        + pp.Suppress(pp.Optional(ddlTimezone))
+        + pp.ZeroOrMore(
+            ddlUnsigned("isUnsigned")
+            | ddlNotNull("notNull")
+            | pp.CaselessLiteral("null")
+            | ddlAutoValue("hasAutoValue")
+            | ddlDefaultValue("hasDefaultValue")
+            | pp.Suppress(pp.OneOrMore(pp.Or(map(pp.CaselessLiteral, sorted(ddlConstraintKeywords, reverse=True)))))
+            | pp.Suppress(ddlExpression)
+        )
+    )
+
+    # CREATE TABLE parser
+    ddlIfNotExists = pp.Group(
+        pp.CaselessLiteral("IF") + pp.CaselessLiteral("NOT") + pp.CaselessLiteral("EXISTS")
+    ).setResultsName("ifNotExists")
+    ddlOrReplace = pp.Group(
+        pp.CaselessLiteral("OR") + pp.CaselessLiteral("REPLACE")
+    ).setResultsName("orReplace")
+    ddlCreateTable = pp.Group(
+        pp.CaselessLiteral("CREATE")
+        + pp.Suppress(pp.Optional(ddlOrReplace))
+        + pp.CaselessLiteral("TABLE")
+        + pp.Suppress(pp.Optional(ddlIfNotExists))
+        + ddlName.setResultsName("tableName")
+        + ddlLeft
+        + pp.Group(pp.delimitedList(pp.Suppress(ddlConstraint) | ddlColumn)).setResultsName(
+            "columns"
+        )
+        + ddlRight
+    ).setResultsName("create")
+    # ddlString.setDebug(True) #uncomment to debug pyparsing
+
+    ddl = pp.OneOrMore(pp.Suppress(pp.SkipTo(ddlCreateTable, False)) + ddlCreateTable)
+
+    ddlComment = pp.oneOf(["--", "#"]) + pp.restOfLine
+    ddl.ignore(ddlComment)

 def testBoolean():
     for t in ddlBooleanTypes:
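As a side note, here is a minimal sketch (not part of the commit, assuming a reasonably recent pyparsing) of why the rebuild step via initDllParser() is needed: a pyparsing expression captures the type list's contents at construction time, so extending a list after the parser exists has no effect until the parser is built again.

```python
import pyparsing as pp

types = ["bool", "boolean"]  # stand-in for one of the ddl*Types lists
parser = pp.Or(map(pp.CaselessLiteral, sorted(types, reverse=True)))

types.append("one_or_zero")  # extend the list after the parser was built...
rebuilt = pp.Or(map(pp.CaselessLiteral, sorted(types, reverse=True)))

print(parser.matches("one_or_zero"))   # False - the old parser does not see the addition
print(rebuilt.matches("one_or_zero"))  # True  - a rebuilt parser accepts the custom type
```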
@@ -423,6 +433,7 @@ def testPrimaryKeyAutoIncrement():
     assert column.hasAutoValue

 def testParser():
+    initDllParser()
     testBoolean()
     testInteger()
     testSerial()

@@ -441,10 +452,8 @@ def testParser():
     testPrimaryKeyAutoIncrement()


-# CODE GENERATOR
 # HELPERS


 def get_include_guard_name(namespace, inputfile):
     val = re.sub("[^A-Za-z0-9]+", "_", namespace + "_" + os.path.basename(inputfile))
     return val.upper()

@@ -485,6 +494,15 @@ def setArgumentBool(s, bool_value):
     var_name = first_lower(re.sub("(\s|-|[0-9])(\S)", repl_func_for_args, s))
     globals()[var_name] = bool_value

+def loadExtendedTypesFile(filename):
+    import csv
+    with open(filename, newline='') as csvfile:
+        reader = csv.DictReader(csvfile, fieldnames=["baseType"], restkey="extendedTypes", delimiter=',')
+        for row in reader:
+            var_values = [clean_val for value in row['extendedTypes'] if (clean_val := value.strip(" \"'"))]
+            if var_values:
+                var_name = f"ddl{row['baseType']}Types"
+                globals()[var_name].extend(var_values)
+
 def escape_if_reserved(name):
     reserved_names = [
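A minimal, self-contained sketch (not from the commit) of what the loadExtendedTypesFile helper above does to the module-level type lists; the helper below mirrors the same DictReader/restkey approach, and ddlTextTypes is an abbreviated stand-in for the script's real list:

```python
import csv
import io

ddlTextTypes = ["char", "text"]  # abbreviated stand-in for the script's list

def load_extended_types(stream):
    # First column is the internal type name, the rest of the row are
    # the custom column types to be mapped onto it.
    reader = csv.DictReader(stream, fieldnames=["baseType"], restkey="extendedTypes", delimiter=",")
    for row in reader:
        values = [v.strip(" \"'") for v in row.get("extendedTypes") or [] if v.strip(" \"'")]
        if values:
            globals()[f"ddl{row['baseType'].strip()}Types"].extend(values)

load_extended_types(io.StringIO("Text, url, uuid\n"))
print(ddlTextTypes)  # ['char', 'text', 'url', 'uuid']
```

After the lists are extended this way, the next call to initDllParser() builds a parser that treats the listed column types like the corresponding built-in ones.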
@@ -524,6 +542,10 @@ def endHeader(header, nsList):
 def help_message():
     arg_string = ""
     pad = 0
+    # The dataTypeFileArg is handled differently from the normal optionalArgs
+    # and only added to the list here to make use of the formatting of the help.
+    optionalArgs[dataTypeFileArg] = f"path to a csv that contains custom datatype mappings. The format is '{dataTypeFileArg}=path/to/file.csv' (See the README)."
     for argument in list(optionalArgs.keys()):
         if len(argument) > pad:
             pad = len(argument)

@@ -560,7 +582,7 @@ noTimestampWarning = False
 autoId = False
 identityNaming = False
 splitTables = False
+dataTypeFileArg = "--datatype-file"

 def createHeader():
     global noTimestampWarning

@@ -575,6 +597,9 @@ def createHeader():
         if arg in list(optionalArgs.keys()):
             setArgumentBool(arg, True)
             firstPositional += 1
+        if dataTypeFileArg in arg:
+            loadExtendedTypesFile(arg.split('=')[1])
+            firstPositional += 1
         else:
             pass
@@ -590,6 +615,8 @@ def createHeader():
     pathToHeader = sys.argv[firstPositional + 1] + ("/" if splitTables else ".h")
     namespace = sys.argv[firstPositional + 2]

+    initDllParser()
+
     try:
         tableCreations = ddl.parseFile(pathToDdl)
     except pp.ParseException as e:

@@ -738,8 +765,9 @@ def createHeader():
         print("Error: unsupported datatypes.")
         print("Possible solutions:")
         print("A) Implement this datatype (examples: sqlpp11/data_types)")
-        print("B) Extend/upgrade ddl2cpp (edit types map)")
-        print("C) Raise an issue on github")
+        print(f"B) Use the '{dataTypeFileArg}' command line argument to map the type to a known type (example: README)")
+        print("C) Extend/upgrade ddl2cpp (edit types map)")
+        print("D) Raise an issue on github")
         sys.exit(10)  # return non-zero error code, we might need it for automation
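With dataTypeFileArg set to "--datatype-file", the failure path should therefore print roughly the following (reconstructed from the print statements above, not captured output):

```
Error: unsupported datatypes.
Possible solutions:
A) Implement this datatype (examples: sqlpp11/data_types)
B) Use the '--datatype-file' command line argument to map the type to a known type (example: README)
C) Extend/upgrade ddl2cpp (edit types map)
D) Raise an issue on github
```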
CMake test configuration:

@@ -84,5 +84,30 @@ if (${Python3_Interpreter_FOUND})
             "${sqlpp.scripts.generated.sample.include}.h")
         target_link_libraries(sqlpp.scripts.compiled.${sample_name} PRIVATE sqlpp11)
     endforeach()
+
+    set(custom_type_sql "ddl2cpp_sample_good_custom_type")
+    include_directories(${CMAKE_CURRENT_BINARY_DIR})
+    add_test(NAME sqlpp11.scripts.ddl2cpp.bad_custom_types
+        COMMAND "${Python3_EXECUTABLE}" "${CMAKE_CURRENT_LIST_DIR}/../../scripts/ddl2cpp"
+            "${CMAKE_CURRENT_LIST_DIR}/${custom_type_sql}.sql"
+            "${CMAKE_CURRENT_BINARY_DIR}/fail"
+            test)
+    set_tests_properties(sqlpp11.scripts.ddl2cpp.bad_custom_types PROPERTIES
+        PASS_REGULAR_EXPRESSION "Error: unsupported datatypes.")
+
+    set(sqlpp.scripts.generated.custom_type_sql.include "${CMAKE_CURRENT_BINARY_DIR}/${custom_type_sql}")
+    add_custom_command(
+        OUTPUT "${sqlpp.scripts.generated.custom_type_sql.include}.h"
+        COMMAND "${Python3_EXECUTABLE}" "${CMAKE_CURRENT_LIST_DIR}/../../scripts/ddl2cpp"
+            "--datatype-file=${CMAKE_CURRENT_LIST_DIR}/custom_types.csv"
+            "${CMAKE_CURRENT_LIST_DIR}/${custom_type_sql}.sql"
+            "${sqlpp.scripts.generated.custom_type_sql.include}"
+            test
+        DEPENDS "${CMAKE_CURRENT_LIST_DIR}/${custom_type_sql}.sql"
+        VERBATIM)
+
+    add_executable(sqlpp.scripts.compiled.${custom_type_sql} ${custom_type_sql}.cpp
+        "${sqlpp.scripts.generated.custom_type_sql.include}.h")
+    target_link_libraries(sqlpp.scripts.compiled.${custom_type_sql} PRIVATE sqlpp11)
 endif()
 endif()
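The negative test registered above (a run without the mapping file must fail with "Error: unsupported datatypes.") and the compile target for the generated header follow the naming of the existing script tests, so in a test-enabled build they should be selectable with CTest's standard name filtering, e.g. `ctest -R sqlpp11.scripts.ddl2cpp`; that filtering is plain CTest behaviour, not something this commit adds.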
tests/scripts/custom_types.csv — new file, 9 lines

@@ -0,0 +1,9 @@
+Boolean, CustomBooleanType
+Integer, CustomIntegerType, SPECIAL INT
+Serial, CustomSerialType
+FloatingPoint, CustomFloatingPointType
+Text, CustomTextType, another_text_type
+Blob, CustomBlobType
+Date, CustomDateType
+DateTime, CustomDateTimeType
+Time, CustomTimeType
tests/scripts/ddl2cpp_sample_good_custom_type.cpp — new file, 32 lines

@@ -0,0 +1,32 @@
+#include <sqlpp11/chrono.h>
+#include <ddl2cpp_sample_good_custom_type.h>
+
+int main()
+{
+  test::TabFoo tab_foo;
+  tab_foo.myBoolean = true;
+  tab_foo.myInteger = 5;
+  tab_foo.mySerial = 10;
+  tab_foo.myFloatingPoint = 12.34;
+  tab_foo.myText = "test";
+  tab_foo.myBlob = "blob";
+  tab_foo.myDate = sqlpp::chrono::day_point{};
+  tab_foo.myDateTime = std::chrono::system_clock::now();
+  tab_foo.myTime = std::chrono::seconds{10};
+  // Special cases
+  tab_foo.mySecondText = "another text";
+  tab_foo.myTypeWithSpaces = 20;
+  // Capitalisation
+  tab_foo.capBoolean = false;
+  // Built-in types
+  tab_foo.builtinBoolean = true;
+  tab_foo.builtinInteger = 5;
+  tab_foo.builtinSerial = 10;
+  tab_foo.builtinFloatingPoint = 12.34;
+  tab_foo.builtinText = "test";
+  tab_foo.builtinBlob = "blob";
+  tab_foo.builtinDate = sqlpp::chrono::day_point{};
+  tab_foo.builtinDateTime = std::chrono::system_clock::now();
+  tab_foo.builtinTime = std::chrono::seconds{10};
+}
tests/scripts/ddl2cpp_sample_good_custom_type.sql — new file, 54 lines

@@ -0,0 +1,54 @@
+/*
+ * Copyright (c) 2013-2015, Roland Bock
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without modification,
+ * are permitted provided that the following conditions are met:
+ *
+ * Redistributions of source code must retain the above copyright notice, this
+ * list of conditions and the following disclaimer.
+ *
+ * Redistributions in binary form must reproduce the above copyright notice, this
+ * list of conditions and the following disclaimer in the documentation and/or
+ * other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+ * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+ * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
+ * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+ * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+ * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+CREATE TABLE tab_foo
+(
+  myBoolean CustomBooleanType,
+  myInteger CustomIntegerType,
+  mySerial CustomSerialType,
+  myFloatingPoint CustomFloatingPointType,
+  myText CustomTextType,
+  myBlob CustomBlobType,
+  myDate CustomDateType,
+  myDateTime CustomDateTimeType,
+  myTime CustomTimeType,
+  -- Some more special cases
+  mySecondText another_text_type,
+  myTypeWithSpaces SPECIAL INT,
+  -- Checking capitalisation of types
+  capBoolean CUSTOMBOOLEANTYPE,
+  -- Ensuring built in types still function
+  builtinBoolean BOOLEAN,
+  builtinInteger INTEGER,
+  builtinSerial SERIAL,
+  builtinFloatingPoint NUMERIC,
+  builtinText TEXT,
+  builtinBlob BINARY,
+  builtinDate DATE,
+  builtinDateTime TIMESTAMPTZ,
+  builtinTime TIME WITH TIME ZONE
+
+) WITH SYSTEM VERSIONING; -- enable System-Versioning for this table