A Python module that parses DDL and converts it to BigQuery JSON schema and BigQuery DDL statements.
- DDL parse and get table schema information.
- Currently, only the CREATE TABLE statement is supported.
- Convert to BigQuery JSON schema and BigQuery DDL statements.
- Supported databases are MySQL, PostgreSQL, Oracle, Redshift.
Requirements:
- Python >= 3.4
- pyparsing
pip install:
$ pip install ddlparse
command install:
$ python setup.py install
pip update:
$ pip install ddlparse --upgrade
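A minimal quick-start sketch of the basic flow (the table name and columns here are only illustrative); the full example below walks through every parse pattern:

from ddlparse.ddlparse import DdlParse

ddl = "CREATE TABLE Sample (Id integer PRIMARY KEY, Name varchar(100))"
table = DdlParse().parse(ddl)
print(table.to_bigquery_fields())  # BigQuery JSON schema for the parsed table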
from ddlparse.ddlparse import DdlParse
sample_ddl = """
CREATE TABLE My_Schema.Sample_Table (
  Id integer PRIMARY KEY COMMENT 'User ID',
  Name varchar(100) NOT NULL COMMENT 'User name',
  Total bigint NOT NULL,
  Avg decimal(5,1) NOT NULL,
  Created_At date, -- Oracle 'DATE' -> BigQuery 'DATETIME'
  UNIQUE (NAME)
);
"""
# parse pattern (1-1)
table = DdlParse().parse(sample_ddl)
# parse pattern (1-2) : Specify source database
table = DdlParse().parse(ddl=sample_ddl, source_database=DdlParse.DATABASE.oracle)
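# source_database switches the data type mapping, e.g. Oracle 'DATE' -> BigQuery 'DATETIME'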
# parse pattern (2-1)
parser = DdlParse(sample_ddl)
table = parser.parse()
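# to_bigquery_fields() returns the parsed column definitions as a BigQuery JSON schema string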
print("* BigQuery Fields * : normal")
print(table.to_bigquery_fields())
# parse pattern (2-2) : Specify source database
parser = DdlParse(ddl=sample_ddl, source_database=DdlParse.DATABASE.oracle)
table = parser.parse()
# parse pattern (3-1)
parser = DdlParse()
parser.ddl = sample_ddl
table = parser.parse()
# parse pattern (3-2) : Specify source database
parser = DdlParse()
parser.source_database = DdlParse.DATABASE.oracle
parser.ddl = sample_ddl
table = parser.parse()
print("* BigQuery Fields * : Oracle")
print(table.to_bigquery_fields())
print("* TABLE *")
print("schema = {} : name = {} : is_temp = {}".format(table.schema, table.name, table.is_temp))
print("* BigQuery Fields *")
print(table.to_bigquery_fields())
print("* BigQuery Fields - column name to lower case / upper case *")
print(table.to_bigquery_fields(DdlParse.NAME_CASE.lower))
print(table.to_bigquery_fields(DdlParse.NAME_CASE.upper))
print("* COLUMN *")
for col in table.columns.values():
    col_info = []
    col_info.append("name = {}".format(col.name))
    col_info.append("data_type = {}".format(col.data_type))
    col_info.append("length = {}".format(col.length))
    col_info.append("precision(=length) = {}".format(col.precision))
    col_info.append("scale = {}".format(col.scale))
    col_info.append("constraint = {}".format(col.constraint))
    col_info.append("not_null = {}".format(col.not_null))
    col_info.append("PK = {}".format(col.primary_key))
    col_info.append("unique = {}".format(col.unique))
    col_info.append("bq_legacy_data_type = {}".format(col.bigquery_legacy_data_type))
    col_info.append("bq_standard_data_type = {}".format(col.bigquery_standard_data_type))
    col_info.append("comment = '{}'".format(col.comment))
    col_info.append("description(=comment) = '{}'".format(col.description))
    col_info.append("BQ {}".format(col.to_bigquery_field()))
    print(" : ".join(col_info))
print("* DDL (CREATE TABLE) statements *")
print(table.to_bigquery_ddl())
print("* DDL (CREATE TABLE) statements - dataset name, table name and column name to lower case / upper case *")
print(table.to_bigquery_ddl(DdlParse.NAME_CASE.lower))
print(table.to_bigquery_ddl(DdlParse.NAME_CASE.upper))
print("* Get Column object (case insensitive) *")
print(table.columns["total"])
Author: Shinichi Takii <shinichi.takii@gmail.com>
- Repository : https://github.com/shinichi-takii/ddlparse
- PyPI Package : https://pypi.org/project/ddlparse/
- pyparsing : https://github.com/pyparsing/pyparsing