Document Data Model & Mongo Query Language (#358)
datomo authored Oct 28, 2021
1 parent 3ee4dcf commit d9d3afa
Showing 215 changed files with 15,655 additions and 1,177 deletions.
@@ -1,5 +1,5 @@
/*
* Copyright 2019-2020 The Polypheny Project
* Copyright 2019-2021 The Polypheny Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -157,19 +157,25 @@ public void implement( CassandraImplementContext context ) {
context.addInsertValues( valuesList );
} else {
context.visitChild( 0, getInput() );
final CassandraRules.RexToCassandraTranslator translator = new CassandraRules.RexToCassandraTranslator( (JavaTypeFactory) getCluster().getTypeFactory(), CassandraRules.cassandraPhysicalFieldNames( getInput().getRowType() ) );
final CassandraRules.RexToCassandraTranslator translator = new CassandraRules.RexToCassandraTranslator(
(JavaTypeFactory) getCluster().getTypeFactory(),
CassandraRules.cassandraPhysicalFieldNames( getInput().getRowType() ) );
final List<Selector> fields = new ArrayList<>();
for ( Pair<RexNode, String> pair : getNamedProjects() ) {
if ( pair.left instanceof RexInputRef ) {
final String name = pair.right;
String name = pair.right;
// getRowType()
// ((RexInputRef) pair.left);
final String originalName = pair.left.accept( translator );
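// Quote aliases that begin with an underscore (presumably the document "_id" field), since unquoted CQL identifiers may not start with "_".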
if ( name.startsWith( "_" ) ) {
name = "\"" + name + "\"";
}
fields.add( Selector.column( originalName ).as( name ) );
}
}
context.addSelectColumns( fields );
}
}

}

@@ -1,5 +1,5 @@
/*
* Copyright 2019-2020 The Polypheny Project
* Copyright 2019-2021 The Polypheny Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -95,6 +95,7 @@ public static DataType getDataType( PolyType polyType, UserDefinedType arrayCont
case CHAR:
// TODO: What to return for char?
case VARCHAR:
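// Cassandra has no native JSON column type, so JSON values are stored as text.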
case JSON:
return DataTypes.TEXT;
case BINARY:
case VARBINARY:
@@ -494,4 +495,5 @@ private static List<Object> createListForArrays( List<RexNode> operands ) {
}
return list;
}

}
96 changes: 63 additions & 33 deletions catalog/src/main/java/org/polypheny/db/catalog/CatalogImpl.java
@@ -606,6 +606,7 @@ private void insertDefaultData() throws GenericCatalogException, UnknownUserExce
if ( !userNames.containsKey( "pa" ) ) {
addUser( "pa", "" );
}
Catalog.defaultUser = systemId;

//////////////
// init database
@@ -615,13 +616,14 @@
} else {
databaseId = getDatabase( "APP" ).id;
}
Catalog.defaultDatabaseId = databaseId;

//////////////
// init schema

long schemaId;
if ( !schemaNames.containsKey( new Object[]{ databaseId, "public" } ) ) {
schemaId = addSchema( "public", databaseId, 1, SchemaType.RELATIONAL );
schemaId = addSchema( "public", databaseId, 1, SchemaType.getDefault() );
} else {
schemaId = getSchema( "APP", "public" ).id;
}
@@ -668,6 +670,12 @@ private void insertDefaultData() throws GenericCatalogException, UnknownUserExce
restSettings.put( "port", "8089" );
restSettings.put( "maxUploadSizeMb", "10000" );
addQueryInterface( "rest", "org.polypheny.db.restapi.HttpRestServer", restSettings );

// Add MongoQL query interface
Map<String, String> mongoSettings = new HashMap<>();
mongoSettings.put( "port", "2717" );
mongoSettings.put( "maxUploadSizeMb", "10000" );
addQueryInterface( "mongo", "org.polypheny.db.mongoql.MongoQlServer", mongoSettings );
}

try {
@@ -686,38 +694,38 @@ private void addDefaultCsvColumns( CatalogAdapter csv ) throws UnknownSchemaExce
CatalogSchema schema = getSchema( "APP", "public" );
CatalogTable depts = getTable( schema.id, "depts" );

addDefaultColumn( csv, depts, "deptno", PolyType.INTEGER, null, 1, null );
addDefaultColumn( csv, depts, "name", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 2, 20 );
addDefaultCsvColumn( csv, depts, "deptno", PolyType.INTEGER, null, 1, null );
addDefaultCsvColumn( csv, depts, "name", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 2, 20 );

CatalogTable emps = getTable( schema.id, "emps" );
addDefaultColumn( csv, emps, "empid", PolyType.INTEGER, null, 1, null );
addDefaultColumn( csv, emps, "deptno", PolyType.INTEGER, null, 2, null );
addDefaultColumn( csv, emps, "name", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 3, 20 );
addDefaultColumn( csv, emps, "salary", PolyType.INTEGER, null, 4, null );
addDefaultColumn( csv, emps, "commission", PolyType.INTEGER, null, 5, null );
addDefaultCsvColumn( csv, emps, "empid", PolyType.INTEGER, null, 1, null );
addDefaultCsvColumn( csv, emps, "deptno", PolyType.INTEGER, null, 2, null );
addDefaultCsvColumn( csv, emps, "name", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 3, 20 );
addDefaultCsvColumn( csv, emps, "salary", PolyType.INTEGER, null, 4, null );
addDefaultCsvColumn( csv, emps, "commission", PolyType.INTEGER, null, 5, null );

CatalogTable emp = getTable( schema.id, "emp" );
addDefaultColumn( csv, emp, "employeeno", PolyType.INTEGER, null, 1, null );
addDefaultColumn( csv, emp, "age", PolyType.INTEGER, null, 2, null );
addDefaultColumn( csv, emp, "gender", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 3, 20 );
addDefaultColumn( csv, emp, "maritalstatus", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 4, 20 );
addDefaultColumn( csv, emp, "worklifebalance", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 5, 20 );
addDefaultColumn( csv, emp, "education", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 6, 20 );
addDefaultColumn( csv, emp, "monthlyincome", PolyType.INTEGER, null, 7, null );
addDefaultColumn( csv, emp, "relationshipjoy", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 8, 20 );
addDefaultColumn( csv, emp, "workingyears", PolyType.INTEGER, null, 9, null );
addDefaultColumn( csv, emp, "yearsatcompany", PolyType.INTEGER, null, 10, null );
addDefaultCsvColumn( csv, emp, "employeeno", PolyType.INTEGER, null, 1, null );
addDefaultCsvColumn( csv, emp, "age", PolyType.INTEGER, null, 2, null );
addDefaultCsvColumn( csv, emp, "gender", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 3, 20 );
addDefaultCsvColumn( csv, emp, "maritalstatus", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 4, 20 );
addDefaultCsvColumn( csv, emp, "worklifebalance", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 5, 20 );
addDefaultCsvColumn( csv, emp, "education", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 6, 20 );
addDefaultCsvColumn( csv, emp, "monthlyincome", PolyType.INTEGER, null, 7, null );
addDefaultCsvColumn( csv, emp, "relationshipjoy", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 8, 20 );
addDefaultCsvColumn( csv, emp, "workingyears", PolyType.INTEGER, null, 9, null );
addDefaultCsvColumn( csv, emp, "yearsatcompany", PolyType.INTEGER, null, 10, null );

CatalogTable work = getTable( schema.id, "work" );
addDefaultColumn( csv, work, "employeeno", PolyType.INTEGER, null, 1, null );
addDefaultColumn( csv, work, "educationfield", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 2, 20 );
addDefaultColumn( csv, work, "jobinvolvement", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 3, 20 );
addDefaultColumn( csv, work, "joblevel", PolyType.INTEGER, null, 4, null );
addDefaultColumn( csv, work, "jobrole", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 5, 30 );
addDefaultColumn( csv, work, "businesstravel", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 6, 20 );
addDefaultColumn( csv, work, "department", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 7, 25 );
addDefaultColumn( csv, work, "attrition", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 8, 20 );
addDefaultColumn( csv, work, "dailyrate", PolyType.INTEGER, null, 9, null );
addDefaultCsvColumn( csv, work, "employeeno", PolyType.INTEGER, null, 1, null );
addDefaultCsvColumn( csv, work, "educationfield", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 2, 20 );
addDefaultCsvColumn( csv, work, "jobinvolvement", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 3, 20 );
addDefaultCsvColumn( csv, work, "joblevel", PolyType.INTEGER, null, 4, null );
addDefaultCsvColumn( csv, work, "jobrole", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 5, 30 );
addDefaultCsvColumn( csv, work, "businesstravel", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 6, 20 );
addDefaultCsvColumn( csv, work, "department", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 7, 25 );
addDefaultCsvColumn( csv, work, "attrition", PolyType.VARCHAR, Collation.CASE_INSENSITIVE, 8, 20 );
addDefaultCsvColumn( csv, work, "dailyrate", PolyType.INTEGER, null, 9, null );

// set all needed primary keys
addPrimaryKey( depts.id, Collections.singletonList( getColumn( depts.id, "deptno" ).id ) );
@@ -745,7 +753,7 @@ private void addDefaultCsvColumns( CatalogAdapter csv ) throws UnknownSchemaExce
}


private void addDefaultColumn( CatalogAdapter csv, CatalogTable table, String name, PolyType type, Collation collation, int position, Integer length ) {
private void addDefaultCsvColumn( CatalogAdapter csv, CatalogTable table, String name, PolyType type, Collation collation, int position, Integer length ) {
if ( !checkIfExistsColumn( table.id, name ) ) {
long colId = addColumn( name, table.id, position, type, null, length, null, null, null, false, collation );
String filename = table.name + ".csv";
@@ -762,6 +770,15 @@ private void addDefaultColumn( CatalogAdapter csv, CatalogTable table, String na
}


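// Generic variant of addDefaultCsvColumn above: registers an automatic column placement on the given adapter without a CSV-specific physical file name.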
private void addDefaultColumn( CatalogAdapter adapter, CatalogTable table, String name, PolyType type, Collation collation, int position, Integer length ) {
if ( !checkIfExistsColumn( table.id, name ) ) {
long colId = addColumn( name, table.id, position, type, null, length, null, null, null, false, collation );
addColumnPlacement( adapter.id, colId, PlacementType.AUTOMATIC, "col" + colId, table.name, name, null );
updateColumnPlacementPhysicalPosition( adapter.id, colId, position );
}
}


@Override
public void validateColumns() {
CatalogValidator validator = new CatalogValidator();
@@ -838,8 +855,9 @@ public void deleteDatabase( long databaseId ) {
* @param password of the user
* @return the id of the created user
*/
@Override
public int addUser( String name, String password ) {
CatalogUser user = new CatalogUser( userIdBuilder.getAndIncrement(), name, password );
CatalogUser user = new CatalogUser( userIdBuilder.getAndIncrement(), name, password, 1 );
synchronized ( this ) {
users.put( user.id, user );
userNames.put( user.name, user );
@@ -849,6 +867,18 @@ public int addUser( String name, String password ) {
}


@Override
public void setUserSchema( int userId, long schemaId ) {
CatalogUser user = getUser( userId );
CatalogUser newUser = new CatalogUser( user.id, user.name, user.password, schemaId );
synchronized ( this ) {
users.put( user.id, newUser );
userNames.put( user.name, newUser );
}
listeners.firePropertyChange( "user", null, user );
}
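
A hedged sketch of how addUser and setUserSchema interact; the catalog handle, user name, and schema name below are illustrative assumptions, not calls taken from this commit:

// Hypothetical call sequence, e.g. in a bootstrap routine or a test:
Catalog catalog = Catalog.getInstance();
int userId = catalog.addUser( "mql_user", "secret" );
long schemaId = catalog.addSchema( "mql", Catalog.defaultDatabaseId, userId, SchemaType.getDefault() );
catalog.setUserSchema( userId, schemaId );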


/**
* Get all databases
* if a pattern is specified, only the ones which conform to it
@@ -3499,7 +3529,7 @@ public void removePartitionFromGroup( long partitionGroupId, Long partitionId )
* Assign the partition to a new partitionGroup
*
* @param partitionId Partition to move
* @param partitionGroupId New target gorup to move the partion to
* @param partitionGroupId New target group to move the partition to
*/
@Override
public void updatePartition( long partitionId, Long partitionGroupId ) {
@@ -4161,9 +4191,9 @@ public void deletePartitionPlacement( int adapterId, long partitionId ) {
/**
* Returns a specific partition entity which is placed on a store.
*
* @param adapterId The adapter on which the requested partitions palcement resides
* @param adapterId The adapter on which the requested partitions placement resides
* @param partitionId The id of the requested partition
* @return The requested PartitionPlacement on that store for agiven is
* @return The PartitionPlacement on the specified store
*/
@Override
public CatalogPartitionPlacement getPartitionPlacement( int adapterId, long partitionId ) {
@@ -4178,7 +4208,7 @@ public CatalogPartitionPlacement getPartitionPlacement( int adapterId, long part


/**
* Returns a list of all Partition Placements which currently reside on a adpater, disregarded of the table.
* Returns a list of all Partition Placements which currently reside on an adapter, regardless of the table.
*
* @param adapterId The adapter on which the requested partition placements reside
* @return A list of all Partition Placements, that are currently located on that specific store
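A minimal, hedged smoke test for the MongoQL query interface registered above (port 2717, as configured in insertDefaultData). It uses the mongodb-driver-sync dependency this commit adds to core/build.gradle; whether the interface accepts a plain MongoDB wire-protocol connection like this is an assumption that this diff does not confirm. The "APP" database and "depts" table are the catalog defaults seen above.

import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoClients;
import org.bson.Document;

public class MongoQlSmokeTest {

    public static void main( String[] args ) {
        // Port 2717 matches the "mongo" query interface registered in insertDefaultData().
        try ( MongoClient client = MongoClients.create( "mongodb://localhost:2717" ) ) {
            // "APP" is the default database; "depts" is one of the default CSV-backed tables.
            Document first = client.getDatabase( "APP" ).getCollection( "depts" ).find().first();
            System.out.println( first );
        }
    }

}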
9 changes: 3 additions & 6 deletions core/_docs/reference.md
@@ -329,12 +329,9 @@ The following is a list of SQL keywords. Reserved keywords are **bold**.
**SYSTEM_TIME**,
**SYSTEM_USER**,
**TABLE**,
**TABLESAMPLE**,
TABLE_NAME,
**TEMPERATURE**,
TEMPORARY,
**THEN**,
TIES,
**TABLESAMPLE**, TABLE_NAME,
**TEMPERATURE**, TEMPORARY,
**THEN**, TIES,
**TIME**,
34 changes: 28 additions & 6 deletions core/build.gradle
@@ -57,6 +57,8 @@ dependencies {

implementation group: "com.drewnoakes", name: "metadata-extractor", version: metadata_extractor_version // Apache 2.0

implementation group: "org.mongodb", name: "mongodb-driver-sync", version: mongodb_driver_sync_version // Apache 2.0

// https://github.com/docker-java/docker-java
implementation group: 'com.github.docker-java', name: 'docker-java', version: java_docker_version // Apache 2.0
implementation group: 'com.github.docker-java', name: 'docker-java-transport-httpclient5', version: java_docker_version //TODO: should probably be independent version in future
@@ -104,24 +106,43 @@ sourceSets {
}
}

/**
* Additional compile pipeline for the MongoDB query language (MQL) parser
*/
task generateFmppSourcesDocument {
inputs.dir("src/main/codegen/templates/mql")
outputs.dir(project.buildDir.absolutePath + "/generated-sources/fmpp")
doLast {
ant.fmpp(configuration: "src/main/codegen/config_doc.fmpp",
sourceRoot: "src/main/codegen/templates/mql",
outputRoot: project.buildDir.absolutePath + "/generated-sources/fmpp/mql")
}
}
task generateParserDocument (type: CompileJavaccTask) {
dependsOn("generateFmppSourcesDocument")
getConventionMapping().map("classpath", { configurations.javacc })
arguments = [static: "false", lookahead: "2"]
inputDirectory = file(project.buildDir.absolutePath + "/generated-sources/fmpp/mql/javacc")
outputDirectory = file(project.buildDir.absolutePath + "/generated-sources/org/polypheny/db/mql/parser/impl")
}
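// Note: this MQL pipeline mirrors the SQL pipeline below. FMPP first expands the templates
// under src/main/codegen/templates/mql into build/generated-sources/fmpp/mql, then JavaCC
// compiles the generated grammar into build/generated-sources/org/polypheny/db/mql/parser/impl,
// which compileJava consumes via the dependsOn wiring further down.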

/**
* SQL Compile Pipeline
*/
task generateFmppSources {
inputs.dir("src/main/codegen/templates")
inputs.dir("src/main/codegen/templates/sql")
outputs.dir(project.buildDir.absolutePath + "/generated-sources/fmpp")
doLast {
ant.fmpp(configuration: "src/main/codegen/config.fmpp",
sourceRoot: "src/main/codegen/templates",
outputRoot: project.buildDir.absolutePath + "/generated-sources/fmpp")
sourceRoot: "src/main/codegen/templates/sql",
outputRoot: project.buildDir.absolutePath + "/generated-sources/fmpp/sql")
}
}
task generateParser (type: CompileJavaccTask) {
dependsOn("generateFmppSources")
getConventionMapping().map("classpath", { configurations.javacc })
arguments = [static: "false", lookahead: "2"]
inputDirectory = file(project.buildDir.absolutePath + "/generated-sources/fmpp/javacc")
inputDirectory = file(project.buildDir.absolutePath + "/generated-sources/fmpp/sql/javacc")
outputDirectory = file(project.buildDir.absolutePath + "/generated-sources/org/polypheny/db/sql/parser/impl")
}
task generateJdbcVersionProperties(type: Copy) {
@@ -130,7 +151,7 @@ task generateJdbcVersionProperties(type: Copy) {
expand(version: project.version, versionMajor: versionMajor, versionMinor: versionMinor, buildTimestamp: new Date().format("yyyy-MM-dd'T'HH:mm:ssZ"))
}
compileJava {
dependsOn("generateParser", "generateJdbcVersionProperties")
dependsOn("generateParserDocument", "generateParser", "generateJdbcVersionProperties")
}


@@ -148,7 +169,8 @@ javadoc {
options.addBooleanOption("html5", true)
}
// Exclude build dir
exclude "fmpp/javacc/**"
exclude "fmpp/sql/javacc/**"
exclude "fmpp/mql/javacc/**"
}


2 changes: 2 additions & 0 deletions core/src/main/codegen/config.fmpp
Expand Up @@ -25,6 +25,7 @@ data: {
"org.polypheny.db.schema.ColumnStrategy"
"org.polypheny.db.sql.SqlCreate"
"org.polypheny.db.sql.SqlDrop"
"org.polypheny.db.catalog.Catalog.SchemaType"
"org.polypheny.db.sql.ddl.SqlDdlNodes"
"org.polypheny.db.sql.ddl.SqlTruncate"
"org.polypheny.db.sql.ddl.SqlAlterConfig"
Expand Down Expand Up @@ -283,6 +284,7 @@ data: {
"SQL_DECIMAL"
"SQL_DOUBLE"
"SQL_FLOAT"
"SQL_JSON"
"SQL_INTEGER"
"SQL_INTERVAL_DAY"
"SQL_INTERVAL_DAY_TO_HOUR"
37 changes: 37 additions & 0 deletions core/src/main/codegen/config_doc.fmpp
@@ -0,0 +1,37 @@
# This file is an FMPP (http://fmpp.sourceforge.net/) configuration file that allows clients to extend Polypheny-DB's MongoDB query language (MQL) parser to support application-specific
# MQL statements.
#
# The MQL parser template (DocumentParser.jj) is written in JavaCC and draws inspiration from the original
# SQL parser.
#
# The parser template file (DocumentParser.jj), along with this file, is packaged as part of the polyphenydb-core-<version>.jar under the "codegen" directory.


data: {
parser: {
# Generated parser implementation package and class name.
package: "org.polypheny.db.mql.mql.parser.impl",
class: "MqlParserImpl",

# List of additional classes and packages to import.
# Example: "org.polypheny.db.mql.mql.*", "java.util.List".
imports: [
]

# List of new keywords. Example: "DATABASES", "TABLES". If the keyword is not a reserved keyword, add it to the 'nonReservedKeywords' section.
keywords: [

]
nonReservedKeywords: [

]

includeCompoundIdentifier: true
includeBraces: true
includeAdditionalDeclarations: false
}
}

freemarkerLinks: {
includes: includes/
}
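
As a hedged illustration of the extension point this file describes, a client-defined keyword would be added to the sections above; the keyword below is hypothetical and not part of this commit:

# Hypothetical client extension:
#   nonReservedKeywords: [
#     "EXPLAIN_PLAN"
#   ]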