diff --git a/doc/design_intermediate_representation.md b/doc/design_intermediate_representation.md
index 86fec3269a..1398947e48 100644
--- a/doc/design_intermediate_representation.md
+++ b/doc/design_intermediate_representation.md
@@ -21,77 +21,17 @@ We propose the following code structures.

 ```
 sql/
   ...
   codegen/
+    feature_column.go
+    intermediate_representation.go
     tensorflow/
-      train.go
-      predict.go
-      analyze.go
+      ...
     xgboost/
       ...
 ```

-The `tensorflow` package will expose function `func Train(ir sql.TrainIR) string, error`, which takes the `sql`'s `TrainIR` and returns a generated Python program.
+The IR and feature column definitions will reside in `sql/codegen`. Each code generator package forms a subdirectory of `sql/codegen`, such as `sql/codegen/tensorflow/`.

 ## Intermediate Representation

-We propose the following struct as the IR for code generation.
-
-```go
-package sql
-
-import (
-	"github.com/sql-machine-learning/sqlflow/sql/columns"
-)
-
-type FieldType int
-
-const (
-	Int FieldType = iota
-	Float
-	String
-)
-
-// FieldMeta contains the meta information for decoding and feature columns
-type FieldMeta struct {
-	DType FieldType // e.g. "float", "int32"
-	Delimiter string // e.g. ","
-	Shape []int // e.g. [1], [1 2 3]
-	IsSparse bool // e.g. false
-	FeatureColumn []columns.FeatureColumn // e.g. [EmbeddingColumn, CategoryIDColumn]
-}
-
-// TrainIR is the intermediate representation for code generation of a training job
-type TrainIR struct {
-	DataSource string // e.g. "hive://root:root@localhost:10000/churn"
-	Select string // e.g. "select * from iris.train"
-	ValidationSelect string // e.g. "select * from iris.val;"
-	Estimator string // e.g. "DNNClassifier"
-	Attribute map[string]interface{} // e.g. {"train.epoch": 1000, "model.hidden_units": [10 10]}
-	Feature map[string]map[string]FieldMeta // e.g. {"feature_columns": {"sepal_length": {"float", "", [1], false}, ...}}
-	Label map[string]FieldMeta // e.g. {"class": {"int32", "", [1], false}}
-}
-
-// PredictIR is the intermediate representation for code generation of a prediction job
-type PredictIR struct {
-	DataSource string // e.g. "hive://root:root@localhost:10000/churn"
-	Select string // e.g. "select * from iris.test"
-	Estimator string // e.g. "DNNClassifier"
-	Attribute map[string]interface{} // e.g. {"predict.batch_size": 32}
-	Feature map[string]map[string]FieldMeta // e.g. {"feature_columns": {"sepal_length": {"float", "", [1], false}, ...}}
-	Label map[string]FieldMeta // e.g. {"class": {"int32", "", [1], false}}
-	ReusltTable string // e.g. "iris.predict"
-}
-
-// AnalyzeIR is the intermediate representation for code generation of a analysis job
-type AnalyzeIR struct {
-	DataSource string // e.g. "hive://root:root@localhost:10000/churn"
-	Select string // e.g. "select * from iris.train"
-	Estimator string // e.g. "DNNClassifier"
-	Attribute map[string]interface{} // e.g. {"analyze.plot_type": "bar"}
-	Feature map[string]map[string]FieldMeta // e.g. {"feature_columns": {"sepal_length": {"float", "", [1], false}, ...}}
-	Label map[string]FieldMeta // e.g. {"class": {"int32", "", [1], false}}
-}
-```
-
-Please be aware that all the IR excludes the information of the current working directory. This information belongs to the `executor` in `sql` package. For a prediction/analyze job, the `executor` should recover everything produced by the training job.
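+As a sketch of how a code generator consumes the IR (assuming the final API keeps the shape of the earlier proposal; the exact signature may differ), the `tensorflow` package could expose a `Train` function that takes a `TrainIR` and returns the generated Python program:
+
+```go
+package tensorflow
+
+import "github.com/sql-machine-learning/sqlflow/sql/codegen"
+
+// Train generates a Python training program from the given TrainIR.
+func Train(ir codegen.TrainIR) (string, error) {
+	// Fill in a program template using ir.Estimator, ir.Attributes,
+	// ir.Features, and ir.Label.
+	return "", nil
+}
+```
+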
+Please refer to [sql/codegen/intermediate_representation.go](sql/codegen/intermediate_representation.go) and [sql/codegen/feature_column.go](sql/codegen/feature_column.go) for implementation details.
-Please be aware that `TrainIR` excludes the saving table name. This information belongs to the `executor` in `sql` package.
diff --git a/sql/codegen/feature_column.go b/sql/codegen/feature_column.go
new file mode 100644
index 0000000000..40ef857ca5
--- /dev/null
+++ b/sql/codegen/feature_column.go
@@ -0,0 +1,22 @@
+// Copyright 2019 The SQLFlow Authors. All rights reserved.
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package codegen
+
+// NumericColumn represents a dense tensor for the model input.
+//
+// FieldMeta contains the meta information for decoding the field. Please be aware
+// that FieldMeta also carries the dimension and data type information.
+type NumericColumn struct {
+	FieldMeta *FieldMeta
+}
diff --git a/sql/codegen/intermediate_representation.go b/sql/codegen/intermediate_representation.go
new file mode 100644
index 0000000000..cb19556daf
--- /dev/null
+++ b/sql/codegen/intermediate_representation.go
@@ -0,0 +1,123 @@
+// Copyright 2019 The SQLFlow Authors. All rights reserved.
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package codegen
+
+// FieldType indicates the field type of a table column.
+type FieldType int
+
+const (
+	// Int indicates the corresponding table column is an integer
+	Int FieldType = iota
+	// Float indicates the corresponding table column is a float
+	Float
+	// String indicates the corresponding table column is a string
+	String
+)
+
+// FieldMeta contains the meta information for decoding. A field is a selected column of a SQL result.
+//
+// Name indicates the name of a field.
+//
+// DType indicates the data type of a field. For example: Int, Float, String.
+//
+// Delimiter indicates the decoding method of a field. For example, the field may
+// contain a string like "1,23,42", which represents the tensor [1, 23, 42].
+//
+// Shape indicates the shape of the tensor represented by a field. For example, the
+// field may contain a string like "1,23,42", which represents a tensor with three
+// elements, so the shape will be [3].
+//
+// IsSparse indicates the tensor type of a field. True means the tensor is a sparse tensor.
+type FieldMeta struct {
+	Name      string    `json:"name"`      // e.g. "sepal_length"
+	DType     FieldType `json:"dtype"`     // e.g. "float", "int32"
+	Delimiter string    `json:"delimiter"` // e.g. ","
+	Shape     []int     `json:"shape"`     // e.g. [1], [1 2 3]
+	IsSparse  bool      `json:"is_sparse"` // e.g. false
+}
+
+// FeatureColumn indicates the feature column to be applied to the field. Please refer to
+// github.com/sql-machine-learning/sqlflow/sql/codegen/feature_column.go for a detailed list of all feature columns.
+type FeatureColumn interface{}
+
+// Attribute represents a parsed entry in the WITH clause.
+type Attribute struct {
+	Key   string
+	Value interface{}
+}
+
+// TrainIR is the intermediate representation for code generation of a training job.
+//
+// Please be aware that TrainIR intentionally excludes the model table name in the
+// INTO clause. The sql package will save the output files of the generated Python
+// program. For prediction and analysis jobs, the sql package will restore an
+// identical working directory.
+type TrainIR struct {
+	// DataSource contains the connection information. For example, "hive://root:root@localhost:10000/churn"
+	DataSource string
+	// Select specifies the query for fetching the training data. For example, "select * from iris.train;".
+	Select string
+	// ValidationSelect specifies the query for fetching the validation data. For example, "select * from iris.val;".
+	ValidationSelect string
+	// Estimator specifies the estimator type. For example, after parsing "select ... train DNNClassifier WITH ...",
+	// the Estimator will be "DNNClassifier".
+	Estimator string
+	// Attributes contains the parsed attributes in the WITH clause. For example, after parsing
+	// "select ... train ... with train.epoch = 1000, model.hidden_units = [10, 10]",
+	// the Attributes will be {{"train.epoch", 1000}, {"model.hidden_units", [10 10]}}.
+	Attributes []Attribute
+	// Features contains a map from the feature target to the list of feature columns in the COLUMN clause.
+	// Multiple COLUMN clauses like
+	// ```
+	// column ... for deep_feature
+	// column ... for wide_feature
+	// ```
+	// will be parsed as {"deep_feature": {...}, "wide_feature": {...}}.
+	// For a single COLUMN clause like "column ...", "feature_columns" will be used as the default map key.
+	Features map[string][]FeatureColumn
+	// Label specifies the feature column in the LABEL clause.
+	Label FeatureColumn
+}
+
+// PredictIR is the intermediate representation for code generation of a prediction job.
+//
+// Please be aware that PredictIR contains the result table name, so the
+// generated Python program is responsible for creating and writing the result table.
+type PredictIR struct {
+	// DataSource contains the connection information. For example, "hive://root:root@localhost:10000/churn"
+	DataSource string
+	// Select specifies the query for fetching the prediction data. For example, "select * from iris.test;".
+	Select string
+	// ResultTable specifies the table to store the prediction result.
+	ResultTable string
+	// Attributes contains the parsed attributes in the WITH clause. For example, after parsing
+	// "select ... predict ... with predict.batch_size = 32 into ...",
+	// the Attributes will be {{"predict.batch_size", 32}}.
+	Attributes []Attribute
+	// TrainIR is the TrainIR used for generating the training job of the corresponding model.
+	TrainIR TrainIR
+}
+
+// AnalyzeIR is the intermediate representation for code generation of an analysis job.
+type AnalyzeIR struct {
+	// DataSource contains the connection information. For example, "hive://root:root@localhost:10000/churn"
+	DataSource string
+	// Select specifies the query for fetching the analysis data. For example, "select * from iris.test;".
+	Select string
+	// Attributes contains the parsed attributes in the WITH clause. For example, after parsing
+	// "select ... analyze ... with analyze.plot_type = "bar"",
+	// the Attributes will be {{"analyze.plot_type", "bar"}}.
+	Attributes []Attribute
+	// TrainIR is the TrainIR used for generating the training job of the corresponding model.
+	TrainIR TrainIR
+}
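+
+// As an illustrative sketch (not part of the implementation), a statement like
+//
+//	SELECT * FROM iris.train
+//	TRAIN DNNClassifier
+//	WITH train.epoch = 1000
+//	COLUMN sepal_length
+//	LABEL class
+//
+// might be translated into a TrainIR along these lines, where the label and the
+// single feature are both dense NumericColumns:
+//
+//	ir := TrainIR{
+//		DataSource: "hive://root:root@localhost:10000/churn",
+//		Select:     "select * from iris.train;",
+//		Estimator:  "DNNClassifier",
+//		Attributes: []Attribute{{Key: "train.epoch", Value: 1000}},
+//		Features: map[string][]FeatureColumn{
+//			"feature_columns": {NumericColumn{FieldMeta: &FieldMeta{
+//				Name: "sepal_length", DType: Float, Shape: []int{1},
+//			}}},
+//		},
+//		Label: NumericColumn{FieldMeta: &FieldMeta{
+//			Name: "class", DType: Int, Shape: []int{1},
+//		}},
+//	}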