Commit
dev check-in for command line utils... run-ddl and run-sql should be DbLoad now, export-schema is DbDump
erilong committed Apr 11, 2012
1 parent 1ad050a commit 5d127a9
Showing 4 changed files with 259 additions and 62 deletions.
DbDump.java
@@ -21,21 +21,31 @@

package org.jumpmind.symmetric;

import java.io.IOException;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Options;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.jumpmind.db.io.DatabaseIO;
import org.jumpmind.db.model.Column;
import org.jumpmind.db.model.Database;
import org.jumpmind.db.model.Table;
import org.jumpmind.db.platform.DatabasePlatformSettings;
import org.jumpmind.db.platform.IDatabasePlatform;
import org.jumpmind.db.platform.mysql.MySqlPlatform;
import org.jumpmind.db.sql.DmlStatement;
import org.jumpmind.db.sql.DmlStatement.DmlType;
import org.jumpmind.db.sql.ISqlRowMapper;
import org.jumpmind.db.sql.ISqlTemplate;
import org.jumpmind.db.sql.Row;
import org.jumpmind.symmetric.csv.CsvWriter;

/**
* Dump the structure and data from database tables to file.
@@ -46,7 +56,9 @@ public class DbDump extends AbstractCommandLauncher {
private static final Log log = LogFactory.getLog(DbDump.class);

private static final String OPTION_XML = "xml";


private static final String OPTION_CSV = "csv";

private static final String OPTION_COMPATIBLE = "compatible";

private static final String OPTION_ADD_DROP_TABLE = "add-drop-table";
@@ -75,6 +87,7 @@ protected void printHelp(Options options) {
protected void buildOptions(Options options) {
super.buildOptions(options);
addOption(options, "x", OPTION_XML, false);
addOption(options, null, OPTION_CSV, false);
addOption(options, null, OPTION_COMPATIBLE, true);
addOption(options, null, OPTION_ADD_DROP_TABLE, false);
addOption(options, null, OPTION_NO_CREATE_INFO, false);
@@ -87,31 +100,50 @@ protected boolean executeOptions(CommandLine line) throws Exception {
// TODO: get table names list as args

if (line.hasOption(OPTION_XML)) {
dumpSchemaAsXml(System.out);
dumpTablesAsXml(System.out, line.getArgs(), line.hasOption(OPTION_NO_CREATE_INFO),
line.hasOption(OPTION_NO_DATA), line.hasOption(OPTION_COMMENTS));
} else if (line.hasOption(OPTION_CSV)) {
dumpTablesAsCsv(System.out, line.getArgs(), line.hasOption(OPTION_NO_DATA), line.hasOption(OPTION_COMMENTS));
} else {
dumpSchemaAsSql(System.out, line.hasOption(OPTION_ADD_DROP_TABLE), line.hasOption(OPTION_NO_CREATE_INFO),
dumpTablesAsSql(System.out, line.getArgs(), line.hasOption(OPTION_ADD_DROP_TABLE), line.hasOption(OPTION_NO_CREATE_INFO),
line.hasOption(OPTION_NO_DATA), line.hasOption(OPTION_COMMENTS));
}
return true;
}

public void dumpSchemaAsXml(OutputStream output) throws Exception {
public void dumpTablesAsXml(OutputStream output, String[] tables, boolean noCreateInfo, boolean noData, boolean comments) throws Exception {
/* TODO:
* <dbdump> <database></database> </dbdump>
* <table_data name="mytable"><row><field name="myfield">value</field></row></table_data>
*/
IDatabasePlatform platform = getDatabasePlatform();
Database db = platform.readDatabase(platform.getDefaultCatalog(), platform.getDefaultSchema(), null);
String catalog = platform.getDefaultCatalog();
String schema = platform.getDefaultSchema();

Database db = platform.readDatabase(catalog, schema, null);
Writer writer = new OutputStreamWriter(output);
SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");

if (comments) {
writer.write("<!-- SymmetricDS " + Version.version() + " " + commandName + " -->\n");
writer.write("<!-- Catalog: " + (catalog == null ? "" : catalog) + " Schema: " + (schema == null ? "" : schema) + " -->\n");
writer.write("<!-- Started on " + df.format(new Date()) + " -->\n");
}

// flush the header comments before DatabaseIO writes directly to the underlying stream
writer.flush();
new DatabaseIO().write(db, output);
output.flush();
writer.close();
}

public void dumpSchemaAsSql(OutputStream output, boolean addDropTable, boolean noCreateInfo, boolean noData, boolean comments) throws Exception {
public void dumpTablesAsSql(OutputStream output, String[] tables, boolean addDropTable, boolean noCreateInfo, boolean noData, boolean comments) throws Exception {
IDatabasePlatform platform = getDatabasePlatform();
String catalog = platform.getDefaultCatalog();
String schema = platform.getDefaultSchema();

Database db = platform.readDatabase(catalog, schema, null);

// IDatabasePlatform target = Factory.getPlatform("mysql");

IDatabasePlatform target = new MySqlPlatform(null, new DatabasePlatformSettings());
Writer writer = new OutputStreamWriter(output);
SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
@@ -138,6 +170,64 @@ public void dumpSchemaAsSql(OutputStream output, boolean addDropTable, boolean n
writer.close();
}

public void dumpTablesAsCsv(OutputStream output, String[] tableNames, boolean noData, boolean comments) throws Exception {
final IDatabasePlatform platform = getDatabasePlatform();
Writer writer = new OutputStreamWriter(output);
String catalog = platform.getDefaultCatalog();
String schema = platform.getDefaultSchema();
ArrayList<Table> tableList = new ArrayList<Table>();

if (tableNames.length == 0) {
Database database = platform.readDatabase(catalog, schema, null);
for (Table table : database.getTables()) {
tableList.add(table);
}
} else {
for (String tableName : tableNames) {
Table table = platform.readTableFromDatabase(catalog, schema, tableName);
if (table != null) {
tableList.add(table);
} else {
throw new RuntimeException("Cannot find table " + tableName + " in catalog " + catalog +
" and schema " + schema);
}
}
}

for (Table table : tableList) {
final CsvWriter csvWriter = new CsvWriter(writer, ',');
csvWriter.setEscapeMode(CsvWriter.ESCAPE_MODE_BACKSLASH);

if (comments) {
csvWriter.writeComment(" Table: " + table.getFullyQualifiedTableName());
}

csvWriter.writeRecord(table.getColumnNames());

if (!noData) {
ISqlTemplate sqlTemplate = platform.getSqlTemplate();
DmlStatement stmt = platform.createDmlStatement(DmlType.SELECT_ALL, table);
final Column[] columns = table.getColumns();

// query every row; the mapper writes each one to the CSV as a side effect
sqlTemplate.query(stmt.getSql(), new ISqlRowMapper<Object>() {
public Object mapRow(Row row) {
String[] values = platform.getStringValues(columns, row);
try {
csvWriter.writeRecord(values, true);
} catch (IOException e) {
throw new RuntimeException(e);
}
return values;
}
});
}
csvWriter.flush();
}

writer.flush();
writer.close();
}
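
// Illustrative usage sketch (hypothetical helper method and table name; assumes the
// launcher's database platform is already configured): dump a single table as CSV
// using the method above.
private void dumpItemTableCsvExample() throws Exception {
    // an empty table-name array dumps every table in the default catalog and schema
    dumpTablesAsCsv(System.out, new String[] { "item" },
            false,  // noData = false: write the data rows after the header record
            true);  // comments = true: write a "Table: ..." comment line before each table
}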

// public void copyFromTables(List<TableToExtract> tables) {
// long batchId = 1;
// for (TableToExtract tableToRead : tables) {
DbLoad.java (new file)
@@ -0,0 +1,154 @@
/*
* Licensed to JumpMind Inc under one or more contributor
* license agreements. See the NOTICE file distributed
* with this work for additional information regarding
* copyright ownership. JumpMind Inc licenses this file
* to you under the GNU Lesser General Public License (the
* "License"); you may not use this file except in compliance
* with the License.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, see
* <http://www.gnu.org/licenses/>.
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.jumpmind.symmetric;

import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.List;

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Options;
import org.apache.commons.io.IOUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.jumpmind.db.io.DatabaseIO;
import org.jumpmind.db.model.Database;
import org.jumpmind.db.model.Table;
import org.jumpmind.db.platform.IDatabasePlatform;
import org.jumpmind.db.sql.DmlStatement;
import org.jumpmind.db.sql.DmlStatement.DmlType;
import org.jumpmind.db.sql.ISqlTemplate;
import org.jumpmind.db.sql.SqlScript;
import org.jumpmind.db.util.BinaryEncoding;
import org.jumpmind.symmetric.csv.CsvReader;

/**
* Load data from file to database tables.
*/
public class DbLoad extends AbstractCommandLauncher {

@SuppressWarnings("unused")
private static final Log log = LogFactory.getLog(DbLoad.class);

private static final String OPTION_XML = "xml";

private static final String OPTION_CSV = "csv";

public DbLoad(String commandName, String messageKeyPrefix) {
super(commandName, messageKeyPrefix);
}

public static void main(String[] args) throws Exception {
new DbLoad("dbload", "DbLoad.Option.").execute(args);
}

protected void printHelp(Options options) {
System.out.println(commandName + " version " + Version.version());
System.out.println("Load data from file to database tables.\n");
super.printHelp(options);
}

@Override
protected void buildOptions(Options options) {
super.buildOptions(options);
addOption(options, "x", OPTION_XML, false);
addOption(options, null, OPTION_CSV, false);
}

@Override
protected boolean executeOptions(CommandLine line) throws Exception {
String[] args = line.getArgs();

if (args.length == 0) {
executeOption(line, System.in);
} else {
for (String fileName : args) {
if (! new File(fileName).exists()) {
throw new RuntimeException("Cannot find file " + fileName);
}
}
for (String fileName : args) {
executeOption(line, new FileInputStream(fileName));
}
}

return true;
}

protected void executeOption(CommandLine line, InputStream in) throws Exception {
if (line.hasOption(OPTION_XML)) {
loadTablesFromXml(in);
} else if (line.hasOption(OPTION_CSV)) {
loadTablesFromCsv(in);
} else {
loadTablesFromSql(in);
}
}

public void loadTablesFromXml(InputStream in) throws Exception {
// TODO: read in data from XML also
IDatabasePlatform platform = getDatabasePlatform();
Database database = new DatabaseIO().read(in);
platform.createDatabase(database, false, true);
}

public void loadTablesFromCsv(InputStream in) throws Exception {
IDatabasePlatform platform = getDatabasePlatform();
ISqlTemplate sqlTemplate = platform.getSqlTemplate();
// NOTE: the target table name is hard-coded to "item" in this dev check-in
Table table = platform.readTableFromDatabase(platform.getDefaultCatalog(), platform.getDefaultSchema(), "item");
if (table == null) {
throw new RuntimeException("Unable to find table item in the default catalog and schema");
}
DmlStatement statement = platform.createDmlStatement(DmlType.INSERT, table);

CsvReader csvReader = new CsvReader(new InputStreamReader(in));
csvReader.setEscapeMode(CsvReader.ESCAPE_MODE_BACKSLASH);
csvReader.setSafetySwitch(false);
csvReader.setUseComments(true);
csvReader.readHeaders();

while (csvReader.readRecord()) {
String[] values = csvReader.getValues();
Object[] data = platform.getObjectValues(BinaryEncoding.HEX, table, csvReader.getHeaders(), values);
for (String value : values) {
System.out.print("|" + value);
}
System.out.print("\n");
int rows = sqlTemplate.update(statement.getSql(), data);
System.out.println(rows + " rows inserted.");
}
csvReader.close();
}
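
// Illustrative usage sketch (hypothetical helper method and file name): load a CSV
// file with the method above. The file is expected to begin with a header record
// naming the target table's columns; comment lines and backslash-escaped values are
// honored per the CsvReader settings used here.
private void loadCsvFileExample() throws Exception {
    InputStream in = new FileInputStream("item.csv");
    try {
        loadTablesFromCsv(in);
    } finally {
        in.close();
    }
}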

public void loadTablesFromSql(InputStream in) throws Exception {
IDatabasePlatform platform = getDatabasePlatform();

// TODO: SqlScript should be able to stream from standard input to run large SQL scripts
List<String> lines = IOUtils.readLines(in);

SqlScript script = new SqlScript(lines, platform.getSqlTemplate(), true, SqlScript.QUERY_ENDS,
platform.getSqlScriptReplacementTokens());
script.execute();
}
}
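
Taken together, the two launchers give a simple schema round trip: DbDump.dumpTablesAsXml writes the current schema as XML, and DbLoad.loadTablesFromXml reads that XML back and recreates the tables. Below is a minimal sketch of the call sequence; the DbDump constructor arguments are assumed to mirror DbLoad's (commandName, messageKeyPrefix), the file name is arbitrary, and both launchers would still need their usual engine/database properties configured (normally handled by execute()), so treat this as an outline rather than a ready-to-run program.

import java.io.FileInputStream;
import java.io.FileOutputStream;

import org.jumpmind.symmetric.DbDump;
import org.jumpmind.symmetric.DbLoad;

public class SchemaRoundTripSketch {
    public static void main(String[] args) throws Exception {
        DbDump dump = new DbDump("dbdump", "DbDump.Option.");   // constructor args assumed
        DbLoad load = new DbLoad("dbload", "DbLoad.Option.");

        // Write the schema of every table as XML: no table filter, keep create info,
        // skip data, no header comments.
        FileOutputStream out = new FileOutputStream("schema.xml");
        dump.dumpTablesAsXml(out, new String[0], false, true, false);
        out.close();

        // Read the XML back with DatabaseIO and recreate the tables on the target database.
        FileInputStream in = new FileInputStream("schema.xml");
        load.loadTablesFromXml(in);
        in.close();
    }
}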
