001
014
015 package com.liferay.portal.tools.samplesqlbuilder;
016
017 import com.liferay.portal.dao.db.MySQLDB;
018 import com.liferay.portal.freemarker.FreeMarkerUtil;
019 import com.liferay.portal.kernel.dao.db.DB;
020 import com.liferay.portal.kernel.dao.db.DBFactoryUtil;
021 import com.liferay.portal.kernel.io.CharPipe;
022 import com.liferay.portal.kernel.io.OutputStreamWriter;
023 import com.liferay.portal.kernel.io.unsync.UnsyncBufferedReader;
024 import com.liferay.portal.kernel.io.unsync.UnsyncBufferedWriter;
025 import com.liferay.portal.kernel.io.unsync.UnsyncTeeWriter;
026 import com.liferay.portal.kernel.util.FileUtil;
027 import com.liferay.portal.kernel.util.GetterUtil;
028 import com.liferay.portal.kernel.util.SortedProperties;
029 import com.liferay.portal.kernel.util.StringBundler;
030 import com.liferay.portal.kernel.util.StringPool;
031 import com.liferay.portal.kernel.util.StringUtil;
032 import com.liferay.portal.util.InitUtil;
033
034 import java.io.File;
035 import java.io.FileInputStream;
036 import java.io.FileOutputStream;
037 import java.io.FileReader;
038 import java.io.FileWriter;
039 import java.io.IOException;
040 import java.io.Reader;
041 import java.io.Writer;
042
043 import java.nio.channels.FileChannel;
044
045 import java.util.ArrayList;
046 import java.util.HashMap;
047 import java.util.List;
048 import java.util.Map;
049 import java.util.Properties;
050
051
055 public class SampleSQLBuilder {
056
057 public static void main(String[] args) {
058 InitUtil.initWithSpring();
059
060 Reader reader = null;
061
062 try {
063 Properties properties = new SortedProperties();
064
065 reader = new FileReader(args[0]);
066
067 properties.load(reader);
068
069 DataFactory dataFactory = new DataFactory(properties);
070
071 new SampleSQLBuilder(properties, dataFactory);
072 }
073 catch (Exception e) {
074 e.printStackTrace();
075 }
076 finally {
077 if (reader != null) {
078 try {
079 reader.close();
080 }
081 catch (IOException ioe) {
082 ioe.printStackTrace();
083 }
084 }
085 }
086 }
087
	/**
	 * Runs the complete sample SQL build: reads configuration from
	 * <code>sample.sql.*</code> properties, generates SQL via a background
	 * FreeMarker thread, compresses it into per-table files under a temp
	 * directory, then either merges everything into a single
	 * <code>sample-&lt;dbType&gt;.sql</code> file or moves the per-table files
	 * into an <code>output</code> directory. Finally writes the effective
	 * <code>sample.sql.*</code> properties to
	 * <code>benchmarks-actual.properties</code>.
	 *
	 * @param properties the <code>sample.sql.*</code> configuration
	 * @param dataFactory supplies generated data to the FreeMarker context
	 * @throws Exception if generation, compression, or merging fails
	 */
	public SampleSQLBuilder(Properties properties, DataFactory dataFactory)
		throws Exception {

		_dbType = properties.getProperty("sample.sql.db.type");

		_csvFileNames = StringUtil.split(
			properties.getProperty("sample.sql.output.csv.file.names"));
		_optimizeBufferSize = GetterUtil.getInteger(
			properties.getProperty("sample.sql.optimize.buffer.size"));
		_outputDir = properties.getProperty("sample.sql.output.dir");
		_script = properties.getProperty("sample.sql.script");

		_dataFactory = dataFactory;

		// SQL is produced on a background thread (see generateSQL); this
		// reader consumes it through an in-memory pipe as it is written

		Reader reader = generateSQL();

		File tempDir = new File(_outputDir, "temp");

		tempDir.mkdirs();

		try {

			// Rewrite single-row inserts into multi-row inserts, one .sql
			// file per table, plus misc.sql for everything else

			compressSQL(reader, tempDir);

			boolean outputMerge = GetterUtil.getBoolean(
				properties.getProperty("sample.sql.output.merge"));

			if (outputMerge) {
				File sqlFile = new File(
					_outputDir, "sample-" + _dbType + ".sql");

				FileUtil.delete(sqlFile);

				mergeSQL(tempDir, sqlFile);
			}
			else {
				File outputDir = new File(_outputDir, "output");

				FileUtil.deltree(outputDir);

				if (!tempDir.renameTo(outputDir)) {

					// renameTo can fail (e.g. across file systems); fall
					// back to copying, tempDir is removed in the finally

					FileUtil.copyDirectory(tempDir, outputDir);
				}
			}
		}
		finally {
			FileUtil.deltree(tempDir);
		}

		// Record the sample.sql.* properties actually used for this run

		StringBundler sb = new StringBundler();

		for (String key : properties.stringPropertyNames()) {
			if (!key.startsWith("sample.sql")) {
				continue;
			}

			String value = properties.getProperty(key);

			sb.append(key);
			sb.append(StringPool.EQUAL);
			sb.append(value);
			sb.append(StringPool.NEW_LINE);
		}

		FileUtil.write(
			new File(_outputDir, "benchmarks-actual.properties"),
			sb.toString());
	}
166
	/**
	 * Folds one insert statement (the caller has already stripped the leading
	 * "insert into ") into the per-table multi-row insert buffer, and flushes
	 * the buffer to the table's SQL file once it reaches the configured
	 * optimize buffer size.
	 *
	 * @param db database used to translate the SQL for the target dialect
	 * @param directory directory receiving the per-table SQL files
	 * @param insertSQLWriters lazily created writer per table name
	 * @param sqls per-table accumulation buffers; an entry whose index is 0
	 *        was just flushed and is restarted here
	 * @param insertSQL statement of the form
	 *        "TableName (cols...) values (...);"
	 * @throws IOException if writing a flushed buffer fails
	 */
	protected void compressSQL(
			DB db, File directory, Map<String, Writer> insertSQLWriters,
			Map<String, StringBundler> sqls, String insertSQL)
		throws IOException {

		// The statement starts with the table name

		String tableName = insertSQL.substring(0, insertSQL.indexOf(' '));

		// Position just past " values ": start of the values tuple

		int index = insertSQL.indexOf(" values ") + 8;

		StringBundler sb = sqls.get(tableName);

		if ((sb == null) || (sb.index() == 0)) {

			// First row for this table, or the previous buffer was just
			// flushed: start a new multi-row insert header

			sb = new StringBundler();

			sqls.put(tableName, sb);

			sb.append("insert into ");
			sb.append(insertSQL.substring(0, index));
			sb.append("\n");
		}
		else {

			// Continue the multi-row values list

			sb.append(",\n");
		}

		// Drop the statement's final character (its terminating ';'); the
		// terminator is re-added when the buffer is flushed

		String values = insertSQL.substring(index, insertSQL.length() - 1);

		sb.append(values);

		if (sb.index() >= _optimizeBufferSize) {
			sb.append(";\n");

			insertSQL = db.buildSQL(sb.toString());

			// index 0 marks the buffer as flushed so the next row for this
			// table starts a fresh statement

			sb.setIndex(0);

			writeToInsertSQLFile(
				directory, tableName, insertSQLWriters, insertSQL);
		}
	}
206
207 protected void compressSQL(Reader reader, File dir) throws IOException {
208 DB db = DBFactoryUtil.getDB(_dbType);
209
210 if (db instanceof MySQLDB) {
211 db = new SampleMySQLDB();
212 }
213
214 Map<String, Writer> insertSQLWriters = new HashMap<String, Writer>();
215 Map<String, StringBundler> insertSQLs =
216 new HashMap<String, StringBundler>();
217 List<String> miscSQLs = new ArrayList<String>();
218
219 UnsyncBufferedReader unsyncBufferedReader = new UnsyncBufferedReader(
220 reader);
221
222 String s = null;
223
224 while ((s = unsyncBufferedReader.readLine()) != null) {
225 s = s.trim();
226
227 if (s.length() > 0) {
228 if (s.startsWith("insert into ")) {
229 compressSQL(
230 db, dir, insertSQLWriters, insertSQLs, s.substring(12));
231 }
232 else {
233 miscSQLs.add(s);
234 }
235 }
236 }
237
238 unsyncBufferedReader.close();
239
240 for (Map.Entry<String, StringBundler> entry : insertSQLs.entrySet()) {
241 String tableName = entry.getKey();
242 StringBundler sb = entry.getValue();
243
244 if (sb.index() == 0) {
245 continue;
246 }
247
248 String insertSQL = db.buildSQL(sb.toString());
249
250 writeToInsertSQLFile(dir, tableName, insertSQLWriters, insertSQL);
251
252 Writer insertSQLWriter = insertSQLWriters.remove(tableName);
253
254 insertSQLWriter.write(";\n");
255
256 insertSQLWriter.close();
257 }
258
259 Writer miscSQLWriter = new FileWriter(new File(dir, "misc.sql"));
260
261 for (String miscSQL : miscSQLs) {
262 miscSQL = db.buildSQL(miscSQL);
263
264 miscSQLWriter.write(miscSQL);
265 miscSQLWriter.write(StringPool.NEW_LINE);
266 }
267
268 miscSQLWriter.close();
269 }
270
271 protected Writer createFileWriter(File file) throws IOException {
272 FileOutputStream fileOutputStream = new FileOutputStream(file);
273
274 Writer writer = new OutputStreamWriter(fileOutputStream);
275
276 return createUnsyncBufferedWriter(writer);
277 }
278
	/**
	 * Wraps the given writer in an unsynchronized buffered writer whose
	 * {@code flush()} is a no-op.
	 *
	 * <p>NOTE(review): presumably the no-op flush keeps intermediate flush
	 * calls from callers (e.g. the tee/pipe writers in generateSQL) from
	 * defeating the buffering; buffered data is expected to reach the
	 * underlying writer on close — confirm against
	 * {@code UnsyncBufferedWriter#close()} semantics.</p>
	 *
	 * @param writer the writer to buffer
	 * @return the buffering wrapper
	 */
	protected Writer createUnsyncBufferedWriter(Writer writer) {
		return new UnsyncBufferedWriter(writer, _WRITER_BUFFER_SIZE) {

			@Override
			public void flush() {

				// Intentionally a no-op; see the method Javadoc above

			}

		};
	}
291
292 protected Reader generateSQL() {
293 final CharPipe charPipe = new CharPipe(_PIPE_BUFFER_SIZE);
294
295 Thread thread = new Thread() {
296
297 @Override
298 public void run() {
299 try {
300 Writer sampleSQLWriter = new UnsyncTeeWriter(
301 createUnsyncBufferedWriter(charPipe.getWriter()),
302 createFileWriter(new File(_outputDir, "sample.sql")));
303
304 Map<String, Object> context = getContext();
305
306 FreeMarkerUtil.process(_script, context, sampleSQLWriter);
307
308 for (String csvFileName : _csvFileNames) {
309 Writer csvWriter = (Writer)context.get(
310 csvFileName + "CSVWriter");
311
312 csvWriter.close();
313 }
314
315 sampleSQLWriter.close();
316
317 charPipe.close();
318 }
319 catch (Exception e) {
320 e.printStackTrace();
321 }
322 }
323
324 };
325
326 thread.start();
327
328 return charPipe.getReader();
329 }
330
331 protected Map<String, Object> getContext() throws Exception {
332 Map<String, Object> context = new HashMap<String, Object>();
333
334 context.put("dataFactory", _dataFactory);
335
336 for (String csvFileName : _csvFileNames) {
337 Writer csvWriter = createFileWriter(
338 new File(_outputDir, csvFileName + ".csv"));
339
340 context.put(csvFileName + "CSVWriter", csvWriter);
341 }
342
343 return context;
344 }
345
346 protected Properties getProperties(String[] args) throws Exception {
347 Reader reader = null;
348
349 try {
350 Properties properties = new SortedProperties();
351
352 reader = new FileReader(args[0]);
353
354 properties.load(reader);
355
356 return properties;
357 }
358 finally {
359 if (reader != null) {
360 try {
361 reader.close();
362 }
363 catch (IOException ioe) {
364 ioe.printStackTrace();
365 }
366 }
367 }
368 }
369
370 protected void mergeSQL(File inputDir, File outputSQLFile)
371 throws IOException {
372
373 FileOutputStream outputSQLFileOutputStream = new FileOutputStream(
374 outputSQLFile);
375
376 FileChannel outputFileChannel = outputSQLFileOutputStream.getChannel();
377
378 File miscSQLFile = null;
379
380 for (File inputFile : inputDir.listFiles()) {
381 String inputFileName = inputFile.getName();
382
383 if (inputFileName.equals("misc.sql")) {
384 miscSQLFile = inputFile;
385
386 continue;
387 }
388
389 mergeSQL(inputFile, outputFileChannel);
390 }
391
392 if (miscSQLFile != null) {
393 mergeSQL(miscSQLFile, outputFileChannel);
394 }
395
396 outputFileChannel.close();
397 }
398
399 protected void mergeSQL(File inputFile, FileChannel outputFileChannel)
400 throws IOException {
401
402 FileInputStream inputFileInputStream = new FileInputStream(inputFile);
403
404 FileChannel inputFileChannel = inputFileInputStream.getChannel();
405
406 inputFileChannel.transferTo(
407 0, inputFileChannel.size(), outputFileChannel);
408
409 inputFileChannel.close();
410
411 inputFile.delete();
412 }
413
414 protected void writeToInsertSQLFile(
415 File dir, String tableName, Map<String, Writer> insertSQLWriters,
416 String insertSQL)
417 throws IOException {
418
419 Writer insertSQLWriter = insertSQLWriters.get(tableName);
420
421 if (insertSQLWriter == null) {
422 File file = new File(dir, tableName + ".sql");
423
424 insertSQLWriter = createFileWriter(file);
425
426 insertSQLWriters.put(tableName, insertSQLWriter);
427 }
428
429 insertSQLWriter.write(insertSQL);
430 }
431
	// Capacity of the in-memory pipe between the generator thread and the
	// SQL compressor

	private static final int _PIPE_BUFFER_SIZE = 16 * 1024 * 1024;

	// Buffer size for the non-flushing unsync buffered writers

	private static final int _WRITER_BUFFER_SIZE = 16 * 1024;

	// Base names of the CSV files exposed to the template as
	// "<name>CSVWriter" context entries

	private String[] _csvFileNames;

	// Exposed to the FreeMarker context under "dataFactory"

	private DataFactory _dataFactory;

	// Target database type (sample.sql.db.type), also used in the merged
	// file name

	private String _dbType;

	// Number of StringBundler entries at which a multi-row insert buffer is
	// flushed to disk (sample.sql.optimize.buffer.size)

	private int _optimizeBufferSize;

	// Directory that receives all generated output (sample.sql.output.dir)

	private String _outputDir;

	// FreeMarker script that emits the sample SQL (sample.sql.script)

	private String _script;
442
443 }