How to fast bulk insert JSON data into SQLite in Android

I have written this code to insert JSON data into SQLite in Android. It works fine, but I have 50,000+ rows to insert, so it takes a long time to insert into the SQLite database. How can I insert this data in the fastest way? Please kindly share the code; I am very new to Android. Thanks in advance.
Below is my code to insert the data:
private void insertItemDetails() {
final ProgressDialog loading = ProgressDialog.show(this, "Updating Data From Tally", "Please wait");
StringRequest stringRequest=new StringRequest(Request.Method.GET, url,
new Response.Listener<String>() {
@Override
public void onResponse(String response) {
try {
loading.show();
itemDatabaseCon.open();
itemDatabaseCon.delete();
itemDatabaseCon.close();
itemDatabaseCon.open();
itemDatabaseCon.createTable();
int a=response.length();
// boolean b=a.equalsIgnoreCase("no");
Log.d("value", String.valueOf(a));
if (a==2) {
Log.d("inside item if loop ",response);
}
else {
JSONObject jsonObject = new JSONObject(response);
JSONArray array = jsonObject.getJSONArray("posts");
for (int i = 0; i < array.length(); i++) {
JSONObject ob = array.getJSONObject(i);
String stockid = ob.getString("stockid");
String itemname = ob.getString("itemname");
String group = ob.getString("group");
String baseunit = ob.getString("baseunit");
String alternateunit = ob.getString("alternateunit");
String gst = ob.getString("gst");
String hsn = ob.getString("hsn");
String mrp = ob.getString("mrp");
String sdtsellrate = ob.getString("sdtsellrate");
String closingstock = ob.getString("closingstock");
ContentValues contentValues = new ContentValues();
contentValues.put(Constant2.key_itemstockid, stockid);
contentValues.put(Constant2.key_itemname, itemname);
contentValues.put(Constant2.key_itemgroup, group);
contentValues.put(Constant2.key_itembaseunit, baseunit);
contentValues.put(Constant2.key_itemalternateunit, alternateunit);
contentValues.put(Constant2.key_itemgst, gst);
contentValues.put(Constant2.key_itemhsn, hsn);
contentValues.put(Constant2.key_itemmrp, mrp);
contentValues.put(Constant2.key_itemsdtsellrate, sdtsellrate);
contentValues.put(Constant2.key_itemclosingstock, closingstock);
itemDatabaseCon.insert(Constant2.Table_name, contentValues);
}
}
loading.dismiss();
} catch (JSONException e) {
e.printStackTrace();
}
}
}, new Response.ErrorListener() {
@Override
public void onErrorResponse(VolleyError error) {
Log.d("got api error ffff" , error.getMessage());
}
});
RequestQueue requestQueue= Volley.newRequestQueue(this);
requestQueue.add(stringRequest);
}
Here is my database controller code.
public class ItemDatabaseCon {
String TAG = "DBAdapter";
private SQLiteDatabase db;
private ItemDatabaseCon.DBHelper dbHelper;
public ItemDatabaseCon (Context context) {
dbHelper = new ItemDatabaseCon.DBHelper(context);
}
public void open() {
if (null == db || !db.isOpen()) {
try {
db = dbHelper.getWritableDatabase();
} catch (SQLiteException sqLiteException) {
}
}
}
public void close() {
if (db != null) {
db.close();
}
}
public int insert(String table, ContentValues values) {
try {
db = dbHelper.getWritableDatabase();
int y = (int) db.insert(table, null, values);
db.close();
Log.e("Data Inserted", "Item Data Inserted");
Log.e("number of row", y + "");
return y;
} catch (Exception ex) {
Log.e("Error Insert", ex.getMessage().toString());
return 0;
}
}
public void delete() {
db.execSQL("DROP TABLE IF EXISTS " + Constant2.Table_name);
}
public int getCount()
{
db = dbHelper.getWritableDatabase();
String qry="SELECT * FROM "+Constant2.Table_name;
Cursor cursor=db.rawQuery(qry,null);
return cursor.getCount();
}
public void createTable()
{
String create_sql = "CREATE TABLE IF NOT EXISTS " + Constant2.Table_name + "("
+ Constant2.key_id + " INTEGER PRIMARY KEY AUTOINCREMENT,"
+ Constant2.key_itemstockid + " TEXT ," + Constant2.key_itemname + " TEXT ," + Constant2.key_itemgroup + " TEXT ,"
+ Constant2.key_itembaseunit + " TEXT ,"+ Constant2.key_itemalternateunit + " TEXT ,"+ Constant2.key_itemgst + " TEXT ,"
+ Constant2.key_itemhsn + " TEXT ,"+ Constant2.key_itemmrp + " TEXT ,"+ Constant2.key_itemsdtsellrate + " TEXT ,"
+ Constant2.key_itemclosingstock + " TEXT " + ")";
db.execSQL(create_sql);
}
public Cursor getAllRow(String table) {
return db.query(table, null, null, null, null, null, Constant2.key_id);
}
private class DBHelper extends SQLiteOpenHelper {
public DBHelper(Context context) {
super(context, Constant2.DB_Name, null, Constant2.Db_Version);
}
@Override
public void onCreate(SQLiteDatabase db) {
String create_sql = "CREATE TABLE IF NOT EXISTS " + Constant2.Table_name + "("
+ Constant2.key_id + " INTEGER PRIMARY KEY AUTOINCREMENT,"
+ Constant2.key_itemstockid + " TEXT ," + Constant2.key_itemname + " TEXT ," + Constant2.key_itemgroup + " TEXT ,"
+ Constant2.key_itembaseunit + " TEXT ,"+ Constant2.key_itemalternateunit + " TEXT ,"+ Constant2.key_itemgst + " TEXT ,"
+ Constant2.key_itemhsn + " TEXT ,"+ Constant2.key_itemmrp + " TEXT ,"+ Constant2.key_itemsdtsellrate + " TEXT ,"
+ Constant2.key_itemclosingstock + " TEXT " + ")";
db.execSQL(create_sql);
}
@Override
public void onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion) {
db.execSQL("DROP TABLE IF EXISTS " + Constant2.Table_name);
}
}
}

You could do the inserts inside a single SQLite transaction. This would significantly reduce the disk writes from 50,000+ to very few.
That is, before the loop starts, begin a transaction using the SQLiteDatabase's beginTransaction() method.
After the loop has completed (all rows have been inserted successfully), use the setTransactionSuccessful() method followed by the endTransaction() method.
Note: if you do not call setTransactionSuccessful(), the changes will be rolled back. So if you encounter an issue/error and want the changes (inserts) not to be applied, use appropriate logic so that setTransactionSuccessful() is skipped but endTransaction() is still run.
E.g. the following might be suitable:
....
else {
itemDatabaseCon.beginTransaction(); //<<<<<<<<<< ADDED start the transaction
JSONObject jsonObject = new JSONObject(response);
JSONArray array = jsonObject.getJSONArray("posts");
for (int i = 0; i < array.length(); i++) {
JSONObject ob = array.getJSONObject(i);
String stockid = ob.getString("stockid");
String itemname = ob.getString("itemname");
String group = ob.getString("group");
String baseunit = ob.getString("baseunit");
String alternateunit = ob.getString("alternateunit");
String gst = ob.getString("gst");
String hsn = ob.getString("hsn");
String mrp = ob.getString("mrp");
String sdtsellrate = ob.getString("sdtsellrate");
String closingstock = ob.getString("closingstock");
ContentValues contentValues = new ContentValues();
contentValues.put(Constant2.key_itemstockid, stockid);
contentValues.put(Constant2.key_itemname, itemname);
contentValues.put(Constant2.key_itemgroup, group);
contentValues.put(Constant2.key_itembaseunit, baseunit);
contentValues.put(Constant2.key_itemalternateunit, alternateunit);
contentValues.put(Constant2.key_itemgst, gst);
contentValues.put(Constant2.key_itemhsn, hsn);
contentValues.put(Constant2.key_itemmrp, mrp);
contentValues.put(Constant2.key_itemsdtsellrate, sdtsellrate);
contentValues.put(Constant2.key_itemclosingstock, closingstock);
itemDatabaseCon.insert(Constant2.Table_name, contentValues);
}
itemDatabaseCon.setTransactionSuccessful(); //<<<<<<<<<< ADDED indicate that changes (inserts) are all good
itemDatabaseCon.endTransaction(); //<<<<<<<<<< ADDED end the transaction
}
loading.dismiss();
....
//<<<<<<<<<< indicates the changed/added code
Edit
However, considering the insert method, the above will have no effect, as you are closing the database after each insert. Closing the database and then re-opening it is very costly resource-wise.
As such, to benefit from running all the inserts in a single transaction, you could use:
public int insert(String table, ContentValues values) {
try {
db = dbHelper.getWritableDatabase();
int y = (int) db.insert(table, null, values);
//db.close(); //<<<<<<<<<< Commented out so as to not close the database
Log.e("Data Inserted", "Item Data Inserted");
Log.e("number of row", y + "");
return y;
} catch (Exception ex) {
Log.e("Error Insert", ex.getMessage().toString());
return 0;
}
}
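Note that the transaction snippet above calls beginTransaction(), setTransactionSuccessful() and endTransaction() on itemDatabaseCon, and the posted ItemDatabaseCon class does not yet have those methods. A minimal sketch of the delegating wrappers you could add (an assumption on my part; it presumes open() has been called first so db is not null):

public void beginTransaction() {
    db.beginTransaction();
}

public void setTransactionSuccessful() {
    db.setTransactionSuccessful();
}

public void endTransaction() {
    // rolls back unless setTransactionSuccessful() was called first
    db.endTransaction();
}

For robustness, endTransaction() is typically called in a finally block so the transaction is always closed even if parsing a JSON row throws.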

Related

Change logging format of SpringBoot - micrometer to JSON

I have a SpringBoot application that uses micrometer to print out application metrics.
My pom.xml has:
<dependency>
<groupId>io.micrometer</groupId>
<artifactId>micrometer-core</artifactId>
<version>1.1.3</version>
</dependency>
My Config class is:
@Configuration
public class CoreConfiguration {
public static final String USER_REQUEST_CHANNEL = "userRequestChannel";
public static final String USER_RESPONSE_CHANNEL = "userResponseChannel";
public static final String MDC_ADD = "add";
public static final String DONE_CHANNEL = "nullChannel";
public static final String ADMIN_REQUEST_CHANNEL = "adminRequestChannel";
public static final String ADMIN_RESPONSE_CHANNEL = "adminResponseChannel";
public static final String SUPPORT_COMPLETED_CHANNEL = "supportCompletedChannel";
public static final String SUPPORT_RUNNING_CHANNEL = "nullChannel";
public static final String SUPPORT_ERROR_CHANNEL = "nullChannel";
@Bean(name = USER_REQUEST_CHANNEL)
public MessageChannel oAuthRequestChannel() {
return MessageChannels.direct().get();
}
@Bean(name = USER_RESPONSE_CHANNEL)
public MessageChannel oAuthResponseChannel() {
return MessageChannels.direct().get();
}
@Bean(name = FIRST_TRADE_CHANNEL)
public MessageChannel firstTradeChannel() {
return MessageChannels.direct().get();
}
@Bean(name = ADMIN_REQUEST_CHANNEL)
public MessageChannel instructionExecutionRequestChannel() {
return MessageChannels.direct().get();
}
@Bean(name = ADMIN_RESPONSE_CHANNEL)
public MessageChannel instructionExecutionResponseChannel() {
return MessageChannels.direct().get();
}
@Bean(name = SUPPORT_COMPLETED_CHANNEL)
public MessageChannel groupExecutionCompletedChannel() {
return MessageChannels.direct().get();
}
/**
* Turn on the Micrometer log file metrics.
*
* @return
*/
@Bean
public LoggingMeterRegistry loggingMeterRegistry(@Value("${micrometer.log.minutes}") long minutes) {
LoggingRegistryConfig config = new LoggingRegistryConfig() {
@Override
public String get(String s) {
return null;
}
@Override
public Duration step() {
return Duration.ofMinutes(minutes);
}
};
return LoggingMeterRegistry.builder(config).build();
}
}
USAGE IN CLASS:
public IntegrationFlow processRequest(HttpRequest request) {
return IntegrationFlows.from(INPUT_CHANNEL)
.enrichHeader(m -> m.headerExpression(REQUEST_ID,"payload.message.headers." + REQUEST_ID))
.log(LoggingHandler.Level.DEBUG, CoreConfiguration.class.getName(), m -> {
Throwable t = (Throwable) m.getPayload();
return throwableToString(t);})
.get();
}
I see the output of the metrics written to my log file as:
2019-02-25 14:40:23,337 | INFO | [logging-metrics-publisher] |
[meter.core.instrument.logging.LoggingMeterRegistry] | MY_SAMPLE_APP |
userId = [] | jvm.memory.max{area=heap,id=PS Survivor Space}
value=12.5 MiB
How do I log out in JSON format?
WHAT I NEED:
{
"ts": "2019-02-25 14:40:23,337" ,
"level" : "INFO",
"className" : "meter.core.instrument.logging.LoggingMeterRegistry",
"appName" : "MY_SAMPLE_APP",
"userId" : "",
"metric" :
{"metricType": "jvm.memory.max",
"area":"heap",
"id":"PS Survivor Space",
"value":"12.5 MiB"
}
}
Updating question with code as per Jon's answer.
@Jon, do you think the below code is correct? I have implemented a custom MeterRegistry that extends the LoggingMeterRegistry.
The only difference between LoggingMeterRegistry and CustomMeterRegistry is that my custom class prints out ID=
In LoggingMeterRegistry: this.loggingSink.accept(print.id() + " throughput=" + print.rate(count));
In CustomMeterRegistry: this.loggingSink.accept("ID=" + print.id() + " throughput=" + print.rate(count));
COMPLETE CODE:
public abstract class SplunkMeterRegistry extends LoggingMeterRegistry {
@Override
protected void publish() {
{
if (this.config.enabled()) {
this.getMeters().stream().sorted((m1, m2) -> {
int typeComp = m1.getId().getType().compareTo(m2.getId().getType());
return typeComp == 0 ? m1.getId().getName().compareTo(m2.getId().getName()) : typeComp;
}).forEach((m) -> {
LoggingMeterRegistry.Printer print = new LoggingMeterRegistry.Printer(m);
m.use((gauge) -> {
this.loggingSink.accept("ID=" + print.id() + " value=" + print.value(gauge.value()));
}, (counter) -> {
double count = counter.count();
if (this.config.logInactive() || count != 0.0D) {
this.loggingSink.accept("ID=" + print.id() + " throughput=" + print.rate(count));
}
}, (timer) -> {
HistogramSnapshot snapshot = timer.takeSnapshot();
long count = snapshot.count();
if (this.config.logInactive() || count != 0L) {
this.loggingSink.accept("ID=" + print.id() + " throughput=" + print.unitlessRate((double)count) + " mean=" + print.time(snapshot.mean(this.getBaseTimeUnit())) + " max=" + print.time(snapshot.max(this.getBaseTimeUnit())));
}
}, (summary) -> {
HistogramSnapshot snapshot = summary.takeSnapshot();
long count = snapshot.count();
if (this.config.logInactive() || count != 0L) {
this.loggingSink.accept("ID=" + print.id() + " throughput=" + print.unitlessRate((double)count) + " mean=" + print.value(snapshot.mean()) + " max=" + print.value(snapshot.max()));
}
}, (longTaskTimer) -> {
int activeTasks = longTaskTimer.activeTasks();
if (this.config.logInactive() || activeTasks != 0) {
this.loggingSink.accept("ID=" + print.id() + " active=" + print.value((double)activeTasks) + " duration=" + print.time(longTaskTimer.duration(this.getBaseTimeUnit())));
}
}, (timeGauge) -> {
double value = timeGauge.value(this.getBaseTimeUnit());
if (this.config.logInactive() || value != 0.0D) {
this.loggingSink.accept("ID=" + print.id() + " value=" + print.time(value));
}
}, (counter) -> {
double count = counter.count();
if (this.config.logInactive() || count != 0.0D) {
this.loggingSink.accept("ID=" + print.id() + " throughput=" + print.rate(count));
}
}, (timer) -> {
double count = timer.count();
if (this.config.logInactive() || count != 0.0D) {
this.loggingSink.accept("ID=" + print.id() + " throughput=" + print.rate(count) + " mean=" + print.time(timer.mean(this.getBaseTimeUnit())));
}
}, (meter) -> {
this.loggingSink.accept("ID=" + print.id() + StreamSupport.stream(meter.measure().spliterator(), false).map((ms) -> {
return ms.getStatistic().getTagValueRepresentation() + "=" + DoubleFormat.decimalOrNan(ms.getValue());
}));
});
});
}
}
}
}
You must implement a custom MeterRegistry, perhaps using LoggingMeterRegistry as a reference, that serializes the data in the format you desire. Effectively, that's what push-based MeterRegistry implementations are: just different serialization formats for different consumers.
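For example, a minimal sketch along those lines, assuming Micrometer 1.1.x, whose LoggingMeterRegistry.Builder exposes a loggingSink(Consumer<String>) hook: each formatted metric line is handed to your consumer, which wraps it in a JSON envelope before logging. The JSON field names below ("appName", "metric") are illustrative assumptions, not a fixed schema; fully structured output per tag still requires overriding publish() as in the code above.

@Bean
public LoggingMeterRegistry loggingMeterRegistry(@Value("${micrometer.log.minutes}") long minutes) {
    LoggingRegistryConfig config = new LoggingRegistryConfig() {
        @Override
        public String get(String s) { return null; }
        @Override
        public Duration step() { return Duration.ofMinutes(minutes); }
    };
    Logger logger = LoggerFactory.getLogger("json-metrics");
    return LoggingMeterRegistry.builder(config)
        // loggingSink receives each line the registry would otherwise have logged
        .loggingSink(line -> logger.info(
            "{\"appName\":\"MY_SAMPLE_APP\",\"metric\":\"" + line.replace("\"", "\\\"") + "\"}"))
        .build();
}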

Read arbitrary JSON data into a JavaFX TreeView, and only show the first element of any array in it

I need to show a JSON file in a JavaFX TreeView; the structure of the JSON is unknown, like on the JSON viewer site.
I show the tree for the user to select the path of a value (like XPath for XML), so if the JSON is too big, I only need to show the first element of any array in the JSON.
For example, the original data is:
{
name:"tom",
schools:[
{
name:"school1",
tags:["maths","english"]
},
{
name:"school2",
tags:["english","biological"]
},
]
}
I want to show:
Again: the structure of the JSON is unknown; this is just one example.
There's no other option than recursively handling the JSON and creating the TreeItem structure based on the element info.
(There's probably a better way of adding the symbols, but I didn't find appropriate icons.)
private static final String INPUT = "{\n"
+ " name:\"tom\",\n"
+ " schools:[\n"
+ " {\n"
+ " name:\"school1\",\n"
+ " tags:[\"maths\",\"english\"]\n"
+ " },\n"
+ " {\n"
+ " name:\"school2\",\n"
+ " tags:[\"english\",\"biological\"]\n"
+ " },\n"
+ " ]\n"
+ "}";
private static final Image JSON_IMAGE = new Image("https://i.stack.imgur.com/1slrh.png");
private static void prependString(TreeItem<Value> item, String string) {
String val = item.getValue().text;
item.getValue().text = (val == null
? string
: string + " : " + val);
}
private enum Type {
OBJECT(new Rectangle2D(45, 52, 16, 18)),
ARRAY(new Rectangle2D(61, 88, 16, 18)),
PROPERTY(new Rectangle2D(31, 13, 16, 18));
private final Rectangle2D viewport;
private Type(Rectangle2D viewport) {
this.viewport = viewport;
}
}
private static final class Value {
private String text;
private final Type type;
public Value(Type type) {
this.type = type;
}
public Value(String text, Type type) {
this.text = text;
this.type = type;
}
}
private static TreeItem<Value> createTree(JsonElement element) {
if (element.isJsonNull()) {
return new TreeItem<>(new Value("null", Type.PROPERTY));
} else if (element.isJsonPrimitive()) {
JsonPrimitive primitive = element.getAsJsonPrimitive();
return new TreeItem<>(new Value(primitive.isString()
? '"' + primitive.getAsString() + '"'
: primitive.getAsString(), Type.PROPERTY));
} else if (element.isJsonArray()) {
JsonArray array = element.getAsJsonArray();
TreeItem<Value> item = new TreeItem<>(new Value(Type.ARRAY));
// for (int i = 0, max = Math.min(1, array.size()); i < max; i++) {
for (int i = 0, max = array.size(); i < max; i++) {
TreeItem<Value> child = createTree(array.get(i));
prependString(child, Integer.toString(i));
item.getChildren().add(child);
}
return item;
} else {
JsonObject object = element.getAsJsonObject();
TreeItem<Value> item = new TreeItem<>(new Value(Type.OBJECT));
for (Map.Entry<String, JsonElement> property : object.entrySet()) {
TreeItem<Value> child = createTree(property.getValue());
prependString(child, property.getKey());
item.getChildren().add(child);
}
return item;
}
}
@Override
public void start(Stage primaryStage) {
JsonParser parser = new JsonParser();
JsonElement root = parser.parse(INPUT);
TreeItem<Value> treeRoot = createTree(root);
TreeView<Value> treeView = new TreeView<>(treeRoot);
treeView.setCellFactory(tv -> new TreeCell<Value>() {
private final ImageView imageView;
{
imageView = new ImageView(JSON_IMAGE);
imageView.setFitHeight(18);
imageView.setFitWidth(16);
imageView.setPreserveRatio(true);
setGraphic(imageView);
}
@Override
protected void updateItem(Value item, boolean empty) {
super.updateItem(item, empty);
if (empty || item == null) {
setText("");
imageView.setVisible(false);
} else {
setText(item.text);
imageView.setVisible(true);
imageView.setViewport(item.type.viewport);
}
}
});
final Scene scene = new Scene(treeView);
primaryStage.setScene(scene);
primaryStage.show();
}
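(Note: to show only the first element of each array, as the question asks, use the commented-out loop bound Math.min(1, array.size()) in createTree instead of array.size().)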

Why am I getting negative compression for Gzip, Snappy, and Smile?

I was trying to investigate which compression was suitable for my application for compressing JSON strings. The aim here is to compress entity JSON before persisting it to Redis.
Here are my results:
Gzip Compression Trial
compression percent : -8.7719345 %
to json time : 151 microseconds
pure saveable compression : 3326 microseconds
gzip compression+convert to json time : 3477 microseconds
gzip de-compression to string time : 537 microseconds
Snappy Compression Trial
compression percent : -22.807014 %
to json time : 58 microseconds
pure saveable compression : 259490 microseconds
snappy compression+convert to json time : 259549 microseconds
snappy de-compression to string time : 84 microseconds
Smile (msgpack) Compression Trial
compression percent : -24.561401 %
smile compression time : 3314 microseconds
smile de-compression time : n/a
However, what is quite odd is that Snappy is supposed to be much faster (from what I read), yet only the decompression is fast; the compression takes longer.
Also, strangely, Smile is producing a longer persistable string.
Can anybody point out why, or what I am doing wrong here?
Here is my code for this trial:
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.dataformat.smile.SmileFactory;
import com.fasterxml.jackson.dataformat.smile.SmileGenerator;
import com.fasterxml.jackson.dataformat.smile.SmileParser;
import org.xerial.snappy.Snappy;
import javax.xml.bind.DatatypeConverter;
import java.io.*;
import java.util.concurrent.TimeUnit;
import java.util.zip.GZIPInputStream;
import java.util.zip.GZIPOutputStream;
public class CompressionTrials {
public static void main(String[] args) {
jsonCompressionTrial();
}
public static void jsonCompressionTrial(){
SimpleDto originalDto = new SimpleDto();
originalDto.setFname("MyFirstName");
originalDto.setLname("MyLastName");
originalDto.setDescription("This is a long description. I am trying out compression options for JSON. Hopefully the results will help me decide on one approach");
originalDto.setCity("MyCity");
originalDto.setAge(36);
originalDto.setZip(2424);
gzipCompressionTrial(originalDto);
snappyCompressionTrial(originalDto);
smileCompressionTrial(originalDto);
}
public static void gzipCompressionTrial(SimpleDto simpleDto){
if(simpleDto == null){
return;
}
ObjectMapper mapper = new ObjectMapper();
String originalJsonString = null;
long compressionAndConversionMicroSeconds = 0;
long toJsonMicroSeconds = 0;
long compressionMicroSeconds = 0;
long decompressionMicroSeconds = 0;
SimpleDto restoredDto = null;
String restoredDtoJson = null;
try {
mapper.writeValueAsString(simpleDto);
long endConversionTime = 0;
long startTimeCompressionAndConvesion = System.nanoTime();
originalJsonString = mapper.writeValueAsString(simpleDto);
endConversionTime = System.nanoTime();
byte[] compressedBytes = gzipCompress(originalJsonString);
String compressedStringToSave = bytesToStringBase64(compressedBytes);
long endTimeCompression = System.nanoTime();
long startCompressionTime = endConversionTime;
toJsonMicroSeconds = TimeUnit.NANOSECONDS.toMicros((endConversionTime-startTimeCompressionAndConvesion));
compressionMicroSeconds = TimeUnit.NANOSECONDS.toMicros((endTimeCompression-startCompressionTime));
compressionAndConversionMicroSeconds = TimeUnit.NANOSECONDS.toMicros((endTimeCompression-startTimeCompressionAndConvesion));
long startTimeDecompression = System.nanoTime();
String unCompressedString = gzipDecompress(compressedBytes);
long endTimeDecompression = System.nanoTime();
decompressionMicroSeconds = TimeUnit.NANOSECONDS.toMicros(endTimeDecompression-startTimeDecompression); // TimeUnit.MILLISECONDS.convert((endTimeDecompression - startTimeDecompression), TimeUnit.NANOSECONDS);
int originalLength = originalJsonString.toString().length();
int compressedLength = compressedStringToSave.toString().length();
float compressionPercent = 100 - (( (float)compressedLength / (float)originalLength ) * 100);
restoredDto = mapper.readValue(originalJsonString, SimpleDto.class);
restoredDtoJson = mapper.writeValueAsString(restoredDto);
System.out.println("============================================================================================== ");
System.out.println(" Gzip Compression Trial");
System.out.println("----------------------------------------------------------------------------------------------");
// System.out.println("origin dto as json : " + originalJsonString );
// System.out.println( "original dto-json string length : " + originalLength);
// System.out.println( "compressed string length : " + compressedLength );
// System.out.println( "uncompressed json string : " + unCompressedString );
// System.out.println( " restored dto as json : " + restoredDtoJson );
// System.out.println( " is before-compressed = uncompressed : " + unCompressedString.equals(originalJsonString) );
// System.out.println( " is restored object json = original object json : " + originalJsonString.equals(restoredDtoJson) );
// System.out.println("----------------------------------------------------------------------------------------------");
System.out.println("compression percent : " + compressionPercent + " %" );
System.out.println("to json time : " + toJsonMicroSeconds + " microseconds" );
System.out.println(" pure saveable compression : " + compressionMicroSeconds + " microseconds" );
System.out.println("gzip compression+convert to json time : " + compressionAndConversionMicroSeconds + " microseconds" );
System.out.println("gzip de-compression to string time : " + decompressionMicroSeconds + " microseconds" );
System.out.println("============================================================================================== ");
} catch (IOException e) {
e.printStackTrace();
} catch (Exception e) {
e.printStackTrace();
}
}
public static void smileCompressionTrial(SimpleDto simpleDto){
if(simpleDto == null){
return;
}
ObjectMapper mapper = new ObjectMapper();
ObjectMapper smileMapper = getSmileObjectMapper();
String originalJsonString = null;
try {
originalJsonString = mapper.writeValueAsString(simpleDto);
} catch (JsonProcessingException e) {
e.printStackTrace();
return;
}
long compressionMicroSeconds = 0;
long decompressionMicroSeconds = 0;
SimpleDto restoredDto = null;
String restoredDtoJson = null;
try {
mapper.writeValueAsString(simpleDto);
long startTimeCompression = System.nanoTime();
byte[] compressedBytes = smileMapper.writeValueAsBytes(simpleDto);
//String compressedStringToSave = new String(compressedBytes, "UTF-8");// bytesToStringBase64(compressedBytes);
String compressedStringToSave = bytesToStringBase64(compressedBytes);
// System.out.println("smile compressed : " + compressedStringToSave);
// System.out.println("original length : " + originalJsonString.length() );
// System.out.println("length : " + compressedStringToSave.length() );
long endTimeCompression = System.nanoTime();
compressionMicroSeconds = TimeUnit.NANOSECONDS.toMicros((endTimeCompression-startTimeCompression)); //TimeUnit.MILLISECONDS.convert((endTimeCompression - startTimeCompression), TimeUnit.NANOSECONDS);
// long startTimeDecompression = System.nanoTime();
// String unCompressedString = gzipDecompress(compressedBytes);
// long endTimeDecompression = System.nanoTime();
// decompressionMicroSeconds = TimeUnit.NANOSECONDS.toMicros(endTimeDecompression-startTimeDecompression); // TimeUnit.MILLISECONDS.convert((endTimeDecompression - startTimeDecompression), TimeUnit.NANOSECONDS);
int originalLength = originalJsonString.toString().length();
int compressedLength = compressedStringToSave.toString().length();
float compressionPercent = 100 - (( (float)compressedLength / (float)originalLength ) * 100);
restoredDto = smileMapper.readValue( stringToBytesBase64(compressedStringToSave) , SimpleDto.class);
//restoredDto = smileMapper.readValue( compressedStringToSave.getBytes("UTF-8") , SimpleDto.class);
restoredDtoJson = mapper.writeValueAsString(restoredDto);
System.out.println("============================================================================================== ");
System.out.println(" Smile Compression Trial");
System.out.println("----------------------------------------------------------------------------------------------");
// System.out.println("origin dto as json : " + originalJsonString );
// System.out.println( "original dto-json string length : " + originalLength);
// System.out.println( "compressed string length : " + compressedLength );
// System.out.println( "uncompressed json string : n/a" /*+ unCompressedString*/ );
// System.out.println( " restored dto as json : " + restoredDtoJson );
// System.out.println( " is before-compressed = uncompressed : n/a " /*+ unCompressedString.equals(originalJsonString)*/ );
// System.out.println( " is restored object json = original object json : " + originalJsonString.equals(restoredDtoJson) );
// System.out.println("----------------------------------------------------------------------------------------------");
System.out.println("compression percent : " + compressionPercent + " %" );
System.out.println("smile compression time : " + compressionMicroSeconds + " microseconds" );
System.out.println("smile de-compression time : n/a " /*+ decompressionMicroSeconds + " microseconds"*/ );
System.out.println("============================================================================================== ");
} catch (IOException e) {
e.printStackTrace();
} catch (Exception e) {
e.printStackTrace();
}
}
public static void snappyCompressionTrial(SimpleDto simpleDto) {
if (simpleDto == null) {
return;
}
ObjectMapper mapper = new ObjectMapper();
String originalJsonString = null;
long compressionAndConversionMicroSeconds = 0;
long toJsonMicroSeconds = 0;
long compressionMicroSeconds = 0;
long decompressionMicroSeconds = 0;
SimpleDto restoredDto = null;
String restoredDtoJson = null;
try {
mapper.writeValueAsString(simpleDto);
long endConversionTime = 0;
long startTimeCompressionAndConvesion = System.nanoTime();
originalJsonString = mapper.writeValueAsString(simpleDto);
endConversionTime = System.nanoTime();
byte[] compressedBytes = snappyCompress(originalJsonString);
String compressedStringToSave = bytesToStringBase64(compressedBytes);
long endTimeCompression = System.nanoTime();
long startCompressionTime = endConversionTime;
toJsonMicroSeconds = TimeUnit.NANOSECONDS.toMicros((endConversionTime - startTimeCompressionAndConvesion));
compressionMicroSeconds = TimeUnit.NANOSECONDS.toMicros((endTimeCompression - startCompressionTime));
compressionAndConversionMicroSeconds = TimeUnit.NANOSECONDS.toMicros((endTimeCompression - startTimeCompressionAndConvesion));
long startTimeDecompression = System.nanoTime();
byte[] unCompressedBytes = snappyUncompress(compressedBytes);
String unCompressedString = bytesToStringUtf8(unCompressedBytes);
long endTimeDecompression = System.nanoTime();
decompressionMicroSeconds = TimeUnit.NANOSECONDS.toMicros(endTimeDecompression - startTimeDecompression);
int originalLength = originalJsonString.toString().length();
int compressedLength = compressedStringToSave.toString().length();
float compressionPercent = 100 - (((float) compressedLength / (float) originalLength) * 100);
//restoredDto = mapper.readValue(originalJsonString, SimpleDto.class);
restoredDto = mapper.readValue(unCompressedBytes, SimpleDto.class);
restoredDtoJson = mapper.writeValueAsString(restoredDto);
System.out.println("============================================================================================== ");
System.out.println(" Snappy Compression Trial");
System.out.println("----------------------------------------------------------------------------------------------");
// System.out.println("origin dto as json : " + originalJsonString );
// System.out.println( "original dto-json string length : " + originalLength);
// System.out.println( "compressed string length : " + compressedLength );
// System.out.println( "uncompressed json string : " + unCompressedString );
// System.out.println( " restored dto as json : " + restoredDtoJson );
// System.out.println( " is before-compressed = uncompressed : " + unCompressedString.equals(originalJsonString) );
// System.out.println( " is restored object json = original object json : " + originalJsonString.equals(restoredDtoJson) );
// System.out.println("----------------------------------------------------------------------------------------------");
System.out.println("compression percent : " + compressionPercent + " %");
System.out.println("to json time : " + toJsonMicroSeconds + " microseconds");
System.out.println(" pure saveable compression : " + compressionMicroSeconds + " microseconds");
System.out.println("gzip compression+convert to json time : " + compressionAndConversionMicroSeconds + " microseconds");
System.out.println("gzip de-compression to string time : " + decompressionMicroSeconds + " microseconds");
System.out.println("============================================================================================== ");
} catch (IOException e) {
e.printStackTrace();
} catch (Exception e) {
e.printStackTrace();
}
}
private static ObjectMapper getSmileObjectMapper() {
SmileFactory smileFactory = new SmileFactory();
smileFactory.configure(SmileGenerator.Feature.CHECK_SHARED_NAMES,true);
smileFactory.configure(SmileGenerator.Feature.CHECK_SHARED_STRING_VALUES,true);
smileFactory.configure(SmileGenerator.Feature.ENCODE_BINARY_AS_7BIT,true);
smileFactory.configure(SmileGenerator.Feature.WRITE_HEADER,true);
smileFactory.configure(SmileGenerator.Feature.WRITE_END_MARKER,false);
smileFactory.configure(SmileParser.Feature.REQUIRE_HEADER,false);
return new ObjectMapper(smileFactory);
}
public static byte[] gzipCompress(String str) throws IOException {
if (str == null || str.length() == 0) {
return null;
}
ByteArrayOutputStream out = new ByteArrayOutputStream();
GZIPOutputStream gzip = new GZIPOutputStream(out);
gzip.write(str.getBytes());
gzip.close();
return out.toByteArray();
// String outStr = out.toString("UTF-8");
// return outStr;
}
public static String gzipDecompress(byte[] bytes) throws Exception {
if (bytes == null || bytes.length == 0) {
return null;
}
GZIPInputStream gis = new GZIPInputStream(new ByteArrayInputStream(bytes));
BufferedReader bf = new BufferedReader(new InputStreamReader(gis, "UTF-8"));
String outStr = "";
String line;
while ((line=bf.readLine())!=null) {
outStr += line;
}
return outStr;
}
public static byte[] snappyCompress(String stringData) throws IOException {
return Snappy.compress(stringData);
}
public static byte[] snappyUncompress(byte[] bytes) throws IOException {
return Snappy.uncompress(bytes);
}
private static String bytesToStringBase64(byte[] bytes){
return DatatypeConverter.printBase64Binary(bytes);
}
private static byte[] stringToBytesBase64(String dataString){
return DatatypeConverter.parseBase64Binary(dataString);
}
private static String bytesToStringUtf8(byte[] bytes) throws UnsupportedEncodingException {
return new String(bytes, "UTF-8");
}
private static byte[] stringToBytesUtf8(String dataString) throws UnsupportedEncodingException {
return dataString.getBytes("UTF-8");
}
}
Environment details: Windows 7, i7 2.4 GHz processor, 16 GB RAM, Java 8
Versions of libraries used:
<dependency>
<groupId>com.fasterxml.jackson.dataformat</groupId>
<artifactId>jackson-dataformat-smile</artifactId>
<version>2.6.4</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
<version>2.6.4</version>
</dependency>
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
<scope>provided</scope>
<version>1.16.6</version>
</dependency>
<dependency>
<groupId>org.xerial.snappy</groupId>
<artifactId>snappy-java</artifactId>
<version>1.1.2</version>
</dependency>
*** This is not a benchmark, just a personal trial to decide on a compression strategy for my use case.
Please let me know if anybody sees any mistake in my trial.
Update:
Below is simpler code to try:
public static void stringCompressionTrial(){
String string = "I am what I am hhhhhhhhhhhhhhhhhhhhhhhhhhhhh"
+ "bjggujhhhhhhhhh"
+ "rggggggggggggggggggggggggg"
+ "esfffffffffffffffffffffffffffffff"
+ "esffffffffffffffffffffffffffffffff"
+ "esfekfgy enter code here`etd`enter code here wdd"
+ "heljwidgutwdbwdq8d"
+ "skdfgysrdsdnjsvfyekbdsgcu"
+"jbujsbjvugsduddbdj";
// uncomment below to use the json
// SimpleDto originalDto = new SimpleDto();
// originalDto.setFname("MyFirstName");
// originalDto.setLname("MySecondName");
// originalDto.setDescription("This is a long description. I am trying out compression options for JSON. Hopefully the results will help me decide on one approach");
// originalDto.setCity("MyCity");
// originalDto.setAge(36);
// originalDto.setZip(1111);
// ObjectMapper mapper = new ObjectMapper();
// try {
// string = mapper.writeValueAsString(originalDto);
// } catch (JsonProcessingException e) {
// e.printStackTrace();
// }
byte[] compressedBytes = null;
String compressedString = null;
try {
compressedBytes = gzipCompress(string);
compressedString = bytesToStringBase64(compressedBytes);
System.out.println("after gzipDecompress:" + compressedString);
//String decomp = gzipDecompress(compressedBytes);
String decompressedString = gzipDecompress( stringToBytesBase64(compressedString) );
System.out.println("decompressed string : " + decompressedString);
System.out.println( " original string length : " + string.length());
System.out.println( " compressedString length : " + compressedString.length() );
} catch (IOException e) {
e.printStackTrace();
} catch (Exception e) {
e.printStackTrace();
}
}
Because you are trying to compress short strings. Compression needs more data to find redundancy and to take advantage of skewed symbol frequencies. Note also that the trial compares the Base64-encoded length of the compressed bytes against the raw JSON length, and Base64 expands data by about a third, which pushes the measured ratio further negative.
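A quick way to see the overhead effect (a standalone sketch; the class name and inputs are made up for illustration): gzip adds roughly 20 bytes of fixed header/trailer, so a short input grows while a long, redundant input shrinks dramatically.

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.zip.GZIPOutputStream;

public class GzipSizeDemo {
    static int gzipSize(String s) throws IOException {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        try (GZIPOutputStream gzip = new GZIPOutputStream(out)) {
            gzip.write(s.getBytes("UTF-8"));
        }
        return out.size(); // raw compressed size, before any Base64 inflation
    }

    public static void main(String[] args) throws IOException {
        String shortJson = "{\"fname\":\"MyFirstName\",\"age\":36}";
        StringBuilder longJson = new StringBuilder();
        for (int i = 0; i < 1000; i++) longJson.append(shortJson);
        // the ~20-byte gzip overhead dominates the short input
        System.out.println(shortJson.length() + " chars -> " + gzipSize(shortJson) + " bytes");
        System.out.println(longJson.length() + " chars -> " + gzipSize(longJson.toString()) + " bytes");
    }
}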

JSP tag library to display MySQL rollup query with grouping and subtotals

I need to display several tables as HTML, using JSP, coming from MySQL GROUP BY a,b,c WITH ROLLUP queries. I'm looking for a good tag library to achieve this. I have found DisplayTag, but it was last updated in 2008, and I would prefer using the subtotals calculated by MySQL, which seems to be tricky with DisplayTag.
MySQL does subtotals by adding extra rows to the resultset with the group field set to NULL.
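For example, for GROUP BY person, city WITH ROLLUP, the resultset shape looks like this (values are illustrative):

person | city | amount
Alice  | Rome |     10
Alice  | Oslo |      5
Alice  | NULL |     15   <- subtotal for Alice
Bob    | Rome |      7
Bob    | NULL |      7   <- subtotal for Bob
NULL   | NULL |     22   <- grand total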
Is there a better alternative? Printing the table is important; paging and sorting would be nice, but I can live without them. No editing of any kind.
I wrote my own quick-and-dirty tag. Note that it expects the rollup data structure returned by MySQL; I haven't tested it with anything else.
Usage example:
<xxx:rollupTable cssClass="data" data="${data}">
<xxx:rollupColumn title="Person" align="left" group="true" fieldName="personName" groupFieldName="personId" tooltipLink="person"/>
<xxx:rollupColumn title="City" align="left" group="true" fieldName="cityName" groupFieldName="cityId" tooltipLink="city"/>
<xxx:rollupColumn title="Price" align="right" format="#,##0.000" fieldName="price"/>
<xxx:rollupColumn title="Amount" align="right" format="#,##0" fieldName="amount"/>
</xxx:rollupTable>
The column tag does not do much besides adding the column definition to the table tag for later use.
package xxx.tags;
import...
public class RollupTableColumnTag extends SimpleTagSupport {
private String title;
private boolean group = false;
private boolean sum = false;
private String fieldName; // field name to output
private String groupFieldName; // field name to test for rollup level changes
private String align;
private String format;
private String tooltipLink;
private DecimalFormat formatter;
public void doTag() throws IOException, JspTagException {
RollupTableTag parent = (RollupTableTag)findAncestorWithClass(this, RollupTableTag.class);
if (parent == null) {
throw new JspTagException("Parent tag not found.");
}
parent.addColumnDefinition(this);
}
public void setFormat(String format) {
formatter = new DecimalFormat(format);
this.format = format;
}
public DecimalFormat getFormatter() {
return formatter;
}
// other getters and setters are standard, excluded
}
The table tag does the actual hard work:
package xxx.tags;
import ...
public class RollupTableTag extends BodyTagSupport {
protected String cssClass;
protected List<Map> data;
protected List<RollupTableColumnTag> columns;
protected List<Integer> groups;
public void setCssClass(String cssClass) {
this.cssClass = cssClass;
}
public void setData(List data) {
this.data = (List<Map>)data;
}
public int doStartTag() throws JspException {
columns = new ArrayList<RollupTableColumnTag>();
groups = new ArrayList<Integer>();
return EVAL_BODY_BUFFERED;
}
public int doEndTag() throws JspException {
try {
JspWriter writer = pageContext.getOut();
if (data.size() == 0) {
writer.println("<P>No data.</P>");
return EVAL_PAGE;
}
int nLevels = groups.size();
int nNormalRowCount = 0;
boolean[] bStartGroup = new boolean[nLevels];
String[] sSummaryTitle = new String[nLevels];
for (int i=0;i<nLevels;i++) {
bStartGroup[i] = true;
}
writer.println("<TABLE class=\"" + cssClass + "\">");
writer.println("<THEAD><TR>");
for (RollupTableColumnTag column : columns) {
writer.print("<TH");
if (column.getAlign() != null) {
writer.print(" align=\"" + column.getAlign() + "\"");
}
writer.print(">" + column.getTitle() + "</TH>");
}
writer.println("</TR></THEAD>");
writer.println("<TBODY>");
for (Map dataRow : data) {
StringBuffer out = new StringBuffer();
out.append("<TR>");
// grouping columns always come first
String cellClass = null;
for (int i=0;i<nLevels-1;i++) {
if (bStartGroup[i]) {
Object dataField = dataRow.get(columns.get(groups.get(i)).getFieldName());
sSummaryTitle[i] = dataField == null ? "" : dataField.toString();
}
}
int nLevelChanges = 0;
for (int i=0;i<nLevels;i++) {
if (dataRow.get( columns.get(groups.get(i)).getGroupFieldName() ) == null) {
if (i>0) {
bStartGroup[i-1] = true;
}
nLevelChanges++;
}
}
int nTotalLevel = nLevels - nLevelChanges;
if (nLevelChanges == nLevels) { // grand total row
cellClass = "grandtotal";
addCell(out, "Grand Total:", null, cellClass, nLevelChanges);
} else if (nLevelChanges > 0) { // other total row
boolean isOneLiner = (nNormalRowCount == 1);
nNormalRowCount = 0;
if (isOneLiner) continue; // skip one-line sums
cellClass = "total"+nTotalLevel;
for (int i=0;i<nLevels-nLevelChanges-1;i++) {
addCell(out," ",null,cellClass, 1);
}
addCell(out, sSummaryTitle[nLevels-nLevelChanges-1] + " total:", null, cellClass, nLevelChanges+1);
} else { // normal row
for (int i=0;i<nLevels;i++) {
if (bStartGroup[i]) {
RollupTableColumnTag column = columns.get(groups.get(i));
Object cellData = dataRow.get(column.getFieldName());
String displayVal = cellData != null ? cellData.toString() : "[n/a]";
if (column.getTooltipLink() != null && !column.getTooltipLink().isEmpty() && cellData != null) {
String tooltip = column.getTooltipLink();
int dataid = Integer.parseInt(dataRow.get(column.getGroupFieldName()).toString());
displayVal = "<div ajaxtooltip=\"" + tooltip + "\" ajaxtooltipid=\"" + dataid + "\">" + displayVal + "</div>";
}
addCell(out, displayVal, column.getAlign(), null, 1);
} else {
addCell(out," ", null, null, 1);
}
}
for (int i=0;i<nLevels-1;i++) {
bStartGroup[i] = false;
}
nNormalRowCount++;
}
// other columns
for (RollupTableColumnTag column : columns) {
if (!column.isGroup()) {
Object content = dataRow.get(column.getFieldName());
String displayVal = "";
if (content != null) {
if (column.getFormat() != null) {
float val = Float.parseFloat(content.toString());
displayVal = column.getFormatter().format(val);
} else {
displayVal = content.toString();
}
}
addCell(out,displayVal,column.getAlign(),cellClass,1);
}
}
out.append("</TR>");
// empty row for better readability
if (groups.size() > 2 && nLevelChanges == groups.size() - 1) {
out.append("<TR><TD colspan=\"" + columns.size() + "\"> </TD>");
}
writer.println(out);
}
writer.println("</TBODY>");
writer.println("</TABLE>");
} catch (IOException e) {
e.printStackTrace();
}
return EVAL_PAGE;
}
public void addCell(StringBuffer out, String content, String align, String cssClass, int colSpan) {
out.append("<TD");
if (align != null) {
out.append(" align=\"" + align + "\"");
}
if (cssClass != null) {
out.append(" class=\"" + cssClass + "\"");
}
if (colSpan > 1) {
out.append(" colspan=\"" + colSpan + "\"");
}
out.append(">");
out.append(content);
out.append("</TD>");
}
public void addColumnDefinition(RollupTableColumnTag cd) {
columns.add(cd);
if (cd.isGroup()) groups.add(columns.size()-1);
}
}

Processing OOP connecting to MySQL database

A friend and I are trying to write a program in Processing. The program needs to be able to connect to our MySQL database, pull information at random, and display it. We have gotten that much to work with the following code:
import de.bezier.data.sql.*;
MySQL dbconnection;
void setup()
{
size( 100, 100 );
String user = "username";
String pass = "password";
// name of the database to use
String database = "databasename";
// name of the table that will be created
//
String table = "tablename";
//
dbconnection = new MySQL( this, "ip", database, user, pass );
if ( dbconnection.connect() )
{
// now read it back out
//
dbconnection.query( "SELECT COUNT(id) FROM quiz_table" );
dbconnection.next();
int NumberOfRows = dbconnection.getInt(1);
float random = random(1, NumberOfRows);
int roundrandom = round(random);
println(" Row Number: " + roundrandom );
dbconnection.query( "SELECT * FROM quiz_table WHERE id =" + roundrandom);
while (dbconnection.next())
{
int n = dbconnection.getInt("id");
String a = dbconnection.getString("name");
String c = dbconnection.getString("charactor");
String m = dbconnection.getString("game");
int y = dbconnection.getInt("year");
String q= dbconnection.getString("quote");
println(n + " " + a + " " + c + " " + m + " " + y + " " + q);
}
}
else
{
// connection failed !
}
}
void draw()
{
// i know this is not really a visual sketch ...
}
This seems to work fine. However, we plan to make the program perform many more tasks, and to keep things manageable we wanted to turn some things into objects; in this case I want to make an object that connects to the database when it's called. The following is what I have come up with, but despite reworking it several ways I can't quite get it to work.
import de.bezier.data.sql.*;
MySQL dbconnection;
connect1 myCon;
void setup()
{
size(300,300);
myCon = new connect1("username","password","database","table");
myCon.dbconnect();
}
void draw()
{
}
class connect1 {
String user;
String pass;
String data;
String table;
connect1(String tempuser, String temppass, String tempdata, String temptable) {
user = tempuser;
pass = temppass;
data = tempdata;
table = temptable;
}
void dbconnect(){
dbconnection = new MySQL( this, "ip", data, user, pass );
if ( dbconnection.connect() )
{
// now read it back out
dbconnection.query( "SELECT COUNT(id) FROM table" );
dbconnection.next();
int NumberOfRows = dbconnection.getInt(1);
float random = random(1, NumberOfRows);
int roundrandom = round(random);
println(" Row Number: " + roundrandom );
dbconnection.query( "SELECT * FROM table WHERE id =" + roundrandom);
while (dbconnection.next())
{
int n = dbconnection.getInt("id");
String a = dbconnection.getString("name");
String c = dbconnection.getString("charactor");
String m = dbconnection.getString("game");
int y = dbconnection.getInt("year");
String q= dbconnection.getString("quote");
println(n + " " + a + " " + c + " " + m + " " + y + " " + q);
}
}
else
{
println("fail");
}
}
//end of class
}
Sorry if that is at all hard to understand
The constructor of MySQL expects a PApplet as the first argument. When you call new MySQL(this, ...) inside your object, this no longer refers to the main PApplet as it did in your first program.
The simplest way to fix this might be:
myCon.dbconnect(this); // send the PApplet as argument
...
void dbconnect(PApplet parent) {
dbconnection = new MySQL( parent, "ip", data, user, pass );
...
Another option would be to pass the PApplet to the constructor of your object, storing it in a property and using that property when calling new MySQL. For example:
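A sketch of that approach (untested, following the same naming as the question's code):

import de.bezier.data.sql.*;

MySQL dbconnection;
connect1 myCon;

void setup() {
  size(300, 300);
  // pass the sketch's PApplet (this) once, at construction time
  myCon = new connect1(this, "username", "password", "database", "table");
  myCon.dbconnect();
}

class connect1 {
  PApplet parent; // stored so new MySQL(...) gets the real PApplet
  String user, pass, data, table;

  connect1(PApplet tempparent, String tempuser, String temppass, String tempdata, String temptable) {
    parent = tempparent;
    user = tempuser;
    pass = temppass;
    data = tempdata;
    table = temptable;
  }

  void dbconnect() {
    dbconnection = new MySQL(parent, "ip", data, user, pass);
    // ... query logic as in the original dbconnect()
  }
}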