org.json.JSONException how can i clear this using volley library - json

Please, if anyone knows the answer, help with this question.
I am getting an error: "JSONArray cannot be converted to JSONObject",
and I am new to using the Volley library.
// Question snippet (truncated in the post): fetch a JSON array via Volley and
// build a display string from each product entry.
JsonArrayRequest req = new JsonArrayRequest(URL,
new Response.Listener<JSONArray>() {
// NOTE(review): "#Override" should be "@Override" — Stack Overflow formatting artifact.
#Override
public void onResponse(JSONArray response) {
Log.d(TAG, response.toString());
try {
// Parsing json array response
// loop through each json object
jsonResponse = "";
for (int i = 0; i < response.length(); i++) {
// NOTE(review): if each array element is a JSON object, this cast is the
// source of the "JSONArray cannot be converted to JSONObject" error —
// it should be: JSONObject product = response.getJSONObject(i);
JSONArray product = (JSONArray) response
.get(i);
// NOTE(review): Integer.parseInt("productId") always throws
// NumberFormatException — with a JSONObject the lookups should be
// product.getString("productId") etc.
String productId = product.getString(Integer.parseInt("productId"));
String productName =product.getString(Integer.parseInt("productName"));
String productPrice = product.getString(Integer.parseInt("productPrice"));
String productImage =product.getString(Integer.parseInt("productImage"));
jsonResponse += "productId: " + productId + "\n\n";
jsonResponse += "productName: " + productName + "\n\n";
jsonResponse += "productPrice: " + productPrice + "\n\n";
jsonResponse += "productImage: " + productImage + "\n\n\n";
}

The response you are getting is a string, and you are trying to assign it to a JSONArray.

Related

How to fast bulk insert from json data in sqlite in android

I have written this code to insert JSON data into SQLite on Android. It works fine, but I have 50,000+ rows to insert, so it takes a long time to insert into the SQLite database. How can I insert this data in the fastest way? Please kindly share the code — I am very new to Android. Thanks in advance.
Below i have written my code to insert data
private void insertItemDetails() {
final ProgressDialog loading = ProgressDialog .show(this,"Updating Data From Tally","Please wait");
StringRequest stringRequest=new StringRequest(Request.Method.GET, url,
new Response.Listener<String>() {
#Override
public void onResponse(String response) {
try {
loading.show();
itemDatabaseCon.open();
itemDatabaseCon.delete();
itemDatabaseCon.close();
itemDatabaseCon.open();
itemDatabaseCon.createTable();
int a=response.length();
// boolean b=a.equalsIgnoreCase("no");
Log.d("value", String.valueOf(a));
if (a==2) {
Log.d("inside item if loop ",response);
}
else {
JSONObject jsonObject = new JSONObject(response);
JSONArray array = jsonObject.getJSONArray("posts");
for (int i = 0; i < array.length(); i++) {
JSONObject ob = array.getJSONObject(i);
String stockid = ob.getString("stockid");
String itemname = ob.getString("itemname");
String group = ob.getString("group");
String baseunit = ob.getString("baseunit");
String alternateunit = ob.getString("alternateunit");
String gst = ob.getString("gst");
String hsn = ob.getString("hsn");
String mrp = ob.getString("mrp");
String sdtsellrate = ob.getString("sdtsellrate");
String closingstock = ob.getString("closingstock");
ContentValues contentValues = new ContentValues();
contentValues.put(Constant2.key_itemstockid, stockid);
contentValues.put(Constant2.key_itemname, itemname);
contentValues.put(Constant2.key_itemgroup, group);
contentValues.put(Constant2.key_itembaseunit, baseunit);
contentValues.put(Constant2.key_itemalternateunit, alternateunit);
contentValues.put(Constant2.key_itemgst, gst);
contentValues.put(Constant2.key_itemhsn, hsn);
contentValues.put(Constant2.key_itemmrp, mrp);
contentValues.put(Constant2.key_itemsdtsellrate, sdtsellrate);
contentValues.put(Constant2.key_itemclosingstock, closingstock);
itemDatabaseCon.insert(Constant2.Table_name, contentValues);
}
}
loading.dismiss();
} catch (JSONException e) {
e.printStackTrace();
}
}
}, new Response.ErrorListener() {
#Override
public void onErrorResponse(VolleyError error) {
Log.d("got api error ffff" , error.getMessage());
}
});
RequestQueue requestQueue= Volley.newRequestQueue(this);
requestQueue.add(stringRequest);
}
Here is my database controller code.
public class ItemDatabaseCon {
String TAG = "DBAdapter";
private SQLiteDatabase db;
private ItemDatabaseCon.DBHelper dbHelper;
public ItemDatabaseCon (Context context) {
dbHelper = new ItemDatabaseCon.DBHelper(context);
}
public void open() {
if (null == db || !db.isOpen()) {
try {
db = dbHelper.getWritableDatabase();
} catch (SQLiteException sqLiteException) {
}
}
}
public void close() {
if (db != null) {
db.close();
}
}
public int insert(String table, ContentValues values) {
try {
db = dbHelper.getWritableDatabase();
int y = (int) db.insert(table, null, values);
db.close();
Log.e("Data Inserted", "Item Data Inserted");
Log.e("number of row", y + "");
return y;
} catch (Exception ex) {
Log.e("Error Insert", ex.getMessage().toString());
return 0;
}
}
public void delete() {
db.execSQL("DROP TABLE IF EXISTS " + Constant2.Table_name);
}
public int getCount()
{
db = dbHelper.getWritableDatabase();
String qry="SELECT * FROM "+Constant2.Table_name;
Cursor cursor=db.rawQuery(qry,null);
return cursor.getCount();
}
public void createTable()
{
String create_sql = "CREATE TABLE IF NOT EXISTS " + Constant2.Table_name + "("
+ Constant2.key_id + " INTEGER PRIMARY KEY AUTOINCREMENT,"
+ Constant2.key_itemstockid + " TEXT ," + Constant2.key_itemname + " TEXT ," + Constant2.key_itemgroup + " TEXT ,"
+ Constant2.key_itembaseunit + " TEXT ,"+ Constant2.key_itemalternateunit + " TEXT ,"+ Constant2.key_itemgst + " TEXT ,"
+ Constant2.key_itemhsn + " TEXT ,"+ Constant2.key_itemmrp + " TEXT ,"+ Constant2.key_itemsdtsellrate + " TEXT ,"
+ Constant2.key_itemclosingstock + " TEXT " + ")";
db.execSQL(create_sql);
}
public Cursor getAllRow(String table) {
return db.query(table, null, null, null, null, null, Constant2.key_id);
}
private class DBHelper extends SQLiteOpenHelper {
public DBHelper(Context context) {
super(context, Constant2.DB_Name, null, Constant2.Db_Version);
}
#Override
public void onCreate(SQLiteDatabase db) {
String create_sql = "CREATE TABLE IF NOT EXISTS " + Constant2.Table_name + "("
+ Constant2.key_id + " INTEGER PRIMARY KEY AUTOINCREMENT,"
+ Constant2.key_itemstockid + " TEXT ," + Constant2.key_itemname + " TEXT ," + Constant2.key_itemgroup + " TEXT ,"
+ Constant2.key_itembaseunit + " TEXT ,"+ Constant2.key_itemalternateunit + " TEXT ,"+ Constant2.key_itemgst + " TEXT ,"
+ Constant2.key_itemhsn + " TEXT ,"+ Constant2.key_itemmrp + " TEXT ,"+ Constant2.key_itemsdtsellrate + " TEXT ,"
+ Constant2.key_itemclosingstock + " TEXT " + ")";
db.execSQL(create_sql);
}
#Override
public void onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion) {
db.execSQL("DROP TABLE IF EXISTS " + Constant2.Table_name);
}
}
}
You could do the inserts inside a single SQLite transaction. This would significantly reduce the disk writes from 50000+ to very few.
That is before the loops starts begin a transaction using the SQLiteDatabase's beginTransaction() method.
After the loop has completed successfully (all rows have been inserted), use the setTransactionSuccessful() method followed by the endTransaction() method.
Note if you do not setTransactionSuccessful then the changes would be rolled back (so if you encounter an issue/error and want the changes (inserts) to not be applied use appropriate logic so that the setTransactionSuccessful is skipped but that the endTransaction is run)
e.g. The following might be suitable:-
....
// Answer snippet: the same parsing/insert loop as the question, but wrapped in a
// single SQLite transaction so all rows share one commit.
// NOTE(review): assumes beginTransaction()/setTransactionSuccessful()/endTransaction()
// wrapper methods exist on ItemDatabaseCon (see the Edit below).
else {
itemDatabaseCon.beginTransaction(); //<<<<<<<<<< ADDED start the transaction
JSONObject jsonObject = new JSONObject(response);
JSONArray array = jsonObject.getJSONArray("posts");
for (int i = 0; i < array.length(); i++) {
JSONObject ob = array.getJSONObject(i);
String stockid = ob.getString("stockid");
String itemname = ob.getString("itemname");
String group = ob.getString("group");
String baseunit = ob.getString("baseunit");
String alternateunit = ob.getString("alternateunit");
String gst = ob.getString("gst");
String hsn = ob.getString("hsn");
String mrp = ob.getString("mrp");
String sdtsellrate = ob.getString("sdtsellrate");
String closingstock = ob.getString("closingstock");
ContentValues contentValues = new ContentValues();
contentValues.put(Constant2.key_itemstockid, stockid);
contentValues.put(Constant2.key_itemname, itemname);
contentValues.put(Constant2.key_itemgroup, group);
contentValues.put(Constant2.key_itembaseunit, baseunit);
contentValues.put(Constant2.key_itemalternateunit, alternateunit);
contentValues.put(Constant2.key_itemgst, gst);
contentValues.put(Constant2.key_itemhsn, hsn);
contentValues.put(Constant2.key_itemmrp, mrp);
contentValues.put(Constant2.key_itemsdtsellrate, sdtsellrate);
contentValues.put(Constant2.key_itemclosingstock, closingstock);
itemDatabaseCon.insert(Constant2.Table_name, contentValues);
}
itemDatabaseCon.setTransactionSuccessful(); //<<<<<<<<<< ADDED indicate that changes (inserts) are all good
itemDatabaseCon.endTransaction(); //<<<<<<<<<< ADDED end the transaction
}
loading.dismiss();
....
//<<<<<<<<<< indicates the changed/added code
Edit
However, considering the insert method, the above will have no effect, as you are closing the database after each insert. Closing the database and then re-opening it is very costly resource-wise.
As such to benefit from running all the inserts in a single transaction you could use :-
// Answer snippet: the question's insert() with the per-row db.close() removed,
// so the transaction shown above can actually span all the inserts.
public int insert(String table, ContentValues values) {
try {
db = dbHelper.getWritableDatabase();
int y = (int) db.insert(table, null, values);
//db.close(); //<<<<<<<<<< Commented out so as to not close the database
Log.e("Data Inserted", "Item Data Inserted");
Log.e("number of row", y + "");
return y;
} catch (Exception ex) {
// NOTE(review): ex.getMessage() can be null here, which would itself throw an NPE.
Log.e("Error Insert", ex.getMessage().toString());
return 0;
}
}

Read arbitrarily json data to a javafx treeview,and only show the first element of any array in it

I need to show a JSON file in a JavaFX TreeView; the structure of the JSON is unknown — like on the JSON viewer site.
I show the tree so the user can select the path of a value (like XPath for XML), so if the JSON is too big, I only need to show the first element of any array in the JSON.
for example,the original data is:
{
name:"tom",
schools:[
{
name:"school1",
tags:["maths","english"]
},
{
name:"school2",
tags:["english","biological"]
},
]
}
I want to show:
again:the structure of json is unknown,it is just one example.
There's no other option than recursively handling the json and create the TreeItem structure based on the element info.
(There's probably a better way of adding the symbols, but I didn't find appropriate icons.)
private static final String INPUT = "{\n"
+ " name:\"tom\",\n"
+ " schools:[\n"
+ " {\n"
+ " name:\"school1\",\n"
+ " tags:[\"maths\",\"english\"]\n"
+ " },\n"
+ " {\n"
+ " name:\"school2\",\n"
+ " tags:[\"english\",\"biological\"]\n"
+ " },\n"
+ " ]\n"
+ "}";
private static final Image JSON_IMAGE = new Image("https://i.stack.imgur.com/1slrh.png");
/**
 * Prefixes the item's display text with the given label (an array index or an
 * object key). If no text exists yet, the label becomes the whole text;
 * otherwise "label : existing" is shown.
 */
private static void prependString(TreeItem<Value> item, String string) {
    Value value = item.getValue();
    if (value.text == null) {
        value.text = string;
    } else {
        value.text = string + " : " + value.text;
    }
}
// Kind of JSON node being rendered. Each kind carries the viewport (region) of
// the shared sprite-sheet image (JSON_IMAGE) used as its tree-cell icon.
private enum Type {
OBJECT(new Rectangle2D(45, 52, 16, 18)),
ARRAY(new Rectangle2D(61, 88, 16, 18)),
PROPERTY(new Rectangle2D(31, 13, 16, 18));
// Icon region within JSON_IMAGE; applied via ImageView.setViewport in the cell factory.
private final Rectangle2D viewport;
private Type(Rectangle2D viewport) {
this.viewport = viewport;
}
}
// Payload stored in each TreeItem: the display text plus the node kind used to
// pick an icon. `text` is deliberately mutable so prependString() can add the
// key/index prefix after the node is built.
private static final class Value {
private String text; // display label; may start null for containers
private final Type type;
public Value(Type type) {
this.type = type;
}
public Value(String text, Type type) {
this.text = text;
this.type = type;
}
}
/**
 * Recursively converts a Gson JsonElement tree into a TreeItem<Value> tree.
 * Array children get their index prepended to their label, object children
 * their property key.
 */
private static TreeItem<Value> createTree(JsonElement element) {
    if (element.isJsonNull()) {
        return new TreeItem<>(new Value("null", Type.PROPERTY));
    }
    if (element.isJsonPrimitive()) {
        JsonPrimitive primitive = element.getAsJsonPrimitive();
        String label = primitive.isString()
                ? '"' + primitive.getAsString() + '"'
                : primitive.getAsString();
        return new TreeItem<>(new Value(label, Type.PROPERTY));
    }
    if (element.isJsonArray()) {
        JsonArray array = element.getAsJsonArray();
        TreeItem<Value> arrayItem = new TreeItem<>(new Value(Type.ARRAY));
        // To show only the FIRST element of each array, cap the bound at
        // Math.min(1, array.size()) instead of array.size().
        int count = array.size();
        for (int index = 0; index < count; index++) {
            TreeItem<Value> child = createTree(array.get(index));
            prependString(child, Integer.toString(index));
            arrayItem.getChildren().add(child);
        }
        return arrayItem;
    }
    JsonObject object = element.getAsJsonObject();
    TreeItem<Value> objectItem = new TreeItem<>(new Value(Type.OBJECT));
    for (Map.Entry<String, JsonElement> property : object.entrySet()) {
        TreeItem<Value> child = createTree(property.getValue());
        prependString(child, property.getKey());
        objectItem.getChildren().add(child);
    }
    return objectItem;
}
// JavaFX entry point: parse the sample JSON, build the TreeItem structure and
// show it in a TreeView whose cells render an icon per node kind.
// NOTE(review): "#Override" should be "@Override" — Stack Overflow formatting artifact.
#Override
public void start(Stage primaryStage) {
JsonParser parser = new JsonParser();
JsonElement root = parser.parse(INPUT);
TreeItem<Value> treeRoot = createTree(root);
TreeView<Value> treeView = new TreeView<>(treeRoot);
treeView.setCellFactory(tv -> new TreeCell<Value>() {
// One ImageView per cell, reused across updates; the viewport selects the icon.
private final ImageView imageView;
{
imageView = new ImageView(JSON_IMAGE);
imageView.setFitHeight(18);
imageView.setFitWidth(16);
imageView.setPreserveRatio(true);
setGraphic(imageView);
}
#Override
protected void updateItem(Value item, boolean empty) {
super.updateItem(item, empty);
if (empty || item == null) {
// Empty cells keep the graphic but hide it to avoid stale icons.
setText("");
imageView.setVisible(false);
} else {
setText(item.text);
imageView.setVisible(true);
imageView.setViewport(item.type.viewport);
}
}
});
final Scene scene = new Scene(treeView);
primaryStage.setScene(scene);
primaryStage.show();
}

get Direction and get path android studio

I am trying to choose a place from the autocomplete and draw a path to it, but when I pick a place from the autocomplete the app crashes.
Please see the
--------- beginning of crash
E/AndroidRuntime: FATAL EXCEPTION: main
Process: com.saoutimohamed.tewsila, PID: 5924
java.lang.IllegalStateException: no included points
at com.google.android.gms.common.internal.Preconditions.checkState(Unknown
Source:8)
at com.google.android.gms.maps.model.LatLngBounds$Builder.build(Unknown
Source:10)
at com.saoutimohamed.tewsila.WelcomeDriver$4.onResponse(WelcomeDriver.java:271)
at retrofit2.ExecutorCallAdapterFactory$ExecutorCallbackCall$1$1.run(ExecutorCallAdapterFactory.java:70)
at android.os.Handler.handleCallback(Handler.java:789)
at android.os.Handler.dispatchMessage(Handler.java:98)
at android.os.Looper.loop(Looper.java:164)
at android.app.ActivityThread.main(ActivityThread.java:6938)
at java.lang.reflect.Method.invoke(Native Method)
at com.android.internal.os.Zygote$MethodAndArgsCaller.run(Zygote.java:327)
at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:1374)
Application terminated.
logcat and tell me what is wrong with my code.
and this is the code
private void getDirection() {
String requestApi;
try {
requestApi = "https://maps.googleapis.com/maps/api/directions/json?" +
"mode=driving&" +
"transit_routing_preference=less_driving&" +
"origin=" + Common.mLastLocation.getLatitude() + "," + Common.mLastLocation.getLongitude() + "&" +
"destination=" + lat+","+lng + "&" +
"key=" + getResources().getString(R.string.google_direction_api);
Log.d("SAOUTI", requestApi);
mService.getPath(requestApi)
.enqueue(new Callback<String>() {
#Override
public void onResponse(Call<String> call, Response<String> response) {
try {
JSONObject jsonObject = new JSONObject(response.body().toString());
JSONArray jsonArray = jsonObject.getJSONArray("routes");
for (int i = 0; i < jsonArray.length(); i++) {
JSONObject route = jsonArray.getJSONObject(i);
JSONObject poly = route.getJSONObject("overview_polyline");
String polyline = poly.getString("points");
polyLineList = decodePoly(polyline);
}
LatLngBounds.Builder builder = new LatLngBounds.Builder();
for (LatLng latLng : polyLineList)
builder.include(latLng);
LatLngBounds bounds = builder.build();
CameraUpdate mCameraUpdate = CameraUpdateFactory.newLatLngBounds(bounds, 5);
mMap.animateCamera(mCameraUpdate);
polylineOptions = new PolylineOptions();
polylineOptions.color(Color.GRAY);
polylineOptions.width(5);
polylineOptions.startCap(new SquareCap());
polylineOptions.endCap(new SquareCap());
polylineOptions.jointType(JointType.ROUND);
polylineOptions.addAll(polyLineList);
greyPolyline = mMap.addPolyline(polylineOptions);
blackPolylineOptions = new PolylineOptions();
blackPolylineOptions.color(Color.BLACK);
blackPolylineOptions.width(5);
blackPolylineOptions.startCap(new SquareCap());
blackPolylineOptions.endCap(new SquareCap());
blackPolylineOptions.jointType(JointType.ROUND);
blackPolyline = mMap.addPolyline(blackPolylineOptions);
mMap.addMarker(new MarkerOptions()
.position(polyLineList.get(polyLineList.size() - 1))
.title("Pickup Location"));
ValueAnimator polyLineAnimator = ValueAnimator.ofInt(0, 100);
polyLineAnimator.setDuration(2000);
polyLineAnimator.setInterpolator(new LinearInterpolator());
polyLineAnimator.addUpdateListener(new ValueAnimator.AnimatorUpdateListener() {
#Override
public void onAnimationUpdate(ValueAnimator valueAnimator) {
List<LatLng> points = greyPolyline.getPoints();
int percentValue = (int) valueAnimator.getAnimatedValue();
int size = points.size();
int newPoints = (int) (size * (percentValue / 100.0f));
List<LatLng> p = points.subList(0, newPoints);
blackPolyline.setPoints(p);
}
});
polyLineAnimator.start();
carMarker = mMap.addMarker(new MarkerOptions().position(currentPosition)
.flat(true)
.icon(BitmapDescriptorFactory.fromResource(R.mipmap.tewsila_car)));
handler = new Handler();
} catch (JSONException e) {
e.printStackTrace();
index = -1;
next = 1;
handler.postDelayed(drawPathRunnable, 3000);
}
}
#Override
public void onFailure(Call<String> call, Throwable t) {
Toast.makeText(WelcomeDriver.this, "" + t.getMessage(), Toast.LENGTH_SHORT).show();
}
});
} catch (Exception e) {
e.printStackTrace();
}
}
the line that not responding in the logcat is (LatLngBounds bounds = builder.build();)
I had this same issue, and I removed the
"+getResources().getString(R.string.google_direction_api)" part from the requestApi. It worked perfectly for me, but I don't know the reason.
requestApi = "https://maps.googleapis.com/maps/api/directions/json?" +
"mode=driving&" +
"transit_routing_preference=less_driving&" +
"origin=" + Common.mLastLocation.getLatitude() + "," + Common.mLastLocation.getLongitude() + "&" +
"destination=" + lat+","+lng + "&" +
"key=";

Facebook deserializing

I need help deserializing the JSON i get back from facebook.
I've been trying numerous ways to parse it but no success.
The only thing i seem to be parsing is the number of friends who have highscores, which is 2 :
The issue comes when I try to parse the name and score of the people in the json.
InvalidCastException: Cannot cast from source type to destination type.
I/Unity (21869): at FacebookScript.GETCallback (IGraphResult result) [0x00000] in <filename unknown>:0
I/Unity (21869): at Facebook.Unity.AsyncRequestString+<Start>c__Iterator1.MoveNext () [0x00000] in <filename unknown>:0
The raw result which I recieve (seen from logcat):
Raw:{"data":[{"score":60,"user":{"name":"JOHNY JOHN","id":"0000000000000"}},{"score":50,"user":{"name":"JOHN JOHN","id":"0000000000000"}}]}
Here is my code:
/// <summary>
/// Handles the Facebook Graph scores response. Raw shape:
/// {"data":[{"score":60,"user":{"name":"...","id":"..."}}, ...]}
/// Bug fix: "data" deserializes to a List&lt;object&gt; whose ELEMENTS are
/// Dictionary&lt;string, object&gt; — enumerating the list as
/// KeyValuePair&lt;string, object&gt; (as the original code did) is what threw
/// "InvalidCastException: Cannot cast from source type to destination type."
/// </summary>
public void GETCallback(IGraphResult result)
{
    if (result.ResultDictionary != null)
    {
        Debug.Log("Raw:" + result.RawResult);
        var dict = Json.Deserialize(result.RawResult) as Dictionary<string, object>;
        var friendList = (List<object>)(dict["data"]);
        int _friendCount = friendList.Count;
        Debug.Log("Items found:" + _friendCount);
        List<string> friendIDsFromFB = new List<string>();
        for (int i = 0; i < _friendCount; i++)
        {
            // Each list element is itself a dictionary: {"score":..., "user":{...}}.
            var entry = friendList[i] as Dictionary<string, object>;
            if (entry == null)
            {
                continue; // skip malformed entries instead of crashing
            }
            string score = entry["score"].ToString();
            // "user" is a nested object holding the friend's name and id.
            var user = entry["user"] as Dictionary<string, object>;
            string friendFBID = user != null ? getDataValueForKey(user, "id") : "";
            string friendName = user != null ? getDataValueForKey(user, "name") : "";
            Debug.Log(i + "/" + _friendCount + "|" + friendFBID + "|" + friendName + "|" + score);
            friendIDsFromFB.Add(friendFBID);
        }
    }
    else
    {
        NPBinding.UI.ShowToast("result.ResultDictionary is null", VoxelBusters.NativePlugins.eToastMessageLength.LONG);
    }
}
/// <summary>
/// Returns the string stored under <paramref name="key"/>, or "" when the key
/// is absent from the dictionary.
/// </summary>
private string getDataValueForKey(Dictionary<string, object> dict, string key)
{
    object value;
    return dict.TryGetValue(key, out value) ? (string)value : "";
}
I'm assuming that you're using MiniJSON (at least the version that used to come with the FB SDK)
N.B. Not tested for typos. Typing straight here in SO
// Answer snippet (MiniJSON): each element of "data" is a Dictionary<string, object>,
// so cast per element instead of enumerating the list as KeyValuePairs.
var dict = Json.Deserialize(result.RawResult) as Dictionary<string, object>;
var datas = (List<object>)dict["data"];
foreach(var iterator in datas) {
var data = iterator as Dictionary<string, object>;
Debug.Log("Score is :: "+data["score"]);
//var score = int.Parse((string)data["score"]); //Parse to int after casting to string if you want the value
var userData = data["user"] as Dictionary<string, object>;
Debug.Log("Name is :: "+userData["name"]);
Debug.Log("ID is :: "+userData["id"]);
//var name = (string)userData["name"]; //Get the name
//var id = (string)userData["id"]; //...and the ID
}

Why am I getting negative compressions, for Gzip, Snappy, Smile?

I was trying to investigate which compression was suitable for compressing JSON strings in my application. The aim here is to compress the entity JSON before persisting it to Redis.
Here are my results
Gzip Compression Trial
compression percent : -8.7719345 %
to json time : 151 microseconds
pure saveable compression : 3326 microseconds
gzip compression+convert to json time : 3477 microseconds
gzip de-compression to string time : 537 microseconds
Snappy Compression Trial
compression percent : -22.807014 %
to json time : 58 microseconds
pure saveable compression : 259490 microseconds
snappy compression+convert to json time : 259549 microseconds
snappy de-compression to string time : 84 microseconds
Smile (msgpack) Compression Trial
compression percent : -24.561401 %
smile compression time : 3314 microseconds
smile de-compression time : n/a
However, what is quite odd is that Snappy is supposed to be much faster (from what I read); only the decompression is fast, while the compression takes longer.
Also, strangely, Smile is producing a longer persistable string.
Can anybody point out why or what I am doing wrong here?
Here is my code for this trial
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.dataformat.smile.SmileFactory;
import com.fasterxml.jackson.dataformat.smile.SmileGenerator;
import com.fasterxml.jackson.dataformat.smile.SmileParser;
import org.xerial.snappy.Snappy;
import javax.xml.bind.DatatypeConverter;
import java.io.*;
import java.util.concurrent.TimeUnit;
import java.util.zip.GZIPInputStream;
import java.util.zip.GZIPOutputStream;
public class CompressionTrials {
// Entry point: runs each compression trial once against the same sample DTO.
public static void main(String[] args) {
jsonCompressionTrial();
}
/** Builds one sample DTO and runs it through the GZIP, Snappy and Smile trials. */
public static void jsonCompressionTrial(){
    SimpleDto sample = new SimpleDto();
    sample.setFname("MyFirstName");
    sample.setLname("MyLastName");
    sample.setDescription("This is a long description. I am trying out compression options for JSON. Hopefully the results will help me decide on one approach");
    sample.setCity("MyCity");
    sample.setAge(36);
    sample.setZip(2424);
    gzipCompressionTrial(sample);
    snappyCompressionTrial(sample);
    smileCompressionTrial(sample);
}
/**
 * Round-trips the sample DTO through JSON + GZIP and prints timing numbers.
 *
 * NOTE(review): the "compression percent" comes out negative because the
 * compressed bytes are re-encoded via bytesToStringBase64() before the length
 * comparison (Base64 adds ~33% overhead) and because GZIP's fixed header
 * overhead dominates on an input this small. Compare raw byte lengths, or use
 * a much larger payload, to get a meaningful ratio.
 */
public static void gzipCompressionTrial(SimpleDto simpleDto){
    if(simpleDto == null){
        return;
    }
    ObjectMapper mapper = new ObjectMapper();
    String originalJsonString = null;
    long compressionAndConversionMicroSeconds = 0;
    long toJsonMicroSeconds = 0;
    long compressionMicroSeconds = 0;
    long decompressionMicroSeconds = 0;
    SimpleDto restoredDto = null;
    String restoredDtoJson = null;
    try {
        // Untimed warm-up so Jackson's first-use setup cost stays out of the numbers.
        mapper.writeValueAsString(simpleDto);
        long startTimeConversion = System.nanoTime();
        originalJsonString = mapper.writeValueAsString(simpleDto);
        long endConversionTime = System.nanoTime();
        byte[] compressedBytes = gzipCompress(originalJsonString);
        String compressedStringToSave = bytesToStringBase64(compressedBytes);
        long endTimeCompression = System.nanoTime();
        toJsonMicroSeconds = TimeUnit.NANOSECONDS.toMicros(endConversionTime - startTimeConversion);
        compressionMicroSeconds = TimeUnit.NANOSECONDS.toMicros(endTimeCompression - endConversionTime);
        compressionAndConversionMicroSeconds = TimeUnit.NANOSECONDS.toMicros(endTimeCompression - startTimeConversion);
        long startTimeDecompression = System.nanoTime();
        String unCompressedString = gzipDecompress(compressedBytes);
        long endTimeDecompression = System.nanoTime();
        decompressionMicroSeconds = TimeUnit.NANOSECONDS.toMicros(endTimeDecompression - startTimeDecompression);
        int originalLength = originalJsonString.length();
        int compressedLength = compressedStringToSave.length();
        float compressionPercent = 100 - (((float) compressedLength / (float) originalLength) * 100);
        // Bug fix: verify the round trip against the DECOMPRESSED string, not the
        // original — parsing originalJsonString never exercised gzipDecompress().
        restoredDto = mapper.readValue(unCompressedString, SimpleDto.class);
        restoredDtoJson = mapper.writeValueAsString(restoredDto);
        System.out.println("============================================================================================== ");
        System.out.println(" Gzip Compression Trial");
        System.out.println("----------------------------------------------------------------------------------------------");
        System.out.println("compression percent : " + compressionPercent + " %" );
        System.out.println("to json time : " + toJsonMicroSeconds + " microseconds" );
        System.out.println(" pure saveable compression : " + compressionMicroSeconds + " microseconds" );
        System.out.println("gzip compression+convert to json time : " + compressionAndConversionMicroSeconds + " microseconds" );
        System.out.println("gzip de-compression to string time : " + decompressionMicroSeconds + " microseconds" );
        System.out.println("============================================================================================== ");
    } catch (IOException e) {
        e.printStackTrace();
    } catch (Exception e) {
        e.printStackTrace();
    }
}
/*
 * Serializes the sample DTO with Jackson's Smile binary format and prints the
 * timing / size numbers.
 *
 * NOTE(review): Smile is a compact binary JSON encoding, not a general-purpose
 * compressor, and the output bytes are re-encoded via bytesToStringBase64()
 * before the length comparison. Base64 adds roughly 33% overhead, which is why
 * the measured "compression percent" comes out negative on this small payload.
 * Compare raw byte counts instead for a meaningful ratio.
 */
public static void smileCompressionTrial(SimpleDto simpleDto){
if(simpleDto == null){
return;
}
ObjectMapper mapper = new ObjectMapper();
ObjectMapper smileMapper = getSmileObjectMapper();
String originalJsonString = null;
try {
originalJsonString = mapper.writeValueAsString(simpleDto);
} catch (JsonProcessingException e) {
e.printStackTrace();
return;
}
long compressionMicroSeconds = 0;
long decompressionMicroSeconds = 0;
SimpleDto restoredDto = null;
String restoredDtoJson = null;
try {
// Untimed warm-up call (presumably to exclude Jackson first-use setup) — TODO confirm intent.
mapper.writeValueAsString(simpleDto);
long startTimeCompression = System.nanoTime();
byte[] compressedBytes = smileMapper.writeValueAsBytes(simpleDto);
//String compressedStringToSave = new String(compressedBytes, "UTF-8");// bytesToStringBase64(compressedBytes);
String compressedStringToSave = bytesToStringBase64(compressedBytes);
// System.out.println("smile compressed : " + compressedStringToSave);
// System.out.println("original length : " + originalJsonString.length() );
// System.out.println("length : " + compressedStringToSave.length() );
long endTimeCompression = System.nanoTime();
compressionMicroSeconds = TimeUnit.NANOSECONDS.toMicros((endTimeCompression-startTimeCompression)); //TimeUnit.MILLISECONDS.convert((endTimeCompression - startTimeCompression), TimeUnit.NANOSECONDS);
// long startTimeDecompression = System.nanoTime();
// String unCompressedString = gzipDecompress(compressedBytes);
// long endTimeDecompression = System.nanoTime();
// decompressionMicroSeconds = TimeUnit.NANOSECONDS.toMicros(endTimeDecompression-startTimeDecompression); // TimeUnit.MILLISECONDS.convert((endTimeDecompression - startTimeDecompression), TimeUnit.NANOSECONDS);
// Lengths compared are of the Base64-encoded text vs. the original JSON text.
int originalLength = originalJsonString.toString().length();
int compressedLength = compressedStringToSave.toString().length();
float compressionPercent = 100 - (( (float)compressedLength / (float)originalLength ) * 100);
// Round trip: decode the Base64 text back to bytes and parse with the Smile mapper.
restoredDto = smileMapper.readValue( stringToBytesBase64(compressedStringToSave) , SimpleDto.class);
//restoredDto = smileMapper.readValue( compressedStringToSave.getBytes("UTF-8") , SimpleDto.class);
restoredDtoJson = mapper.writeValueAsString(restoredDto);
System.out.println("============================================================================================== ");
System.out.println(" Smile Compression Trial");
System.out.println("----------------------------------------------------------------------------------------------");
// System.out.println("origin dto as json : " + originalJsonString );
// System.out.println( "original dto-json string length : " + originalLength);
// System.out.println( "compressed string length : " + compressedLength );
// System.out.println( "uncompressed json string : n/a" /*+ unCompressedString*/ );
// System.out.println( " restored dto as json : " + restoredDtoJson );
// System.out.println( " is before-compressed = uncompressed : n/a " /*+ unCompressedString.equals(originalJsonString)*/ );
// System.out.println( " is restored object json = original object json : " + originalJsonString.equals(restoredDtoJson) );
// System.out.println("----------------------------------------------------------------------------------------------");
System.out.println("compression percent : " + compressionPercent + " %" );
System.out.println("smile compression time : " + compressionMicroSeconds + " microseconds" );
System.out.println("smile de-compression time : n/a " /*+ decompressionMicroSeconds + " microseconds"*/ );
System.out.println("============================================================================================== ");
} catch (IOException e) {
e.printStackTrace();
} catch (Exception e) {
e.printStackTrace();
}
}
/**
 * Measures Snappy compression / decompression of the JSON form of the given DTO
 * and prints the timings (microseconds) plus the achieved compression percentage.
 * Sizes are compared on the Base64 form, since that is what would be persisted.
 *
 * @param simpleDto DTO to serialize and compress; the method is a no-op when null
 */
public static void snappyCompressionTrial(SimpleDto simpleDto) {
    if (simpleDto == null) {
        return;
    }
    ObjectMapper mapper = new ObjectMapper();
    String originalJsonString = null;
    long compressionAndConversionMicroSeconds = 0;
    long toJsonMicroSeconds = 0;
    long compressionMicroSeconds = 0;
    long decompressionMicroSeconds = 0;
    SimpleDto restoredDto = null;
    String restoredDtoJson = null;
    try {
        // Warm-up serialization so the first timed call below is not penalized
        // by Jackson's one-time class-introspection cost.
        mapper.writeValueAsString(simpleDto);

        long startTimeCompressionAndConversion = System.nanoTime();
        originalJsonString = mapper.writeValueAsString(simpleDto);
        long endConversionTime = System.nanoTime();
        byte[] compressedBytes = snappyCompress(originalJsonString);
        String compressedStringToSave = bytesToStringBase64(compressedBytes);
        long endTimeCompression = System.nanoTime();

        toJsonMicroSeconds = TimeUnit.NANOSECONDS.toMicros(endConversionTime - startTimeCompressionAndConversion);
        compressionMicroSeconds = TimeUnit.NANOSECONDS.toMicros(endTimeCompression - endConversionTime);
        compressionAndConversionMicroSeconds = TimeUnit.NANOSECONDS.toMicros(endTimeCompression - startTimeCompressionAndConversion);

        // The decompression timing deliberately includes the byte[] -> String
        // decode, mirroring what a real read path would do.
        long startTimeDecompression = System.nanoTime();
        byte[] unCompressedBytes = snappyUncompress(compressedBytes);
        String unCompressedString = bytesToStringUtf8(unCompressedBytes);
        long endTimeDecompression = System.nanoTime();
        decompressionMicroSeconds = TimeUnit.NANOSECONDS.toMicros(endTimeDecompression - startTimeDecompression);

        int originalLength = originalJsonString.length();
        int compressedLength = compressedStringToSave.length();
        float compressionPercent = 100 - (((float) compressedLength / (float) originalLength) * 100);

        // Round-trip sanity check: rebuild the DTO from the decompressed bytes.
        restoredDto = mapper.readValue(unCompressedBytes, SimpleDto.class);
        restoredDtoJson = mapper.writeValueAsString(restoredDto);

        System.out.println("============================================================================================== ");
        System.out.println(" Snappy Compression Trial");
        System.out.println("----------------------------------------------------------------------------------------------");
        System.out.println("compression percent : " + compressionPercent + " %");
        System.out.println("to json time : " + toJsonMicroSeconds + " microseconds");
        System.out.println(" pure saveable compression : " + compressionMicroSeconds + " microseconds");
        // BUG FIX: the next two labels previously said "gzip" inside the Snappy trial.
        System.out.println("snappy compression+convert to json time : " + compressionAndConversionMicroSeconds + " microseconds");
        System.out.println("snappy de-compression to string time : " + decompressionMicroSeconds + " microseconds");
        System.out.println("============================================================================================== ");
    } catch (IOException e) {
        e.printStackTrace();
    } catch (Exception e) {
        e.printStackTrace();
    }
}
/**
 * Builds an ObjectMapper backed by a Smile (binary JSON) factory: header
 * written but no end marker, shared-name/shared-string back-references
 * enabled, binary encoded as 7-bit, and parsing tolerant of a missing header.
 */
private static ObjectMapper getSmileObjectMapper() {
    SmileFactory factory = new SmileFactory();
    factory.configure(SmileGenerator.Feature.WRITE_HEADER, true);
    factory.configure(SmileGenerator.Feature.WRITE_END_MARKER, false);
    factory.configure(SmileGenerator.Feature.CHECK_SHARED_NAMES, true);
    factory.configure(SmileGenerator.Feature.CHECK_SHARED_STRING_VALUES, true);
    factory.configure(SmileGenerator.Feature.ENCODE_BINARY_AS_7BIT, true);
    factory.configure(SmileParser.Feature.REQUIRE_HEADER, false);
    return new ObjectMapper(factory);
}
/**
 * GZIP-compresses the given string, encoding it as UTF-8.
 *
 * @param str text to compress; may be null
 * @return compressed bytes, or null when {@code str} is null or empty
 * @throws IOException if compression fails
 */
public static byte[] gzipCompress(String str) throws IOException {
    if (str == null || str.length() == 0) {
        return null;
    }
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    // try-with-resources guarantees the GZIP stream is closed (and its trailer
    // flushed into `out`) even if write() throws.
    try (GZIPOutputStream gzip = new GZIPOutputStream(out)) {
        // BUG FIX: the original used str.getBytes() (platform default charset),
        // while gzipDecompress always decodes UTF-8 - a silent mismatch on any
        // JVM whose default charset is not UTF-8.
        gzip.write(str.getBytes(java.nio.charset.StandardCharsets.UTF_8));
    }
    return out.toByteArray();
}
/**
 * Decompresses GZIP bytes back to a UTF-8 string.
 *
 * BUG FIX: the original rebuilt the text with BufferedReader.readLine() in a
 * loop, which silently dropped every line terminator (any "\n" in the original
 * text was lost) and did O(n^2) string concatenation. All bytes are now copied
 * out and decoded in one step, and both streams are closed via
 * try-with-resources.
 *
 * @param bytes gzip-compressed data; may be null
 * @return the decompressed text, or null when {@code bytes} is null or empty
 * @throws Exception if the data is not valid gzip
 */
public static String gzipDecompress(byte[] bytes) throws Exception {
    if (bytes == null || bytes.length == 0) {
        return null;
    }
    try (GZIPInputStream gis = new GZIPInputStream(new ByteArrayInputStream(bytes));
         ByteArrayOutputStream out = new ByteArrayOutputStream()) {
        byte[] buffer = new byte[4096];
        int read;
        while ((read = gis.read(buffer)) != -1) {
            out.write(buffer, 0, read);
        }
        return new String(out.toByteArray(), "UTF-8");
    }
}
/**
 * Compresses the given string with Snappy.
 *
 * NOTE(review): the string-to-bytes encoding is delegated to the Snappy
 * library (presumably UTF-8, matching bytesToStringUtf8 on the read path) -
 * verify against the snappy-java docs.
 *
 * @param stringData text to compress
 * @return snappy-compressed bytes
 * @throws IOException if the Snappy compressor fails
 */
public static byte[] snappyCompress(String stringData) throws IOException {
return Snappy.compress(stringData);
}
/**
 * Decompresses Snappy-compressed bytes; callers in this class decode the
 * result to text via bytesToStringUtf8.
 *
 * @param bytes snappy-compressed data
 * @return the raw uncompressed bytes
 * @throws IOException if the data is not valid Snappy
 */
public static byte[] snappyUncompress(byte[] bytes) throws IOException {
return Snappy.uncompress(bytes);
}
/**
 * Encodes bytes as a standard Base64 string (with padding).
 *
 * Replaced javax.xml.bind.DatatypeConverter with java.util.Base64: available
 * since Java 8 and still present on Java 11+, where the JAXB classes were
 * removed from the JDK. Output is identical for all inputs.
 */
private static String bytesToStringBase64(byte[] bytes) {
    return java.util.Base64.getEncoder().encodeToString(bytes);
}
/**
 * Decodes a Base64 string back to bytes.
 *
 * Replaced javax.xml.bind.DatatypeConverter (removed from the JDK in Java 11)
 * with java.util.Base64. The MIME decoder is used because, like
 * parseBase64Binary, it ignores characters outside the Base64 alphabet
 * (e.g. line breaks) instead of throwing.
 */
private static byte[] stringToBytesBase64(String dataString) {
    return java.util.Base64.getMimeDecoder().decode(dataString);
}
/**
 * Decodes bytes as UTF-8 text.
 *
 * Uses StandardCharsets.UTF_8 instead of the charset-name lookup: no per-call
 * lookup and no real exception path. The declared (now unreachable)
 * UnsupportedEncodingException is kept so existing callers that catch it
 * still compile.
 */
private static String bytesToStringUtf8(byte[] bytes) throws UnsupportedEncodingException {
    return new String(bytes, java.nio.charset.StandardCharsets.UTF_8);
}
/**
 * Encodes text as UTF-8 bytes.
 *
 * Uses StandardCharsets.UTF_8 instead of the charset-name lookup: no per-call
 * lookup and no real exception path. The declared (now unreachable)
 * UnsupportedEncodingException is kept so existing callers that catch it
 * still compile.
 */
private static byte[] stringToBytesUtf8(String dataString) throws UnsupportedEncodingException {
    return dataString.getBytes(java.nio.charset.StandardCharsets.UTF_8);
}
}
Environment details: Windows 7, i7 2.4 GHz processor, 16 GB RAM, Java 8
Versions of libraries used:
<dependency>
<groupId>com.fasterxml.jackson.dataformat</groupId>
<artifactId>jackson-dataformat-smile</artifactId>
<version>2.6.4</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
<version>2.6.4</version>
</dependency>
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
<scope>provided</scope>
<version>1.16.6</version>
</dependency>
<dependency>
<groupId>org.xerial.snappy</groupId>
<artifactId>snappy-java</artifactId>
<version>1.1.2</version>
</dependency>
*** This is not a benchmark, but just a personal trial to decide compression strategy for my use-case.
Please let me know if you see any mistakes in my trial.
Update:
Below is a simpler code to try
/**
 * Round-trips a sample string through gzipCompress / gzipDecompress (via the
 * Base64 helpers) and prints the compressed payload plus before/after lengths.
 */
public static void stringCompressionTrial() {
    String string = "I am what I am hhhhhhhhhhhhhhhhhhhhhhhhhhhhh"
            + "bjggujhhhhhhhhh"
            + "rggggggggggggggggggggggggg"
            + "esfffffffffffffffffffffffffffffff"
            + "esffffffffffffffffffffffffffffffff"
            + "esfekfgy enter code here`etd`enter code here wdd"
            + "heljwidgutwdbwdq8d"
            + "skdfgysrdsdnjsvfyekbdsgcu"
            + "jbujsbjvugsduddbdj";
    // uncomment below to use the json
    // SimpleDto originalDto = new SimpleDto();
    // originalDto.setFname("MyFirstName");
    // originalDto.setLname("MySecondName");
    // originalDto.setDescription("This is a long description. I am trying out compression options for JSON. Hopefully the results will help me decide on one approach");
    // originalDto.setCity("MyCity");
    // originalDto.setAge(36);
    // originalDto.setZip(1111);
    // ObjectMapper mapper = new ObjectMapper();
    // try {
    //     string = mapper.writeValueAsString(originalDto);
    // } catch (JsonProcessingException e) {
    //     e.printStackTrace();
    // }
    try {
        byte[] compressedBytes = gzipCompress(string);
        String compressedString = bytesToStringBase64(compressedBytes);
        // BUG FIX: this label previously said "after gzipDecompress:" although
        // it prints the *compressed* (Base64) payload.
        System.out.println("after gzipCompress:" + compressedString);
        String decompressedString = gzipDecompress(stringToBytesBase64(compressedString));
        System.out.println("decompressed string : " + decompressedString);
        System.out.println(" original string length : " + string.length());
        System.out.println(" compressedString length : " + compressedString.length());
    } catch (IOException e) {
        e.printStackTrace();
    } catch (Exception e) {
        e.printStackTrace();
    }
}
That is because you are trying to compress short strings: compression needs more data in order to find redundancy and to take advantage of skewed symbol frequencies.