I am trying to produce messages to a single topic that has 2 partitions, but all of the messages go to partition number 2 only.
I would expect a producer stream to distribute the messages across all partitions.
const kafka = require('kafka-node');
const { Transform } = require('stream');
const _ = require('lodash');

const client = new kafka.KafkaClient({ kafkaHost: 'localhost:9092' }),
  streamproducer = new kafka.ProducerStream({ kafkaClient: client });
const stdinTransform = new Transform({
  objectMode: true,
  decodeStrings: true,
  transform(text, encoding, callback) {
    let num = parseInt(text);
    let message = { num: num, method: 'two' };
    console.log('pushing message');
    callback(null, {
      topic: 'topic356',
      messages: JSON.stringify(message)
    });
  }
});
stdinTransform.pipe(streamproducer);

function send() {
  var message = new Date().toString();
  stdinTransform.write([{ messages: [message] }]);
}

setInterval(send, 100);
ConsumerGroup:
const { ConsumerGroup } = require('kafka-node');

var consumerOptions = {
  kafkaHost: '127.0.0.1:9092',
  groupId: 'ExampleTestGroup',
  sessionTimeout: 15000,
  protocol: ['roundrobin'],
  fromOffset: 'latest' // equivalent of auto.offset.reset; valid values are 'none', 'latest', 'earliest'
};

var topics = 'topic356';
var consumerGroup = new ConsumerGroup(Object.assign({ id: 'consumer1' }, consumerOptions), topics);
consumerGroup.on('data', onMessage);

var consumerGroup2 = new ConsumerGroup(Object.assign({ id: 'consumer2' }, consumerOptions), topics);
consumerGroup2.on('data', onMessage);
consumerGroup2.on('connect', function () {
  setTimeout(function () {
    consumerGroup2.close(true, function (error) {
      console.log('consumer2 closed', error);
    });
  }, 25000);
});

function onMessage(message) {
  console.log(` partition: ${message.partition} `);
}
Do you produce messages with a key? In Kafka, messages with the same key are published to the same partition.
Also, set partitionerType in the producer options; the default is 0.
The partitioner types are: default = 0, random = 1, cyclic = 2, keyed = 3, custom = 4.
new kafka.Producer(new kafka.KafkaClient({ kafkaHost: 'localhost:9092' }), {
  partitionerType: 1
});
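Since the question writes through a ProducerStream rather than a plain Producer, here is a minimal sketch of the same idea for the stream API. It assumes kafka-node's ProducerStream forwards a producer options object to the underlying Producer (check this against your kafka-node version):

const kafka = require('kafka-node');

// Cyclic partitioner (2) round-robins writes across all partitions.
const streamproducer = new kafka.ProducerStream({
  kafkaClient: { kafkaHost: 'localhost:9092' },
  producer: { partitionerType: 2 }
});

streamproducer.write({
  topic: 'topic356',
  messages: JSON.stringify({ num: 1, method: 'two' })
  // with partitionerType 3 (keyed) you would also set a key here,
  // e.g. key: 'sensor-42' (hypothetical); payloads with equal keys
  // always map to the same partition
});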
https://github.com/SOHU-Co/kafka-node/issues/1094
I created a separate class for each sprite; examples are DeviceOne and DeviceTwo. All went well when creating and loading only 1 sprite. However, when I call DeviceTwo and reload the dataVizExtension, I always get the following error:
Uncaught TypeError: Cannot read properties of undefined (reading 'dbId')
This is the code for DeviceOne; the class for DeviceTwo is basically the same, the only difference being the sprite images.
export class DeviceOne {
  constructor(viewer, dataVizExtn) {
    this.viewer = viewer;
    this.dataVizExtn = null;
    this.DataVizCore = null;
    this.viewableType = null;
    this.viewableData = null;
    this.baseURL = "http://localhost:3000/assets/images/sprite/";
    this.sensorPositions = {
      Dasloop: {
        x: 10,
        y: -3,
        z: 20,
      },
      Warning: {
        x: 0,
        y: 10,
        z: 3,
      },
    };
    this.dasloops = [
      "img_gps_dasloop_online.svg",
      "img_gps_dasloop_online-1.svg",
      "img_gps_dasloop_online-2.svg",
      "img_gps_dasloop_online-3.svg",
    ];
    this.warnings = ["ic_warning.svg", "ic_warning-2.svg"];
    this.startAnim1 = 0;
    this.startAnim2 = 0;
    this.startAnim3 = 0;
    this.infoChart = new InfoChart(this.viewer, this.options);
  }
  /**
   * @returns {ViewableData} the resulting viewable data that contains all viewables (icons)
   */
  async onSpriteLoadedToScene() {
    this.dataVizExtn = await this.viewer.loadExtension(
      "Autodesk.DataVisualization"
    );
    const dataVizCore = Autodesk.DataVisualization.Core;
    this.onSpriteHovering = this.onSpriteHovering.bind(this);
    this.viewer.addEventListener(
      dataVizCore.MOUSE_HOVERING,
      this.onSpriteHovering
    );
    this.onSpriteClicked = this.onSpriteClicked.bind(this);
    this.viewer.addEventListener(dataVizCore.MOUSE_CLICK, this.onSpriteClicked);
    const viewableType = dataVizCore.ViewableType.SPRITE;
    const spriteColor = new THREE.Color(0xffffff);
    const highlightedColor = new THREE.Color(0xe0e0ff);
    const spriteIconUrl = `${this.baseURL}${"img_gps_dasloop_online.svg"}`;
    const dasloopStyles = new dataVizCore.ViewableStyle(
      viewableType,
      spriteColor,
      spriteIconUrl,
      highlightedColor,
      `${this.baseURL}${this.dasloops[0]}`,
      this.dasloops.map((dasloop) => `${this.baseURL}${dasloop}`)
    );
    const warningStyles = new dataVizCore.ViewableStyle(
      viewableType,
      spriteColor,
      `${this.baseURL}${"ic_warning.svg"}`,
      highlightedColor,
      `${this.baseURL}${this.warnings[0]}`,
      this.warnings.map((warning) => `${this.baseURL}${warning}`)
    );
    this.viewableData = new dataVizCore.ViewableData();
    this.viewableData.spriteSize = 30;
    const simulationData = [
      { position: { x: 0, y: 0, z: 10 } },
      { position: { x: 5, y: -3, z: 10 } },
    ];
    const warningData = [{ position: { x: 0, y: 0, z: 0 } }];
    simulationData.forEach((myData, index) => {
      const dbId = 10 + index;
      const position = myData.position;
      const viewable = new dataVizCore.SpriteViewable(
        position,
        dasloopStyles,
        dbId
      );
      this.viewableData.addViewable(viewable);
    });
    warningData.forEach((myData, index) => {
      const dbId = 15 + index;
      const position = myData.position;
      const viewableWarning = new dataVizCore.SpriteViewable(
        position,
        warningStyles,
        dbId
      );
      this.viewableData.addViewable(viewableWarning);
    });
    await this.viewableData.finish();
    this.dataVizExtn.addViewables(this.viewableData);
    this.spriteToUpdate = this.dataVizExtn.viewableData.viewables.map(
      (sprite) => sprite.dbId
    );
    this.animate = setInterval(this.getAnimateSprite.bind(this), 500);
  }
  getAnimateSprite() {
    this.dataVizExtn.invalidateViewables(this.spriteToUpdate, (viewable) => {
      switch (viewable.dbId) {
        case 10:
          return {
            url: `${this.baseURL}${
              this.dasloops[this.startAnim1++ % this.dasloops.length]
            }`,
          };
        case 15:
          return {
            url: `${this.baseURL}${
              this.warnings[this.startAnim2++ % this.warnings.length]
            }`,
          };
        case 11:
          return {
            url: `${this.baseURL}${
              this.dasloops[this.startAnim3++ % this.dasloops.length]
            }`,
          };
        default:
          break;
      }
    });
  }
}
Sorry, it's a little hard for me to tell where the error comes from with the above code snippet, but you mentioned reloading the dataVizExtension. So I would advise you to check your this.animate = setInterval(this.getAnimateSprite.bind(this), 500);.
Did you clear the interval with clearInterval(this.animate) while unloading, before reloading your extension?
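If not, here is a minimal sketch of such a cleanup, assuming a hypothetical dispose() method on DeviceOne that you call before unloading or reloading the extension:

dispose() {
  // Stop the animation timer so invalidateViewables is no longer
  // called against viewables that were removed with the extension.
  if (this.animate) {
    clearInterval(this.animate);
    this.animate = null;
  }
  // Also drop the sprite listeners registered in onSpriteLoadedToScene().
  const dataVizCore = Autodesk.DataVisualization.Core;
  this.viewer.removeEventListener(dataVizCore.MOUSE_HOVERING, this.onSpriteHovering);
  this.viewer.removeEventListener(dataVizCore.MOUSE_CLICK, this.onSpriteClicked);
}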
Currently we are trying to feed Uber trip data, which includes time of day and ride fare, into our TensorFlow.js model. When we run the model in the browser, the points show up on our scatterplot, but during training the loss and mean squared error values do not show up, and most importantly our model does not display a prediction line.
var userData = [
  {
    "City": "San Francisco",
    "Product_Type": "UberEATS Marketplace",
    "Trip_or_Order_Status": "COMPLETED",
    "Request_Time": "2019-06-16 04:10:44 +0000 UTC",
    "Begin_Trip_Time": "2019-06-16 04:44:40 +0000 UTC",
    "Begin_Trip_Lat": "37.7352602",
    "Begin_Trip_Lng": "-122.4203465",
    "Begin_Trip_Address": "",
    "Dropoff_Time": "2019-06-16 04:44:40 +0000 UTC",
    "Dropoff_Lat": "37.7352602",
    "Dropoff_Lng": "-122.4203465",
    "Dropoff_Address": "",
    "Distance_miles": "2.04",
    "Fare_Amount": "32.34",
    "Fare_Currency": "USD"
  }...]
async function getData() {
  const carsData = await userData;
  // Map out the values for each entry and filter out the list items
  // that do not have a day or a pay value.
  const cleaned = carsData.map(car => ({
      timeInMinutes: calculateMins(car.Request_Time),
      pay_rate: normalizeUberPrice(car.Distance_miles, car.Fare_Amount),
    }))
    .filter(car => (car.day != null && car.pay != null));
  return cleaned;
}
async function run() {
  const data = await getData();
  const values = data.map(d => ({
    x: d.day,
    y: d.pay,
  }));
  tfvis.render.scatterplot(
    { name: 'Horsepower v MPG' },
    { values },
    {
      xAxisDomain: [0, 1600],
      yAxisDomain: [0, 10],
      xLabel: 'Day',
      yLabel: 'Pay',
      height: 300
    }
  );
  const model = createModel();
  tfvis.show.modelSummary({ name: 'Model Summary' }, model);
  // Convert the data to a form we can use for training.
  const tensorData = convertToTensor(data);
  console.log(tensorData);
  const { inputs, labels } = tensorData;
  // Train the model
  await trainModel(model, inputs, labels);
  console.log('Done Training');
  testModel(model, data, tensorData);
}
function createModel() {
  const model = tf.sequential();
  model.add(tf.layers.dense({ inputShape: [1], units: 25, useBias: true }));
  model.add(tf.layers.dense({ units: 50, activation: "sigmoid" }));
  model.add(tf.layers.dense({ units: 1, useBias: true }));
  return model;
}
function convertToTensor(data) {
  return tf.tidy(() => {
    tf.util.shuffle(data);
    const inputs = data.map(d => d.pay);
    const labels = data.map(d => d.day);
    const inputTensor = tf.tensor2d(inputs, [inputs.length, 1]);
    const labelTensor = tf.tensor2d(labels, [labels.length, 1]);
    // Step 3. Normalize the data to the range 0 - 1 using min-max scaling.
    const inputMax = inputTensor.max();
    const inputMin = inputTensor.min();
    const labelMax = labelTensor.max();
    const labelMin = labelTensor.min();
    const normalizedInputs = inputTensor.sub(inputMin).div(inputMax.sub(inputMin));
    const normalizedLabels = labelTensor.sub(labelMin).div(labelMax.sub(labelMin));
    return {
      inputs: normalizedInputs,
      labels: normalizedLabels,
      // Return the min/max bounds so we can use them later.
      inputMax,
      inputMin,
      labelMax,
      labelMin,
    };
  });
}
async function trainModel(model, inputs, labels) {
  model.compile({
    optimizer: tf.train.adam(),
    loss: tf.losses.meanSquaredError,
    metrics: ['mse'],
  });
  const batchSize = 32;
  const epochs = 30;
  const history = await model.fit(inputs, labels, {
    batchSize,
    epochs,
    callbacks: tfvis.show.fitCallbacks(
      { name: 'Training Performance' },
      ['loss', 'mse'],
      {
        xAxisDomain: [0, 100],
        yAxisDomain: [0, 1],
        height: 200,
        callbacks: ['onEpochEnd'] // add 'onBatchEnd' to also plot per-batch values
      }
    )
  });
  tfvis.show.history({ name: 'History' }, history, ['loss', 'mse']);
  return history;
}
function testModel(model, inputData, normalizationData) {
  const { inputMax, inputMin, labelMin, labelMax } = normalizationData;
  const [xs, preds] = tf.tidy(() => {
    const xs = tf.linspace(0, 1, 100);
    const preds = model.predict(xs.reshape([100, 1]));
    const unNormXs = xs
      .mul(inputMax.sub(inputMin))
      .add(inputMin);
    const unNormPreds = preds
      .mul(labelMax.sub(labelMin))
      .add(labelMin);
    return [unNormXs.dataSync(), unNormPreds.dataSync()];
  });
  const predictedPoints = Array.from(xs).map((val, i) => {
    return { x: val, y: preds[i] };
  });
  const originalPoints = inputData.map(d => ({
    x: d.pay, y: d.day,
  }));
  console.log("ORIGINAL POINTS:");
  console.log(originalPoints);
  tfvis.render.scatterplot(
    { name: 'Model Predictions vs Original Data' },
    { values: [originalPoints, predictedPoints], series: ['original', 'predicted'] },
    {
      xAxisDomain: [0, 10],
      yAxisDomain: [0, 1600],
      xLabel: 'Horsepower',
      yLabel: 'MPG',
      height: 1000
    }
  );
}
document.addEventListener('DOMContentLoaded', run);
Basically, we want to see a prediction line for our data, but we're not getting anything back.
It worked when we used data like this:
var userData = [{
  day: 1,
  pay: 20
},...]
The data processing is not performed well, so the values used for training contain NaN and Infinity. As a result, the error computed by model.fit is NaN and therefore cannot be displayed on the tfjs-vis chart.
The filtering
.filter(car => (car.day != null && car.pay != null));
does not remove NaN and Infinity. This condition can be used instead:
.filter(car => isFinite(car.pay + car.day) && !isNaN(car.pay + car.day));
Though the NaN and Infinity values occur within car.day, the filtering here is applied over both car.pay and car.day (hence the added sum) to make sure that these values will not appear anywhere in the cleaned data.
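Note also that getData maps each record to timeInMinutes and pay_rate, while the rest of the code reads d.day and d.pay. A minimal sketch of getData, assuming the intent is to use day and pay throughout (calculateMins and normalizeUberPrice are the helper functions from the question):

async function getData() {
  const carsData = await userData;
  return carsData
    .map(car => ({
      day: calculateMins(car.Request_Time),
      pay: normalizeUberPrice(car.Distance_miles, car.Fare_Amount),
    }))
    // drop every entry whose day or pay is NaN or Infinity
    .filter(car => isFinite(car.pay + car.day) && !isNaN(car.pay + car.day));
}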
Here you can see how to display the loss.
I managed to load multiple models into the same viewer and now I am trying to extract properties and values of the elements of each model; however, when I use getPropertyDb() and executeUserFunction(), I get back only the properties of the initial model.
I started with the code from this repo and used this article to understand how to load multiple models.
The first model is loaded after a redirect from the server.
function onDocumentLoadSuccess(doc) {
  const geometries = doc.getRoot().search({ type: 'geometry' });
  if (geometries.length === 0) {
    console.error('Document contains no viewables.');
    return;
  }
  const initViewable = geometries[0];
  const svfUrl = doc.getViewablePath(initViewable);
  const mat = new THREE.Matrix4();
  const modelOptions = {
    placementTransform: mat,
    globalOffset: { x: 0, y: 0, z: 0 },
    sharedPropertyDbPath: doc.getPropertyDbPath()
  };
  const viewerDiv = document.getElementById('MyViewerDiv');
  const config = {
    extensions: myExtensions
  };
  viewer = new Autodesk.Viewing.Private.GuiViewer3D(viewerDiv, config);
  viewer.start(svfUrl, modelOptions, onLoadModelSuccess, onLoadModelError);
}
After the geometry of each model is loaded, an extension does some stuff.
function MyExtension(viewer, options) {
  Autodesk.Viewing.Extension.call(this, viewer, options);
}

MyExtension.prototype = Object.create(Autodesk.Viewing.Extension.prototype);
MyExtension.prototype.constructor = MyExtension;

MyExtension.prototype.onGeometryLoadEvent = function(event) {
  const myPromise = this.viewer.model
    .getPropertyDb()
    .executeUserFunction(userFunction);
  myPromise
    .then(function(retValue) {
      if (!retValue) {
        console.log('Model doesn\'t contain valid elements.');
      }
      // do stuff...
    })
    .catch(err => console.log(err));
};

MyExtension.prototype.load = function() {
  this.onGeometryLoadBinded = this.onGeometryLoadEvent.bind(this);
  this.viewer.addEventListener(
    Autodesk.Viewing.GEOMETRY_LOADED_EVENT,
    this.onGeometryLoadBinded
  );
  return true;
};

MyExtension.prototype.unload = function() {
  this.viewer.removeEventListener(
    Autodesk.Viewing.GEOMETRY_LOADED_EVENT,
    this.onGeometryLoadBinded
  );
  this.onGeometryLoadBinded = null;
  return true;
};

Autodesk.Viewing.theExtensionManager.registerExtension(
  'MyExtension',
  MyExtension
);

function userFunction(pdb) {
  // get properties of the elements
}
New models are loaded in the same viewer using an extension as well.
MyOtherExtension.prototype.onDocumentLoadSuccess = function(doc) {
  // get the svfUrl of the initial geometry and set the loading options
  this.viewer.loadModel(
    svfUrl,
    loaderOptions,
    this.onLoadModelSuccessBinded,
    this.onLoadModelErrorBinded
  );
};
How do I update the Property Database in order to get the properties and values for all the models that are currently loaded into the viewer?
Try accessing a specific model's property database through the model object:
viewer.impl.modelQueue().getModels()[index].getPropertyDb()
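For example, a minimal sketch that runs the question's userFunction against every model currently loaded in the viewer, rather than only against viewer.model (which points at the initial model):

const models = viewer.impl.modelQueue().getModels();

Promise.all(
  models.map(model => model.getPropertyDb().executeUserFunction(userFunction))
).then(results => {
  // one result per loaded model, in the same order as getModels()
  results.forEach((retValue, index) => console.log('model', index, retValue));
}).catch(err => console.log(err));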
We are working with a 3rd-party grid (Telerik Kendo) that has paging/sorting/filtering built in. It sends requests in a certain format when making the GET call, and I'm trying to determine if there is a way to translate these requests into AutoQuery-friendly requests.
Query string params
Sort Pattern:
sort[{0}][field] and sort[{0}][dir]
Filtering:
filter[filters][{0}][field]
filter[filters][{0}][operator]
filter[filters][{0}][value]
So this, which is populated in the querystring:
filter[filters][0][field]
filter[filters][0][operator]
filter[filters][0][value]
would need to be translated to:
FieldName=1 // filter[filters][0][field]+filter[filters][0][operator]+filter[filters][0][value] in a nutshell (not exactly true)
Should I manipulate the querystring object in a plugin by removing the filters (or just adding the ones I need)? Is there a better option here?
I'm not sure there is a clean way to do this on the Kendo side either.
I'll explain the two routes I went down; I hope to see a better answer.
First, I tried to modify the querystring in a request filter, but could not. I ended up having to run the AutoQuery queries manually by getting the params and modifying them before calling AutoQuery.Execute. Something like this:
var requestparams = Request.ToAutoQueryParams();
var q = AutoQueryDb.CreateQuery(requestobject, requestparams);
AutoQueryDb.Execute(requestobject, q);
I wish there was a more global way to do this. The extension method just loops over all the querystring params and adds the ones that I need.
After doing the above work, I wasn't very happy with the result so I investigated doing it differently and ended up with the following:
Register the Kendo grid filter operations against their equivalent ServiceStack AutoQuery conventions:
var aq = new AutoQueryFeature { MaxLimit = 100, EnableAutoQueryViewer = true };
aq.ImplicitConventions.Add("%neq", aq.ImplicitConventions["%NotEqualTo"]);
aq.ImplicitConventions.Add("%eq", "{Field} = {Value}");
Next, on the grid's read operation, we need to reformat the querystring:
read: {
  url: "/api/stuff?format=json&isGrid=true",
  data: function (options) {
    if (options.sort && options.sort.length > 0) {
      options.OrderBy = (options.sort[0].dir == "desc" ? "-" : "") + options.sort[0].field;
    }
    if (options.filter && options.filter.filters.length > 0) {
      for (var i = 0; i < options.filter.filters.length; i++) {
        var f = options.filter.filters[i];
        console.log(f);
        options[f.field + f.operator] = f.value;
      }
    }
  }
}
Now the grid will send the operations in an AutoQuery-friendly manner.
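To illustrate with a hypothetical filter descriptor, the data() hook above flattens Kendo's structure into parameters that the registered conventions can match:

// What Kendo hands to data() (hypothetical values):
var options = {
  sort: [{ field: 'createdOn', dir: 'desc' }],
  filter: { filters: [{ field: 'Name', operator: 'eq', value: 'Bob' }] }
};
// After the transform above, the request carries:
//   options.OrderBy === '-createdOn'
//   options.Nameeq  === 'Bob'   // matched by the "%eq" convention as Name = {Value}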
I created an AutoQueryDataSource TypeScript class that you may or may not find useful.
Its usage is along the lines of:
this.gridDataSource = AutoQueryKendoDataSource.getDefaultInstance<dtos.QueryDbSubclass, dtos.ListDefinition>('/api/autoQueryRoute', { orderByDesc: 'createdOn' });
export default class AutoQueryKendoDataSource<queryT extends dtos.QueryDb_1<T>, T> extends kendo.data.DataSource {
    private constructor(options: kendo.data.DataSourceOptions = {}, public route?: string, public request?: queryT) {
        super(options);
    }

    defer: ng.IDeferred<any>;

    static exportToExcel(columns: kendo.ui.GridColumn[], dataSource: kendo.data.DataSource, filename: string) {
        let rows = [{ cells: columns.map(d => { return { value: d.field }; }) }];
        dataSource.fetch(function () {
            var data = this.data();
            for (var i = 0; i < data.length; i++) {
                // push a single row for every record
                rows.push({
                    cells: _.map(columns, d => { return { value: data[i][d.field] }; })
                });
            }
            var workbook = new kendo.ooxml.Workbook({
                sheets: [
                    {
                        columns: _.map(columns, d => { return { autoWidth: true }; }),
                        // Title of the sheet
                        title: filename,
                        // Rows of the sheet
                        rows: rows
                    }
                ]
            });
            // save the file as an Excel file with the xlsx extension
            kendo.saveAs({ dataURI: workbook.toDataURL(), fileName: filename });
        });
    }

    static getDefaultInstance<queryT extends dtos.QueryDb_1<T>, T>(route: string, request: queryT, $q?: ng.IQService, model?: any) {
        let sortInfo: {
            orderBy?: string,
            orderByDesc?: string,
            skip?: number
        } = {};
        let opts = {
            transport: {
                read: {
                    url: route,
                    dataType: 'json',
                    data: request
                },
                parameterMap: (data, type) => {
                    if (type == 'read') {
                        // for AutoQuery to work, we need only field names, not entity names
                        if (data.sort) {
                            data.sort.forEach((s: any) => {
                                if (s.field.indexOf('.') > -1) {
                                    var arr = _.split(s.field, '.');
                                    s.field = arr[arr.length - 1];
                                }
                            });
                        }
                        sortInfo = {
                            orderByDesc: _.join(_.map(_.filter(data.sort, (s: any) => s.dir == 'desc'), 'field'), ','),
                            orderBy: _.join(_.map(_.filter(data.sort, (s: any) => s.dir == 'asc'), 'field'), ','),
                            skip: 0
                        };
                        if (data.page) {
                            sortInfo.skip = (data.page - 1) * data.pageSize;
                            _.extend(data, request);
                        }
                        // override sorting if done via the grid
                        if (sortInfo.orderByDesc) {
                            (<any>data).orderByDesc = sortInfo.orderByDesc;
                            (<any>data).orderBy = null;
                        }
                        if (sortInfo.orderBy) {
                            (<any>data).orderBy = sortInfo.orderBy;
                            (<any>data).orderByDesc = null;
                        }
                        (<any>data).skip = sortInfo.skip;
                        return data;
                    }
                    return data;
                },
            },
            requestStart: (e: kendo.data.DataSourceRequestStartEvent) => {
                let ds = <AutoQueryKendoDataSource<queryT, T>>e.sender;
                if ($q)
                    ds.defer = $q.defer();
            },
            requestEnd: (e: kendo.data.DataSourceRequestEndEvent) => {
                new DatesToStringsService().convert(e.response);
                let ds = <AutoQueryKendoDataSource<queryT, T>>e.sender;
                if (ds.defer)
                    ds.defer.resolve();
            },
            schema: {
                data: (response: dtos.QueryResponse<T>) => {
                    return response.results;
                },
                type: 'json',
                total: 'total',
                model: model
            },
            pageSize: request.take || 40,
            page: 1,
            serverPaging: true,
            serverSorting: true
        };
        let ds = new AutoQueryKendoDataSource<queryT, T>(opts, route, request);
        return ds;
    }
}
I have a problem: I am receiving a large amount of data from the server and converting it to JSON format to be viewed in jqGrid. It works for a small amount of data, say 200 rows, but doing this for 10000 rows throws the following error:
System.InvalidOperationException: Error during serialization or deserialization using the JSON JavaScriptSerializer. The length of the string exceeds the value set on the maxJsonLength property
I have tried using the JavaScriptSerializer with maxJsonLength set to Int32.MaxValue, but still no luck.
Following is my code; please give me suggestions, with examples, for how I can fix this. Thanks all!
GridConfig
public JqGridConfig(String db, String jobGroup, String jobName, String detailTable, String filterBatchControl, String filterDate, String filterTime, int page)
{
    var entityhelper = new EntityHelper();
    var s = new JsonSerializer();
    try
    {
        // Populate grid model, column names, grid column model, grid data
        entityhelper.PopulateDetailGridInit(db, jobGroup, jobName, detailTable, filterBatchControl, filterDate, filterTime);
        JqGridDetailAttributes = entityhelper.GridDetailAttributes;
        JqGridDetailColumnNames = entityhelper.GridDetailColumnNames;
        //JqGridDetailsColumnNamesForExport = entityhelper.GridDetailColumnNamesForExport;
        JqGridDetailColumnModel = entityhelper.GridDetailColumnModel;

        // Dynamic data
        JqGridDynamicDetailData = entityhelper.GridDetailData;

        #region Column Model
        foreach (KeyValuePair<String, JqGridColModel> kvp in entityhelper.GridDetailColumnModel)
        {
            s.Serialize(kvp.Key, kvp.Value.Attributes);
        }
        JqGridDetailColumnModelJson = s.Json();
        #endregion

        #region Concrete data. 1. List<dynamic> populated, 2. Convert to Json String, 3: Convert back to List<Detail>
        JqGridDetailData = JsonSerializer.ConvertDynamicDetailsToJson(JqGridDynamicDetailData); // this is where the error occurs
        #endregion
    }
    catch (Exception ex)
    {
        // TODO: Logging
        System.Diagnostics.Debug.WriteLine(ex.Message);
    }
}
Json Serializer
public static IList<Detail> ConvertDynamicDetailsToJson(IList<dynamic> list)
{
    if (list.Count == 0)
        return new List<Detail>();

    var sb = new StringBuilder();
    sb.Append("[");
    foreach (var item in list)
    {
        var d = item as IDictionary<String, Object>;
        var contents = new List<String>(); // reset per row, otherwise earlier rows' fields are repeated
        sb.Append("{");
        foreach (KeyValuePair<String, Object> kvp in d)
        {
            contents.Add(String.Format("{0}: {1}", "\"" + kvp.Key + "\"", JsonConvert.SerializeObject(kvp.Value)));
        }
        sb.Append(String.Join(",", contents.ToArray()));
        sb.Append("},");
    }
    sb.Append("]");
    // remove the trailing comma before the closing bracket
    sb.Remove(sb.Length - 2, 1);
    var jarray = JsonConvert.DeserializeObject<List<Detail>>(sb.ToString());
    return jarray;
}
Controller that returns the JSON result from the server
public JsonResult DetailGridData(TheParams param)
{
    dynamic config = "";
    switch (param.JobGroup)
    {
        case "a":
            config = new BLL.abcBLL().GetDetailGridData("rid", "desc", 1, 20, null,
                param.FilterBatchControl,
                param.JobName, param.DetailTable,
                param.JobGroup, param.BatchDate,
                param.Source);
            break;
    }
    return Json(config, JsonRequestBehavior.AllowGet); // this returns the JSON result successfully
}
View where the jqGrid exists; the grid does not populate.
<script type="text/javascript">
    var jobGroup = '@ViewBag.JobGroup';
    var jobName = '@ViewBag.JobName';
    var detailTable = '@ViewBag.DetailTable';
    var filterBatchControl = '@ViewBag.FilterBatchControl';
    var controlDate = '@ViewBag.ControlDate';
    var controlTime = '@ViewBag.ControlTime';
    var source = '@ViewBag.DetailSource';
    var page = '@ViewBag.page';
    function loadDetailData() {
        var param = new Object();
        param.BatchDate = controlDate;
        param.BatchTime = controlTime;
        param.JobGroup = jobGroup;
        param.JobName = jobName;
        param.DetailTable = detailTable;
        param.FilterBatchControl = filterBatchControl;
        param.Source = source;
        param.page = page;
        window.parent.loadingDetailsHeader();
        $.ajax({
            url: "/control/detailgriddata",
            dataType: 'json',
            type: 'POST',
            data: param,
            async: false,
            success: function (response) {
                try {
                    jgGridDetailColumnNames = response.JqGridDetailColumnNames;
                    //jqGridDetailColumnData = response.JqGridDetailData;
                    jqGridDetailColumnData = response.config;
                    $('#detailGrid').jqGrid('setGridParam', { colNames: jgGridDetailColumnNames });
                    $('#detailGrid').jqGrid('setGridParam', { data: jqGridDetailColumnData }).trigger('reloadGrid');
                    parent.loadingDetailsHeaderComplete();
                }
                catch (e) {
                    window.parent.loadingDetailsHeaderException(e.Message);
                }
                return false;
            },
            error: function (xhr, ajaxOptions, thrownError) {
                alert(xhr.status);
                alert(thrownError);
            }
        });
    }
    function exportdetails(date) {
        var param = new Object();
        param.db = source;
        param.jobGroup = jobGroup;
        param.jobName = jobName;
        param.detailTable = detailTable;
        param.filterBatchControl = filterBatchControl;
        param.filterDate = date;
        param.filterTime = "NULL";
        $.ajax({
            type: 'POST',
            contentType: 'application/json; charset=utf-8',
            url: '@Url.Action("ExportDetailsCsv", "Control")',
            dataType: 'json',
            data: $.toJSON(param),
            async: false,
            success: function (response) {
                window.location.assign(response.fileName);
            },
            error: function (xhr, ajaxOptions, thrownError) {
                alert("Details Export Exception: " + xhr.status);
            }
        });
    }
    //<![CDATA[
    $(document).ready(function () {
        'use strict';
        $(window).resize(function () {
            $("#detailGrid").setGridWidth($(window).width());
        }).trigger('resize');

        var dgrid = $("#detailGrid");
        $('#detailGrid').jqGrid('clearGridData');
        loadDetailData();
        dgrid.jqGrid({
            datatype: 'json',
            data: jqGridDetailColumnData,
            colNames: jgGridDetailColumnNames,
            colModel: [ @Html.Raw(ViewBag.ColModelDetail) ],
            rowNum: 25,
            rowList: [25, 50, 100],
            pager: '#detailPager',
            gridview: true,
            autoencode: false,
            ignoreCase: true,
            viewrecords: true,
            altrows: false,
            autowidth: true,
            shrinkToFit: true,
            headertitles: true,
            hoverrows: true,
            height: 300,
            onSelectRow: function (rowId) {
                // This is a demo dialog with a jqGrid embedded;
                // use this as the base for viewing detail data of details
                //$('#dialogGrid').dialog();
                //gridDialog();
            },
            loadComplete: function (data) {},
            gridComplete: function (data) {
                //if (parseInt(data.records, 10) < 50) {
                $('#detailPager').show();
                //} else {
                //    $('#detailPager').show();
                //}
            }
        }).jqGrid('navGrid', '#detailPager', { edit: false, add: false, del: false, search: false }, {});
    });
    //]]>
</script>
<table id="detailGrid">
    <tr>
        <td />
    </tr>
</table>
<div id="detailPager"></div>
<div id="dialogGrid"></div>
Probably you should consider using server-side paging instead of returning 10000 rows to the client. Server-side paging of SQL data can be implemented much more efficiently than client-side paging (which means sorting large, non-indexed data in a JavaScript program).
One more option you have is to use another JSON serializer, for example protobuf-net, ServiceStack.Text (see here too), Json.NET, or others. In this way you can additionally improve the performance of your application compared with JavaScriptSerializer.
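For the first option, a minimal sketch of the client side, assuming a hypothetical endpoint that pages in SQL: jqGrid sends page/rows/sidx/sord on each request and, with its default reader, expects a { page, total, records, rows } response, so the server only ever serializes one page of rows at a time and never approaches the maxJsonLength limit:

$('#detailGrid').jqGrid({
    url: '/control/detailgriddatapaged', // hypothetical server-paged endpoint
    datatype: 'json',
    mtype: 'POST',
    colNames: jgGridDetailColumnNames,
    colModel: [ /* as before */ ],
    rowNum: 25,       // only 25 rows are fetched and serialized per request
    rowList: [25, 50, 100],
    pager: '#detailPager',
    viewrecords: true
});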