Here is a working fiddle that demonstrates it. For more information, see the Newtonsoft documentation on Custom JsonConverter.
DataTableJsonConverter
Convert a DataTable into a custom JSON string.
/// <summary>
/// Serializes a <see cref="DataTable"/> into a compact JSON object of the form
/// <c>{ "data": [ [cell, cell, ...], ... ] }</c> — one inner array per row,
/// cells in column order. Reading JSON back is intentionally unsupported
/// (<see cref="CanRead"/> returns false).
/// </summary>
public class DataTableJsonConverter : JsonConverter
{
    /// <summary>Writes the table as { "data": [ [row cells...], ... ] }.</summary>
    /// <param name="writer">Destination JSON writer.</param>
    /// <param name="value">The DataTable to serialize (may be null).</param>
    /// <param name="serializer">Calling serializer (unused; cells are written directly).</param>
    public override void WriteJson(JsonWriter writer, object value, JsonSerializer serializer)
    {
        var table = value as DataTable;
        if (table == null)
        {
            // The original unchecked "as" cast would throw NullReferenceException
            // for a null value; emit JSON null instead.
            writer.WriteNull();
            return;
        }
        writer.WriteStartObject();
        writer.WritePropertyName("data");
        writer.WriteStartArray();
        foreach (DataRow row in table.Rows)
        {
            writer.WriteStartArray();
            foreach (var cell in row.ItemArray)
            {
                writer.WriteValue(cell);
            }
            writer.WriteEndArray();
        }
        writer.WriteEndArray();
        writer.WriteEndObject();
    }

    /// <summary>Never called: <see cref="CanRead"/> is false.</summary>
    public override object ReadJson(JsonReader reader, Type objectType, object existingValue, JsonSerializer serializer)
    {
        throw new NotImplementedException("Unnecessary: CanRead is false.");
    }

    /// <summary>This converter only writes JSON; it cannot read it.</summary>
    public override bool CanRead { get { return false; } }

    /// <summary>
    /// Accepts DataTable and any subclass (e.g. strongly-typed dataset tables),
    /// which the original equality check (objectType == typeof(DataTable)) rejected.
    /// </summary>
    public override bool CanConvert(Type objectType)
    {
        return typeof(DataTable).IsAssignableFrom(objectType);
    }
}
Here's How to Use It
/// <summary>
/// Demo entry point: builds a small sample DataTable and prints it as
/// indented JSON using <c>DataTableJsonConverter</c>.
/// </summary>
public class Program
{
    public static void Main()
    {
        var table = SeedData();
        var json = JsonConvert.SerializeObject(
            table,
            Newtonsoft.Json.Formatting.Indented,
            new DataTableJsonConverter());
        Console.WriteLine(json);
    }

    /// <summary>Creates a two-column table seeded with two identical sample rows.</summary>
    public static DataTable SeedData()
    {
        var table = new DataTable();
        table.Columns.Add("Name");
        table.Columns.Add("Position");
        // Two identical rows, matching the original loop of two iterations.
        table.Rows.Add(new object[] { "Shaun", "Developer" });
        table.Rows.Add(new object[] { "Shaun", "Developer" });
        return table;
    }
}
Here's Its Output
{
"data": [
[
"Shaun",
"Developer"
],
[
"Shaun",
"Developer"
]
]
}
Performance
For those who are interested, here is a fork of the fiddle that compares the performance of three different methods — from me, dbc, and
warheat1990 — over 1500 data rows, with two runs each. They are all very close and, for reasons unknown to me, the second run is always faster.
DataTableJsonConverter:6 ms
DataTableJsonConverter:2 ms
DataTableTo2dArrayConverter:251 ms
DataTableTo2dArrayConverter:11 ms
JqueryDatatablesConverter:1580 ms
JqueryDatatablesConverter:16 ms
DataTable looks problematic.